Bug 1383831 - Upgrade rust-url to version 1.5.1 r=froydnj
MozReview-Commit-ID: L3j6K5dDWHi
@@ -8,6 +8,6 @@ name = "rust_url_capi"
[dependencies]
libc = "0.2.0"
-url = "1.4.0"
+url = "1.5.1"
nsstring = { path = "../../../xpcom/rust/nsstring" }
nserror = { path = "../../../xpcom/rust/nserror" }

testing/geckodriver/Cargo.lock (generated, 27 lines changed)
@@ -181,17 +181,17 @@ dependencies = [
 "traitobject 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "typeable 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "unicase 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "url 1.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
name = "idna"
-version = "0.1.2"
+version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
 "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "unicode-bidi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-normalization 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]

@@ -364,6 +364,11 @@ dependencies = [
 "libc 0.2.24 (registry+https://github.com/rust-lang/crates.io-index)",
]

+[[package]]
+name = "percent-encoding"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "podio"
version = "0.1.5"
@@ -580,7 +585,7 @@ dependencies = [

[[package]]
name = "unicode-normalization"
-version = "0.1.4"
+version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"

[[package]]

@@ -603,11 +608,12 @@ dependencies = [

[[package]]
name = "url"
-version = "1.4.1"
+version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "idna 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
@@ -646,7 +652,7 @@ dependencies = [
 "regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
 "time 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
- "url 1.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]

@@ -703,7 +709,7 @@ dependencies = [
"checksum gcc 0.3.42 (registry+https://github.com/rust-lang/crates.io-index)" = "291055c78f59ca3d84c99026c9501c469413d386bb46be1e1cf1d285cd1db3b0"
"checksum httparse 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "77f756bed9ee3a83ce98774f4155b42a31b787029013f3a7d83eca714e500e21"
"checksum hyper 0.10.10 (registry+https://github.com/rust-lang/crates.io-index)" = "36e108e0b1fa2d17491cbaac4bc460dc0956029d10ccf83c913dd0e5db3e7f07"
-"checksum idna 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2233d4940b1f19f0418c158509cd7396b8d70a5db5705ce410914dc8fa603b37"
+"checksum idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "014b298351066f1512874135335d62a789ffe78a9974f94b43ed5621951eaf7d"
"checksum isatty 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "fa500db770a99afe2a0f2229be2a3d09c7ed9d7e4e8440bf71253141994e240f"
"checksum kernel32-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e014dab1082fd9d80ea1fa6fcb261b47ed3eb511612a14198bb507701add083e"
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"

@@ -726,6 +732,7 @@ dependencies = [
"checksum num-iter 0.1.33 (registry+https://github.com/rust-lang/crates.io-index)" = "f7d1891bd7b936f12349b7d1403761c8a0b85a18b148e9da4429d5d102c1a41e"
"checksum num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "e1cbfa3781f3fe73dc05321bed52a06d2d491eaa764c52335cf4399f046ece99"
"checksum num_cpus 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a225d1e2717567599c24f88e49f00856c6e825a12125181ee42c4257e3688d39"
+"checksum percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de154f638187706bde41d9b4738748933d64e6b37bdbffc0b47a97d16a6ae356"
"checksum podio 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e5422a1ee1bc57cc47ae717b0137314258138f38fd5f3cea083f43a9725383a0"
"checksum rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "022e0636ec2519ddae48154b028864bdce4eaf7d35226ab8e65c611be97b189d"
"checksum redox_syscall 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "8dd35cc9a8bdec562c757e3d43c1526b5c6d2653e23e2315065bc25556550753"

@@ -754,11 +761,11 @@ dependencies = [
"checksum typeable 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1410f6f91f21d1612654e7cc69193b0334f909dcf2c790c4826254fbb86f8887"
"checksum unicase 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "13a5906ca2b98c799f4b1ab4557b76367ebd6ae5ef14930ec841c74aed5f3764"
"checksum unicode-bidi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a6a2c4e3710edd365cd7e78383153ed739fa31af19f9172f72d3575060f5a43a"
-"checksum unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e28fa37426fceeb5cf8f41ee273faa7c82c47dc8fba5853402841e665fcd86ff"
+"checksum unicode-normalization 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "51ccda9ef9efa3f7ef5d91e8f9b83bbe6955f9bf86aec89d5cce2c874625920f"
"checksum unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18127285758f0e2c6cf325bb3f3d138a12fee27de4f23e146cd6a179f26c2cf3"
"checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
"checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91"
-"checksum url 1.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3e2ba3456fbe5c0098cb877cf08b92b76c3e18e0be9e47c35b487220d377d24e"
+"checksum url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eeb819346883532a271eb626deb43c4a1bb4c4dd47c519bd78137c3e72a4fe27"
"checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122"
"checksum uuid 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)" = "78c590b5bd79ed10aad8fb75f078a59d8db445af6c743e55c4a53227fc01c13f"
"checksum vec_map 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "887b5b631c2ad01628bbbaa7dd4c869f80d3186688f8d0b6f58774fbe324988c"
third_party/rust/idna/.cargo-checksum.json (vendored, 2 lines changed)
@@ -1 +1 @@
{"files":{".cargo-ok":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","Cargo.toml":"35fb5d8cfa50a27a476e718e437bfd1405ae4d38ddeb87a7d3404af67f8f6baa","src/IdnaMappingTable.txt":"a20be6e70dd1e48c2d15615455fef6098ba104756e5e37427bf8bd919b9d1118","src/lib.rs":"1fbd82781f2402e3dd3017673e7f2b2c40c8272c5a550cef7695f4e59df90c01","src/make_uts46_mapping_table.py":"60201ddefb8a3cb6f344b79808e5f93dfef5e21dcbacd4c8a0a36f172701c486","src/punycode.rs":"efb547848493d735aab32a0d0b2a2c795360ca9706272412524738794a540223","src/uts46.rs":"e6bb573e1469e9c0b6b83353083120696eb36d224821af5e3f39e8c397870877","src/uts46_mapping_table.rs":"88c01d8bcbd32741a9f3f7ea2cfb9e7e9883e3f83ee2de024d03bdc65a62d7f8","tests/IdnaTest.txt":"24817204a6dc010b91e98d899a8df627a94471f8893d703afca1d022f808c887","tests/punycode.rs":"57854e04949a43ed4b6b263c24d7d6502617a31e439cebb5b1cbd8cbce013dfb","tests/punycode_tests.json":"3d4ac0cf25984c37b9ce197f5df680a0136f728fb8ec82bc76624e42139eb3a8","tests/tests.rs":"bb92e129dc5e17e9a86ec6062dd7b3f4c905c4af69e773d7c70efea177654c7b","tests/uts46.rs":"4723a16d52e453b136a763fd883e48db5f198c45476b541c1917ed44725c3c7f"},"package":"2233d4940b1f19f0418c158509cd7396b8d70a5db5705ce410914dc8fa603b37"}
{"files":{".cargo-ok":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","Cargo.toml":"82f46006a9e4959473d4426a9e4254172c5bb85fc191089dcda0b556e2b8e8be","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"20c7855c364d57ea4c97889a5e8d98470a9952dade37bd9248b9a54431670e5e","src/IdnaMappingTable.txt":"813a8308aeff8bcb9368751e1fd0ad7cc467130965d53ac860f82c4d0d11523f","src/lib.rs":"1fbd82781f2402e3dd3017673e7f2b2c40c8272c5a550cef7695f4e59df90c01","src/make_uts46_mapping_table.py":"3fa8cf34744fa0f531a77bd5d4e2231df85aa29bc82abed6e269fd9d9f33be6b","src/punycode.rs":"efb547848493d735aab32a0d0b2a2c795360ca9706272412524738794a540223","src/uts46.rs":"7ce58359fba57690ecf526ffd9031f76fb413d371a040d4e0a641973dcb32c6c","src/uts46_mapping_table.rs":"25c88d5ea382b8dc0880d5d48205c4007a80186f2a17e563d2f40462f29199e4","tests/IdnaTest.txt":"921c68e5d3fbb631b26140d232af90040fc4df612857d1894641ded319e52822","tests/punycode.rs":"57854e04949a43ed4b6b263c24d7d6502617a31e439cebb5b1cbd8cbce013dfb","tests/punycode_tests.json":"3d4ac0cf25984c37b9ce197f5df680a0136f728fb8ec82bc76624e42139eb3a8","tests/tests.rs":"d9f4ab9d8fc43b2f81031c45fe16f4013a866091797be695e4115478572e3965","tests/unit.rs":"d2993b27bc6242f2c0315c66cfc1875187b329980569571adfc17c302d266d3f","tests/uts46.rs":"4723a16d52e453b136a763fd883e48db5f198c45476b541c1917ed44725c3c7f"},"package":"014b298351066f1512874135335d62a789ffe78a9974f94b43ed5621951eaf7d"}
third_party/rust/idna/Cargo.toml (vendored, 9 lines changed)
@@ -1,6 +1,6 @@
[package]
name = "idna"
-version = "0.1.2"
+version = "0.1.4"
authors = ["The rust-url developers"]
description = "IDNA (Internationalizing Domain Names in Applications) and Punycode."
repository = "https://github.com/servo/rust-url/"

@@ -14,11 +14,14 @@ test = false
name = "tests"
harness = false

+[[test]]
+name = "unit"
+
[dev-dependencies]
-rustc-test = "0.1"
+rustc-test = "0.2"
rustc-serialize = "0.3"

[dependencies]
unicode-bidi = "0.3"
-unicode-normalization = "0.1.3"
+unicode-normalization = "0.1.5"
matches = "0.1"
third_party/rust/idna/LICENSE-APACHE (vendored, new file, 201 lines)
@@ -0,0 +1,201 @@
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
third_party/rust/idna/LICENSE-MIT (vendored, new file, 25 lines)
@@ -0,0 +1,25 @@
+Copyright (c) 2013-2016 The rust-url developers
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
third_party/rust/idna/src/IdnaMappingTable.txt (vendored, 121 lines changed)
@@ -1,6 +1,6 @@
-# IdnaMappingTable-9.0.0.txt
-# Date: 2016-06-16, 13:35:01 GMT
-# © 2016 Unicode®, Inc.
+# IdnaMappingTable-10.0.0.txt
+# Date: 2017-02-23, 14:18:32 GMT
+# © 2017 Unicode®, Inc.
# Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries.
# For terms of use, see http://www.unicode.org/terms_of_use.html
#
@@ -964,7 +964,9 @@
|
||||
0840..085B ; valid # 6.0 MANDAIC LETTER HALQA..MANDAIC GEMINATION MARK
|
||||
085C..085D ; disallowed # NA <reserved-085C>..<reserved-085D>
|
||||
085E ; valid ; ; NV8 # 6.0 MANDAIC PUNCTUATION
|
||||
085F..089F ; disallowed # NA <reserved-085F>..<reserved-089F>
|
||||
085F ; disallowed # NA <reserved-085F>
|
||||
0860..086A ; valid # 10.0 SYRIAC LETTER MALAYALAM NGA..SYRIAC LETTER MALAYALAM SSA
|
||||
086B..089F ; disallowed # NA <reserved-086B>..<reserved-089F>
|
||||
08A0 ; valid # 6.1 ARABIC LETTER BEH WITH SMALL V BELOW
|
||||
08A1 ; valid # 7.0 ARABIC LETTER BEH WITH HAMZA ABOVE
|
||||
08A2..08AC ; valid # 6.1 ARABIC LETTER JEEM WITH TWO DOTS ABOVE..ARABIC LETTER ROHINGYA YEH
|
||||
@@ -1043,7 +1045,9 @@
|
||||
09E6..09F1 ; valid # 1.1 BENGALI DIGIT ZERO..BENGALI LETTER RA WITH LOWER DIAGONAL
|
||||
09F2..09FA ; valid ; ; NV8 # 1.1 BENGALI RUPEE MARK..BENGALI ISSHAR
|
||||
09FB ; valid ; ; NV8 # 5.2 BENGALI GANDA MARK
|
||||
09FC..0A00 ; disallowed # NA <reserved-09FC>..<reserved-0A00>
|
||||
09FC ; valid # 10.0 BENGALI LETTER VEDIC ANUSVARA
|
||||
09FD ; valid ; ; NV8 # 10.0 BENGALI ABBREVIATION SIGN
|
||||
09FE..0A00 ; disallowed # NA <reserved-09FE>..<reserved-0A00>
|
||||
0A01 ; valid # 4.0 GURMUKHI SIGN ADAK BINDI
|
||||
0A02 ; valid # 1.1 GURMUKHI SIGN BINDI
|
||||
0A03 ; valid # 4.0 GURMUKHI SIGN VISARGA
|
||||
@@ -1116,7 +1120,8 @@
|
||||
0AF1 ; valid ; ; NV8 # 4.0 GUJARATI RUPEE SIGN
|
||||
0AF2..0AF8 ; disallowed # NA <reserved-0AF2>..<reserved-0AF8>
|
||||
0AF9 ; valid # 8.0 GUJARATI LETTER ZHA
|
||||
0AFA..0B00 ; disallowed # NA <reserved-0AFA>..<reserved-0B00>
|
||||
0AFA..0AFF ; valid # 10.0 GUJARATI SIGN SUKUN..GUJARATI SIGN TWO-CIRCLE NUKTA ABOVE
|
||||
0B00 ; disallowed # NA <reserved-0B00>
|
||||
0B01..0B03 ; valid # 1.1 ORIYA SIGN CANDRABINDU..ORIYA SIGN VISARGA
|
||||
0B04 ; disallowed # NA <reserved-0B04>
|
||||
0B05..0B0C ; valid # 1.1 ORIYA LETTER A..ORIYA LETTER VOCALIC L
|
||||
@@ -1251,7 +1256,8 @@
|
||||
0CE6..0CEF ; valid # 1.1 KANNADA DIGIT ZERO..KANNADA DIGIT NINE
|
||||
0CF0 ; disallowed # NA <reserved-0CF0>
|
||||
0CF1..0CF2 ; valid # 5.0 KANNADA SIGN JIHVAMULIYA..KANNADA SIGN UPADHMANIYA
|
||||
0CF3..0D00 ; disallowed # NA <reserved-0CF3>..<reserved-0D00>
|
||||
0CF3..0CFF ; disallowed # NA <reserved-0CF3>..<reserved-0CFF>
|
||||
0D00 ; valid # 10.0 MALAYALAM SIGN COMBINING ANUSVARA ABOVE
|
||||
0D01 ; valid # 7.0 MALAYALAM SIGN CANDRABINDU
|
||||
0D02..0D03 ; valid # 1.1 MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA
|
||||
0D04 ; disallowed # NA <reserved-0D04>
|
||||
@@ -1263,7 +1269,7 @@
|
||||
0D29 ; valid # 6.0 MALAYALAM LETTER NNNA
|
||||
0D2A..0D39 ; valid # 1.1 MALAYALAM LETTER PA..MALAYALAM LETTER HA
|
||||
0D3A ; valid # 6.0 MALAYALAM LETTER TTTA
|
||||
0D3B..0D3C ; disallowed # NA <reserved-0D3B>..<reserved-0D3C>
|
||||
0D3B..0D3C ; valid # 10.0 MALAYALAM SIGN VERTICAL BAR VIRAMA..MALAYALAM SIGN CIRCULAR VIRAMA
|
||||
0D3D ; valid # 5.1 MALAYALAM SIGN AVAGRAHA
|
||||
0D3E..0D43 ; valid # 1.1 MALAYALAM VOWEL SIGN AA..MALAYALAM VOWEL SIGN VOCALIC R
|
||||
0D44 ; valid # 5.1 MALAYALAM VOWEL SIGN VOCALIC RR
|
||||
@@ -1677,7 +1683,7 @@
|
||||
1CD3 ; valid ; ; NV8 # 5.2 VEDIC SIGN NIHSHVASA
|
||||
1CD4..1CF2 ; valid # 5.2 VEDIC SIGN YAJURVEDIC MIDLINE SVARITA..VEDIC SIGN ARDHAVISARGA
|
||||
1CF3..1CF6 ; valid # 6.1 VEDIC SIGN ROTATED ARDHAVISARGA..VEDIC SIGN UPADHMANIYA
|
||||
1CF7 ; disallowed # NA <reserved-1CF7>
|
||||
1CF7 ; valid # 10.0 VEDIC SIGN ATIKRAMA
|
||||
1CF8..1CF9 ; valid # 7.0 VEDIC TONE RING ABOVE..VEDIC TONE DOUBLE RING ABOVE
|
||||
1CFA..1CFF ; disallowed # NA <reserved-1CFA>..<reserved-1CFF>
|
||||
1D00..1D2B ; valid # 4.0 LATIN LETTER SMALL CAPITAL A..CYRILLIC LETTER SMALL CAPITAL EL
|
||||
@@ -1789,7 +1795,8 @@
|
||||
1DC4..1DCA ; valid # 5.0 COMBINING MACRON-ACUTE..COMBINING LATIN SMALL LETTER R BELOW
|
||||
1DCB..1DE6 ; valid # 5.1 COMBINING BREVE-MACRON..COMBINING LATIN SMALL LETTER Z
|
||||
1DE7..1DF5 ; valid # 7.0 COMBINING LATIN SMALL LETTER ALPHA..COMBINING UP TACK ABOVE
|
||||
1DF6..1DFA ; disallowed # NA <reserved-1DF6>..<reserved-1DFA>
|
||||
1DF6..1DF9 ; valid # 10.0 COMBINING KAVYKA ABOVE RIGHT..COMBINING WIDE INVERTED BRIDGE BELOW
|
||||
1DFA ; disallowed # NA <reserved-1DFA>
|
||||
1DFB ; valid # 9.0 COMBINING DELETION MARK
|
||||
1DFC ; valid # 6.0 COMBINING DOUBLE INVERTED BREVE BELOW
|
||||
1DFD ; valid # 5.2 COMBINING ALMOST EQUAL TO BELOW
|
||||
@@ -2338,7 +2345,8 @@
|
||||
20BA ; valid ; ; NV8 # 6.2 TURKISH LIRA SIGN
|
||||
20BB..20BD ; valid ; ; NV8 # 7.0 NORDIC MARK SIGN..RUBLE SIGN
|
||||
20BE ; valid ; ; NV8 # 8.0 LARI SIGN
|
||||
20BF..20CF ; disallowed # NA <reserved-20BF>..<reserved-20CF>
|
||||
20BF ; valid ; ; NV8 # 10.0 BITCOIN SIGN
|
||||
20C0..20CF ; disallowed # NA <reserved-20C0>..<reserved-20CF>
|
||||
20D0..20E1 ; valid ; ; NV8 # 1.1 COMBINING LEFT HARPOON ABOVE..COMBINING LEFT RIGHT ARROW ABOVE
|
||||
20E2..20E3 ; valid ; ; NV8 # 3.0 COMBINING ENCLOSING SCREEN..COMBINING ENCLOSING KEYCAP
|
||||
20E4..20EA ; valid ; ; NV8 # 3.2 COMBINING ENCLOSING UPWARD POINTING TRIANGLE..COMBINING LEFTWARDS ARROW OVERLAY
|
||||
@@ -2497,7 +2505,7 @@
|
||||
23E9..23F3 ; valid ; ; NV8 # 6.0 BLACK RIGHT-POINTING DOUBLE TRIANGLE..HOURGLASS WITH FLOWING SAND
|
||||
23F4..23FA ; valid ; ; NV8 # 7.0 BLACK MEDIUM LEFT-POINTING TRIANGLE..BLACK CIRCLE FOR RECORD
|
||||
23FB..23FE ; valid ; ; NV8 # 9.0 POWER SYMBOL..POWER SLEEP SYMBOL
|
||||
23FF ; disallowed # NA <reserved-23FF>
|
||||
23FF ; valid ; ; NV8 # 10.0 OBSERVER EYE SYMBOL
|
||||
2400..2424 ; valid ; ; NV8 # 1.1 SYMBOL FOR NULL..SYMBOL FOR NEWLINE
|
||||
2425..2426 ; valid ; ; NV8 # 3.0 SYMBOL FOR DELETE FORM TWO..SYMBOL FOR SUBSTITUTE FORM TWO
|
||||
2427..243F ; disallowed # NA <reserved-2427>..<reserved-243F>
|
||||
@@ -2719,7 +2727,8 @@
|
||||
2BBD..2BC8 ; valid ; ; NV8 # 7.0 BALLOT BOX WITH LIGHT X..BLACK MEDIUM RIGHT-POINTING TRIANGLE CENTRED
|
||||
2BC9 ; disallowed # NA <reserved-2BC9>
|
||||
2BCA..2BD1 ; valid ; ; NV8 # 7.0 TOP HALF BLACK CIRCLE..UNCERTAINTY SIGN
|
||||
2BD2..2BEB ; disallowed # NA <reserved-2BD2>..<reserved-2BEB>
|
||||
2BD2 ; valid ; ; NV8 # 10.0 GROUP MARK
|
||||
2BD3..2BEB ; disallowed # NA <reserved-2BD3>..<reserved-2BEB>
|
||||
2BEC..2BEF ; valid ; ; NV8 # 8.0 LEFTWARDS TWO-HEADED ARROW WITH TRIANGLE ARROWHEADS..DOWNWARDS TWO-HEADED ARROW WITH TRIANGLE ARROWHEADS
|
||||
2BF0..2BFF ; disallowed # NA <reserved-2BF0>..<reserved-2BFF>
|
||||
2C00 ; mapped ; 2C30 # 4.1 GLAGOLITIC CAPITAL LETTER AZU
|
||||
@@ -2950,7 +2959,8 @@
|
||||
2E32..2E3B ; valid ; ; NV8 # 6.1 TURNED COMMA..THREE-EM DASH
|
||||
2E3C..2E42 ; valid ; ; NV8 # 7.0 STENOGRAPHIC FULL STOP..DOUBLE LOW-REVERSED-9 QUOTATION MARK
|
||||
2E43..2E44 ; valid ; ; NV8 # 9.0 DASH WITH LEFT UPTURN..DOUBLE SUSPENSION MARK
|
||||
2E45..2E7F ; disallowed # NA <reserved-2E45>..<reserved-2E7F>
|
||||
2E45..2E49 ; valid ; ; NV8 # 10.0 INVERTED LOW KAVYKA..DOUBLE STACKED COMMA
|
||||
2E4A..2E7F ; disallowed # NA <reserved-2E4A>..<reserved-2E7F>
|
||||
2E80..2E99 ; valid ; ; NV8 # 3.0 CJK RADICAL REPEAT..CJK RADICAL RAP
|
||||
2E9A ; disallowed # NA <reserved-2E9A>
|
||||
2E9B..2E9E ; valid ; ; NV8 # 3.0 CJK RADICAL CHOKE..CJK RADICAL DEATH
|
||||
@@ -3208,7 +3218,8 @@
|
||||
3100..3104 ; disallowed # NA <reserved-3100>..<reserved-3104>
|
||||
3105..312C ; valid # 1.1 BOPOMOFO LETTER B..BOPOMOFO LETTER GN
|
||||
312D ; valid # 5.1 BOPOMOFO LETTER IH
|
||||
312E..3130 ; disallowed # NA <reserved-312E>..<reserved-3130>
|
||||
312E ; valid # 10.0 BOPOMOFO LETTER O WITH DOT ABOVE
|
||||
312F..3130 ; disallowed # NA <reserved-312F>..<reserved-3130>
|
||||
3131 ; mapped ; 1100 # 1.1 HANGUL LETTER KIYEOK
|
||||
3132 ; mapped ; 1101 # 1.1 HANGUL LETTER SSANGKIYEOK
|
||||
3133 ; mapped ; 11AA # 1.1 HANGUL LETTER KIYEOK-SIOS
|
||||
@@ -3840,7 +3851,8 @@
|
||||
9FC4..9FCB ; valid # 5.2 CJK UNIFIED IDEOGRAPH-9FC4..CJK UNIFIED IDEOGRAPH-9FCB
|
||||
9FCC ; valid # 6.1 CJK UNIFIED IDEOGRAPH-9FCC
|
||||
9FCD..9FD5 ; valid # 8.0 CJK UNIFIED IDEOGRAPH-9FCD..CJK UNIFIED IDEOGRAPH-9FD5
|
||||
9FD6..9FFF ; disallowed # NA <reserved-9FD6>..<reserved-9FFF>
|
||||
9FD6..9FEA ; valid # 10.0 CJK UNIFIED IDEOGRAPH-9FD6..CJK UNIFIED IDEOGRAPH-9FEA
|
||||
9FEB..9FFF ; disallowed # NA <reserved-9FEB>..<reserved-9FFF>
|
||||
A000..A48C ; valid # 3.0 YI SYLLABLE IT..YI SYLLABLE YYR
|
||||
A48D..A48F ; disallowed # NA <reserved-A48D>..<reserved-A48F>
|
||||
A490..A4A1 ; valid ; ; NV8 # 3.0 YI RADICAL QOT..YI RADICAL GA
|
||||
@@ -5687,7 +5699,8 @@ FFFE..FFFF ; disallowed # 1.1 <noncharacter-FFFE
|
||||
10300..1031E ; valid # 3.1 OLD ITALIC LETTER A..OLD ITALIC LETTER UU
|
||||
1031F ; valid # 7.0 OLD ITALIC LETTER ESS
|
||||
10320..10323 ; valid ; ; NV8 # 3.1 OLD ITALIC NUMERAL ONE..OLD ITALIC NUMERAL FIFTY
|
||||
10324..1032F ; disallowed # NA <reserved-10324>..<reserved-1032F>
|
||||
10324..1032C ; disallowed # NA <reserved-10324>..<reserved-1032C>
|
||||
1032D..1032F ; valid # 10.0 OLD ITALIC LETTER YE..OLD ITALIC LETTER SOUTHERN TSE
|
||||
10330..10340 ; valid # 3.1 GOTHIC LETTER AHSA..GOTHIC LETTER PAIRTHRA
|
||||
10341 ; valid ; ; NV8 # 3.1 GOTHIC LETTER NINETY
|
||||
10342..10349 ; valid # 3.1 GOTHIC LETTER RAIDA..GOTHIC LETTER OTHAL
|
||||
@@ -6110,7 +6123,18 @@ FFFE..FFFF ; disallowed # 1.1 <noncharacter-FFFE
|
||||
118EA..118F2 ; valid ; ; NV8 # 7.0 WARANG CITI NUMBER TEN..WARANG CITI NUMBER NINETY
|
||||
118F3..118FE ; disallowed # NA <reserved-118F3>..<reserved-118FE>
|
||||
118FF ; valid # 7.0 WARANG CITI OM
|
||||
11900..11ABF ; disallowed # NA <reserved-11900>..<reserved-11ABF>
|
||||
11900..119FF ; disallowed # NA <reserved-11900>..<reserved-119FF>
|
||||
11A00..11A3E ; valid # 10.0 ZANABAZAR SQUARE LETTER A..ZANABAZAR SQUARE CLUSTER-FINAL LETTER VA
|
||||
11A3F..11A46 ; valid ; ; NV8 # 10.0 ZANABAZAR SQUARE INITIAL HEAD MARK..ZANABAZAR SQUARE CLOSING DOUBLE-LINED HEAD MARK
|
||||
11A47 ; valid # 10.0 ZANABAZAR SQUARE SUBJOINER
|
||||
11A48..11A4F ; disallowed # NA <reserved-11A48>..<reserved-11A4F>
|
||||
11A50..11A83 ; valid # 10.0 SOYOMBO LETTER A..SOYOMBO LETTER KSSA
|
||||
11A84..11A85 ; disallowed # NA <reserved-11A84>..<reserved-11A85>
|
||||
11A86..11A99 ; valid # 10.0 SOYOMBO CLUSTER-INITIAL LETTER RA..SOYOMBO SUBJOINER
|
||||
11A9A..11A9C ; valid ; ; NV8 # 10.0 SOYOMBO MARK TSHEG..SOYOMBO MARK DOUBLE SHAD
|
||||
11A9D ; disallowed # NA <reserved-11A9D>
|
||||
11A9E..11AA2 ; valid ; ; NV8 # 10.0 SOYOMBO HEAD MARK WITH MOON AND SUN AND TRIPLE FLAME..SOYOMBO TERMINAL MARK-2
|
||||
11AA3..11ABF ; disallowed # NA <reserved-11AA3>..<reserved-11ABF>
|
||||
11AC0..11AF8 ; valid # 7.0 PAU CIN HAU LETTER PA..PAU CIN HAU GLOTTAL STOP FINAL
|
||||
11AF9..11BFF ; disallowed # NA <reserved-11AF9>..<reserved-11BFF>
|
||||
11C00..11C08 ; valid # 9.0 BHAIKSUKI LETTER A..BHAIKSUKI LETTER VOCALIC L
|
||||
@@ -6129,7 +6153,21 @@ FFFE..FFFF ; disallowed # 1.1 <noncharacter-FFFE
|
||||
11C92..11CA7 ; valid # 9.0 MARCHEN SUBJOINED LETTER KA..MARCHEN SUBJOINED LETTER ZA
|
||||
11CA8 ; disallowed # NA <reserved-11CA8>
|
||||
11CA9..11CB6 ; valid # 9.0 MARCHEN SUBJOINED LETTER YA..MARCHEN SIGN CANDRABINDU
|
||||
11CB7..11FFF ; disallowed # NA <reserved-11CB7>..<reserved-11FFF>
|
||||
11CB7..11CFF ; disallowed # NA <reserved-11CB7>..<reserved-11CFF>
|
||||
11D00..11D06 ; valid # 10.0 MASARAM GONDI LETTER A..MASARAM GONDI LETTER E
|
||||
11D07 ; disallowed # NA <reserved-11D07>
|
||||
11D08..11D09 ; valid # 10.0 MASARAM GONDI LETTER AI..MASARAM GONDI LETTER O
|
||||
11D0A ; disallowed # NA <reserved-11D0A>
|
||||
11D0B..11D36 ; valid # 10.0 MASARAM GONDI LETTER AU..MASARAM GONDI VOWEL SIGN VOCALIC R
|
||||
11D37..11D39 ; disallowed # NA <reserved-11D37>..<reserved-11D39>
|
||||
11D3A ; valid # 10.0 MASARAM GONDI VOWEL SIGN E
|
||||
11D3B ; disallowed # NA <reserved-11D3B>
|
||||
11D3C..11D3D ; valid # 10.0 MASARAM GONDI VOWEL SIGN AI..MASARAM GONDI VOWEL SIGN O
|
||||
11D3E ; disallowed # NA <reserved-11D3E>
|
||||
11D3F..11D47 ; valid # 10.0 MASARAM GONDI VOWEL SIGN AU..MASARAM GONDI RA-KARA
|
||||
11D48..11D4F ; disallowed # NA <reserved-11D48>..<reserved-11D4F>
|
||||
11D50..11D59 ; valid # 10.0 MASARAM GONDI DIGIT ZERO..MASARAM GONDI DIGIT NINE
|
||||
11D5A..11FFF ; disallowed # NA <reserved-11D5A>..<reserved-11FFF>
|
||||
12000..1236E ; valid # 5.0 CUNEIFORM SIGN A..CUNEIFORM SIGN ZUM
|
||||
1236F..12398 ; valid # 7.0 CUNEIFORM SIGN KAP ELAMITE..CUNEIFORM SIGN UM TIMES ME
|
||||
12399 ; valid # 8.0 CUNEIFORM SIGN U U
|
||||
@@ -6179,13 +6217,17 @@ FFFE..FFFF ; disallowed # 1.1 <noncharacter-FFFE
|
||||
16F8F..16F9F ; valid # 6.1 MIAO TONE RIGHT..MIAO LETTER REFORMED TONE-8
|
||||
16FA0..16FDF ; disallowed # NA <reserved-16FA0>..<reserved-16FDF>
|
||||
16FE0 ; valid # 9.0 TANGUT ITERATION MARK
|
||||
16FE1..16FFF ; disallowed # NA <reserved-16FE1>..<reserved-16FFF>
|
||||
16FE1 ; valid # 10.0 NUSHU ITERATION MARK
|
||||
16FE2..16FFF ; disallowed # NA <reserved-16FE2>..<reserved-16FFF>
|
||||
17000..187EC ; valid # 9.0 TANGUT IDEOGRAPH-17000..TANGUT IDEOGRAPH-187EC
|
||||
187ED..187FF ; disallowed # NA <reserved-187ED>..<reserved-187FF>
|
||||
18800..18AF2 ; valid # 9.0 TANGUT COMPONENT-001..TANGUT COMPONENT-755
|
||||
18AF3..1AFFF ; disallowed # NA <reserved-18AF3>..<reserved-1AFFF>
|
||||
1B000..1B001 ; valid # 6.0 KATAKANA LETTER ARCHAIC E..HIRAGANA LETTER ARCHAIC YE
|
||||
1B002..1BBFF ; disallowed # NA <reserved-1B002>..<reserved-1BBFF>
|
||||
1B002..1B11E ; valid # 10.0 HENTAIGANA LETTER A-1..HENTAIGANA LETTER N-MU-MO-2
|
||||
1B11F..1B16F ; disallowed # NA <reserved-1B11F>..<reserved-1B16F>
|
||||
1B170..1B2FB ; valid # 10.0 NUSHU CHARACTER-1B170..NUSHU CHARACTER-1B2FB
|
||||
1B2FC..1BBFF ; disallowed # NA <reserved-1B2FC>..<reserved-1BBFF>
|
||||
1BC00..1BC6A ; valid # 7.0 DUPLOYAN LETTER H..DUPLOYAN LETTER VOCALIC M
|
||||
1BC6B..1BC6F ; disallowed # NA <reserved-1BC6B>..<reserved-1BC6F>
|
||||
1BC70..1BC7C ; valid # 7.0 DUPLOYAN AFFIX LEFT HORIZONTAL SECANT..DUPLOYAN AFFIX ATTACHED TANGENT HOOK
|
||||
@@ -7659,7 +7701,9 @@ FFFE..FFFF ; disallowed # 1.1 <noncharacter-FFFE
|
||||
1F249..1F24F ; disallowed # NA <reserved-1F249>..<reserved-1F24F>
|
||||
1F250 ; mapped ; 5F97 # 6.0 CIRCLED IDEOGRAPH ADVANTAGE
|
||||
1F251 ; mapped ; 53EF # 6.0 CIRCLED IDEOGRAPH ACCEPT
|
||||
1F252..1F2FF ; disallowed # NA <reserved-1F252>..<reserved-1F2FF>
|
||||
1F252..1F25F ; disallowed # NA <reserved-1F252>..<reserved-1F25F>
|
||||
1F260..1F265 ; valid ; ; NV8 # 10.0 ROUNDED SYMBOL FOR FU..ROUNDED SYMBOL FOR CAI
|
||||
1F266..1F2FF ; disallowed # NA <reserved-1F266>..<reserved-1F2FF>
|
||||
1F300..1F320 ; valid ; ; NV8 # 6.0 CYCLONE..SHOOTING STAR
|
||||
1F321..1F32C ; valid ; ; NV8 # 7.0 THERMOMETER..WIND BLOWING FACE
|
||||
1F32D..1F32F ; valid ; ; NV8 # 8.0 HOT DOG..BURRITO
|
||||
@@ -7730,12 +7774,14 @@ FFFE..FFFF ; disallowed # 1.1 <noncharacter-FFFE
|
||||
1F6C6..1F6CF ; valid ; ; NV8 # 7.0 TRIANGLE WITH ROUNDED CORNERS..BED
|
||||
1F6D0 ; valid ; ; NV8 # 8.0 PLACE OF WORSHIP
|
||||
1F6D1..1F6D2 ; valid ; ; NV8 # 9.0 OCTAGONAL SIGN..SHOPPING TROLLEY
|
||||
1F6D3..1F6DF ; disallowed # NA <reserved-1F6D3>..<reserved-1F6DF>
|
||||
1F6D3..1F6D4 ; valid ; ; NV8 # 10.0 STUPA..PAGODA
|
||||
1F6D5..1F6DF ; disallowed # NA <reserved-1F6D5>..<reserved-1F6DF>
|
||||
1F6E0..1F6EC ; valid ; ; NV8 # 7.0 HAMMER AND WRENCH..AIRPLANE ARRIVING
|
||||
1F6ED..1F6EF ; disallowed # NA <reserved-1F6ED>..<reserved-1F6EF>
|
||||
1F6F0..1F6F3 ; valid ; ; NV8 # 7.0 SATELLITE..PASSENGER SHIP
|
||||
1F6F4..1F6F6 ; valid ; ; NV8 # 9.0 SCOOTER..CANOE
|
||||
1F6F7..1F6FF ; disallowed # NA <reserved-1F6F7>..<reserved-1F6FF>
|
||||
1F6F7..1F6F8 ; valid ; ; NV8 # 10.0 SLED..FLYING SAUCER
|
||||
1F6F9..1F6FF ; disallowed # NA <reserved-1F6F9>..<reserved-1F6FF>
|
||||
1F700..1F773 ; valid ; ; NV8 # 6.0 ALCHEMICAL SYMBOL FOR QUINTESSENCE..ALCHEMICAL SYMBOL FOR HALF OUNCE
|
||||
1F774..1F77F ; disallowed # NA <reserved-1F774>..<reserved-1F77F>
|
||||
1F780..1F7D4 ; valid ; ; NV8 # 7.0 BLACK LEFT-POINTING ISOSCELES RIGHT TRIANGLE..HEAVY TWELVE POINTED PINWHEEL STAR
|
||||
@@ -7749,25 +7795,32 @@ FFFE..FFFF ; disallowed # 1.1 <noncharacter-FFFE
|
||||
1F860..1F887 ; valid ; ; NV8 # 7.0 WIDE-HEADED LEFTWARDS LIGHT BARB ARROW..WIDE-HEADED SOUTH WEST VERY HEAVY BARB ARROW
|
||||
1F888..1F88F ; disallowed # NA <reserved-1F888>..<reserved-1F88F>
|
||||
1F890..1F8AD ; valid ; ; NV8 # 7.0 LEFTWARDS TRIANGLE ARROWHEAD..WHITE ARROW SHAFT WIDTH TWO THIRDS
|
||||
1F8AE..1F90F ; disallowed # NA <reserved-1F8AE>..<reserved-1F90F>
|
||||
1F8AE..1F8FF ; disallowed # NA <reserved-1F8AE>..<reserved-1F8FF>
|
||||
1F900..1F90B ; valid ; ; NV8 # 10.0 CIRCLED CROSS FORMEE WITH FOUR DOTS..DOWNWARD FACING NOTCHED HOOK WITH DOT
|
||||
1F90C..1F90F ; disallowed # NA <reserved-1F90C>..<reserved-1F90F>
|
||||
1F910..1F918 ; valid ; ; NV8 # 8.0 ZIPPER-MOUTH FACE..SIGN OF THE HORNS
|
||||
1F919..1F91E ; valid ; ; NV8 # 9.0 CALL ME HAND..HAND WITH INDEX AND MIDDLE FINGERS CROSSED
|
||||
1F91F ; disallowed # NA <reserved-1F91F>
|
||||
1F91F ; valid ; ; NV8 # 10.0 I LOVE YOU HAND SIGN
|
||||
1F920..1F927 ; valid ; ; NV8 # 9.0 FACE WITH COWBOY HAT..SNEEZING FACE
|
||||
1F928..1F92F ; disallowed # NA <reserved-1F928>..<reserved-1F92F>
|
||||
1F928..1F92F ; valid ; ; NV8 # 10.0 FACE WITH ONE EYEBROW RAISED..SHOCKED FACE WITH EXPLODING HEAD
|
||||
1F930 ; valid ; ; NV8 # 9.0 PREGNANT WOMAN
|
||||
1F931..1F932 ; disallowed # NA <reserved-1F931>..<reserved-1F932>
|
||||
1F931..1F932 ; valid ; ; NV8 # 10.0 BREAST-FEEDING..PALMS UP TOGETHER
|
||||
1F933..1F93E ; valid ; ; NV8 # 9.0 SELFIE..HANDBALL
|
||||
1F93F ; disallowed # NA <reserved-1F93F>
|
||||
1F940..1F94B ; valid ; ; NV8 # 9.0 WILTED FLOWER..MARTIAL ARTS UNIFORM
|
||||
1F94C..1F94F ; disallowed # NA <reserved-1F94C>..<reserved-1F94F>
|
||||
1F94C ; valid ; ; NV8 # 10.0 CURLING STONE
|
||||
1F94D..1F94F ; disallowed # NA <reserved-1F94D>..<reserved-1F94F>
|
||||
1F950..1F95E ; valid ; ; NV8 # 9.0 CROISSANT..PANCAKES
|
||||
1F95F..1F97F ; disallowed # NA <reserved-1F95F>..<reserved-1F97F>
|
||||
1F95F..1F96B ; valid ; ; NV8 # 10.0 DUMPLING..CANNED FOOD
|
||||
1F96C..1F97F ; disallowed # NA <reserved-1F96C>..<reserved-1F97F>
|
||||
1F980..1F984 ; valid ; ; NV8 # 8.0 CRAB..UNICORN FACE
|
||||
1F985..1F991 ; valid ; ; NV8 # 9.0 EAGLE..SQUID
|
||||
1F992..1F9BF ; disallowed # NA <reserved-1F992>..<reserved-1F9BF>
|
||||
1F992..1F997 ; valid ; ; NV8 # 10.0 GIRAFFE FACE..CRICKET
|
||||
1F998..1F9BF ; disallowed # NA <reserved-1F998>..<reserved-1F9BF>
|
||||
1F9C0 ; valid ; ; NV8 # 8.0 CHEESE WEDGE
|
||||
1F9C1..1FFFD ; disallowed # NA <reserved-1F9C1>..<reserved-1FFFD>
|
||||
1F9C1..1F9CF ; disallowed # NA <reserved-1F9C1>..<reserved-1F9CF>
|
||||
1F9D0..1F9E6 ; valid ; ; NV8 # 10.0 FACE WITH MONOCLE..SOCKS
|
||||
1F9E7..1FFFD ; disallowed # NA <reserved-1F9E7>..<reserved-1FFFD>
|
||||
1FFFE..1FFFF ; disallowed # 2.0 <noncharacter-1FFFE>..<noncharacter-1FFFF>
|
||||
20000..2A6D6 ; valid # 3.1 CJK UNIFIED IDEOGRAPH-20000..CJK UNIFIED IDEOGRAPH-2A6D6
|
||||
2A6D7..2A6FF ; disallowed # NA <reserved-2A6D7>..<reserved-2A6FF>
|
||||
@@ -7776,7 +7829,9 @@ FFFE..FFFF ; disallowed # 1.1 <noncharacter-FFFE
|
||||
2B740..2B81D ; valid # 6.0 CJK UNIFIED IDEOGRAPH-2B740..CJK UNIFIED IDEOGRAPH-2B81D
|
||||
2B81E..2B81F ; disallowed # NA <reserved-2B81E>..<reserved-2B81F>
|
||||
2B820..2CEA1 ; valid # 8.0 CJK UNIFIED IDEOGRAPH-2B820..CJK UNIFIED IDEOGRAPH-2CEA1
|
||||
2CEA2..2F7FF ; disallowed # NA <reserved-2CEA2>..<reserved-2F7FF>
|
||||
2CEA2..2CEAF ; disallowed # NA <reserved-2CEA2>..<reserved-2CEAF>
|
||||
2CEB0..2EBE0 ; valid # 10.0 CJK UNIFIED IDEOGRAPH-2CEB0..CJK UNIFIED IDEOGRAPH-2EBE0
|
||||
2EBE1..2F7FF ; disallowed # NA <reserved-2EBE1>..<reserved-2F7FF>
|
||||
2F800 ; mapped ; 4E3D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F800
|
||||
2F801 ; mapped ; 4E38 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F801
|
||||
2F802 ; mapped ; 4E41 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F802
|
||||

third_party/rust/idna/src/make_uts46_mapping_table.py (vendored)
@@ -51,7 +51,15 @@ def strtab_slice(s):
    return c

def rust_slice(s):
-    return "(StringTableSlice { byte_start: %d, byte_len: %d })" % s
+    start = s[0]
+    length = s[1]
+    start_lo = start & 0xff
+    start_hi = start >> 8
+    assert length <= 255
+    assert start_hi <= 255
+    return "(StringTableSlice { byte_start_lo: %d, byte_start_hi: %d, byte_len: %d })" % (start_lo, start_hi, length)

ranges = []

for line in txt:
    # remove comments

@@ -66,12 +74,58 @@ for line in txt:
    if not last:
        last = first
    mapping = fields[1].strip().replace('_', ' ').title().replace(' ', '')
+    unicode_str = None
    if len(fields) > 2:
        if fields[2].strip():
            unicode_str = u''.join(char(c) for c in fields[2].strip().split(' '))
-            mapping += rust_slice(strtab_slice(unicode_str))
        elif mapping == "Deviation":
-            mapping += rust_slice(strtab_slice(''))
+            unicode_str = u''
    ranges.append((first, last, mapping, unicode_str))

+def mergeable_key(r):
+    mapping = r[2]
+    # These types have associated data, so we should not merge them.
+    if mapping in ('Mapped', 'Deviation', 'DisallowedStd3Mapped'):
+        return r
+    assert mapping in ('Valid', 'Ignored', 'Disallowed', 'DisallowedStd3Valid')
+    return mapping
+
+grouped_ranges = itertools.groupby(ranges, key=mergeable_key)
+
+optimized_ranges = []
+
+for (k, g) in grouped_ranges:
+    group = list(g)
+    if len(group) == 1:
+        optimized_ranges.append(group[0])
+        continue
+    # Assert that nothing in the group has an associated unicode string.
+    for g in group:
+        if g[3] is not None and len(g[3]) > 2:
+            assert not g[3][2].strip()
+    # Assert that consecutive members of the group don't leave gaps in
+    # the codepoint space.
+    a, b = itertools.tee(group)
+    next(b, None)
+    for (g1, g2) in itertools.izip(a, b):
+        last_char = int(g1[1], 16)
+        next_char = int(g2[0], 16)
+        if last_char + 1 == next_char:
+            continue
+        # There's a gap where surrogates would appear, but we don't have to
+        # worry about that gap, as surrogates never appear in Rust strings.
+        # Assert we're seeing the surrogate case here.
+        assert last_char == 0xd7ff
+        assert next_char == 0xe000
+    first = group[0][0]
+    last = group[-1][1]
+    mapping = group[0][2]
+    unicode_str = group[0][3]
+    optimized_ranges.append((first, last, mapping, unicode_str))
+
for (first, last, mapping, unicode_str) in optimized_ranges:
    if unicode_str is not None:
        mapping += rust_slice(strtab_slice(unicode_str))
    print("    Range { from: '%s', to: '%s', mapping: %s }," % (escape_char(char(first)),
                                                                escape_char(char(last)),
                                                                mapping))
third_party/rust/idna/src/uts46.rs (vendored, 206 lines changed)
@@ -12,25 +12,35 @@
use self::Mapping::*;
use punycode;
use std::ascii::AsciiExt;
+use std::cmp::Ordering::{Equal, Less, Greater};
+use unicode_bidi::{BidiClass, bidi_class};
use unicode_normalization::UnicodeNormalization;
use unicode_normalization::char::is_combining_mark;
-use unicode_bidi::{BidiClass, bidi_class};

include!("uts46_mapping_table.rs");

+pub static PUNYCODE_PREFIX: &'static str = "xn--";
+
#[derive(Debug)]
struct StringTableSlice {
-    byte_start: u16,
-    byte_len: u16,
+    // Store these as separate fields so the structure will have an
+    // alignment of 1 and thus pack better into the Mapping enum, below.
+    byte_start_lo: u8,
+    byte_start_hi: u8,
+    byte_len: u8,
}

fn decode_slice(slice: &StringTableSlice) -> &'static str {
-    let start = slice.byte_start as usize;
+    let lo = slice.byte_start_lo as usize;
+    let hi = slice.byte_start_hi as usize;
+    let start = (hi << 8) | lo;
    let len = slice.byte_len as usize;
    &STRING_TABLE[start..(start + len)]
}

-#[repr(u16)]
+#[repr(u8)]
#[derive(Debug)]
enum Mapping {
    Valid,
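The hunk above packs the 16-bit string-table offset into two u8 fields so the slice descriptor drops to alignment 1. A minimal, self-contained sketch of that layout (names mirror the patched decode_slice, but the table contents and main() are illustrative only, not part of the patch):

```rust
#[derive(Debug)]
struct StringTableSlice {
    byte_start_lo: u8,
    byte_start_hi: u8,
    byte_len: u8,
}

// Stand-in for the generated STRING_TABLE; the real one is emitted by
// make_uts46_mapping_table.py.
static STRING_TABLE: &'static str = "abcdef";

fn decode_slice(slice: &StringTableSlice) -> &'static str {
    // Recombine the low and high bytes into the 16-bit byte offset.
    let start = ((slice.byte_start_hi as usize) << 8) | (slice.byte_start_lo as usize);
    let len = slice.byte_len as usize;
    &STRING_TABLE[start..(start + len)]
}

fn main() {
    // byte_start = 2, byte_len = 3 selects "cde".
    let s = StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 3 };
    assert_eq!(decode_slice(&s), "cde");
    // Three u8 fields give size 3 and alignment 1, which is what lets the
    // payload pack tightly into the #[repr(u8)] Mapping enum.
    assert_eq!(std::mem::size_of::<StringTableSlice>(), 3);
    assert_eq!(std::mem::align_of::<StringTableSlice>(), 1);
}
```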
@@ -49,20 +59,16 @@ struct Range {
}

fn find_char(codepoint: char) -> &'static Mapping {
-    let mut min = 0;
-    let mut max = TABLE.len() - 1;
-    while max > min {
-        let mid = (min + max) >> 1;
-        if codepoint > TABLE[mid].to {
-            min = mid;
-        } else if codepoint < TABLE[mid].from {
-            max = mid;
-        } else {
-            min = mid;
-            max = mid;
-        }
-    }
-    &TABLE[min].mapping
+    let r = TABLE.binary_search_by(|ref range| {
+        if codepoint > range.to {
+            Less
+        } else if codepoint < range.from {
+            Greater
+        } else {
+            Equal
+        }
+    });
+    r.ok().map(|i| &TABLE[i].mapping).unwrap()
}

fn map_char(codepoint: char, flags: Flags, output: &mut String, errors: &mut Vec<Error>) {
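The hand-rolled binary search is replaced by slice::binary_search_by, with a comparator that orders each range relative to the probed codepoint. A small standalone sketch of the same pattern (the Range type and table here are made up for illustration; only the comparator logic comes from the patch):

```rust
#[derive(Debug)]
struct Range {
    from: char,
    to: char,
}

fn find_range(table: &[Range], codepoint: char) -> Option<&Range> {
    use std::cmp::Ordering::{Equal, Greater, Less};
    table
        .binary_search_by(|range| {
            if codepoint > range.to {
                Less // this range lies entirely before the codepoint
            } else if codepoint < range.from {
                Greater // this range lies entirely after the codepoint
            } else {
                Equal // the codepoint falls inside this range
            }
        })
        .ok()
        .map(|i| &table[i])
}

fn main() {
    let table = [
        Range { from: 'a', to: 'f' },
        Range { from: 'g', to: 'm' },
        Range { from: 'n', to: 'z' },
    ];
    assert_eq!(find_range(&table, 'h').map(|r| r.from), Some('g'));
    assert!(find_range(&table, '!').is_none());
}
```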
@@ -97,27 +103,32 @@ fn map_char(codepoint: char, flags: Flags, output: &mut String, errors: &mut Vec
}

// http://tools.ietf.org/html/rfc5893#section-2
-fn passes_bidi(label: &str, transitional_processing: bool) -> bool {
+fn passes_bidi(label: &str, is_bidi_domain: bool) -> bool {
+    // Rule 0: Bidi Rules apply to Bidi Domain Names: a name with at least one RTL label. A label
+    // is RTL if it contains at least one character of bidi class R, AL or AN.
+    if !is_bidi_domain {
+        return true;
+    }
+
    let mut chars = label.chars();
-    let class = match chars.next() {
+    let first_char_class = match chars.next() {
        Some(c) => bidi_class(c),
        None => return true, // empty string
    };

-    if class == BidiClass::L
-        || (class == BidiClass::ON && transitional_processing) // starts with \u200D
-        || (class == BidiClass::ES && transitional_processing) // hack: 1.35.+33.49
-        || class == BidiClass::EN // hack: starts with number 0à.\u05D0
-    { // LTR
+    match first_char_class {
+        // LTR label
+        BidiClass::L => {
            // Rule 5
            loop {
                match chars.next() {
                    Some(c) => {
-                        let c = bidi_class(c);
-                        if !matches!(c, BidiClass::L | BidiClass::EN |
+                        if !matches!(bidi_class(c),
+                                     BidiClass::L | BidiClass::EN |
                                     BidiClass::ES | BidiClass::CS |
                                     BidiClass::ET | BidiClass::ON |
-                                     BidiClass::BN | BidiClass::NSM) {
+                                     BidiClass::BN | BidiClass::NSM
+                        ) {
                            return false;
                        }
                    },
@@ -126,27 +137,29 @@ fn passes_bidi(label: &str, transitional_processing: bool) -> bool {
                }

                // Rule 6
+                // must end in L or EN followed by 0 or more NSM
                let mut rev_chars = label.chars().rev();
-                let mut last = rev_chars.next();
-                loop { // must end in L or EN followed by 0 or more NSM
-                    match last {
+                let mut last_non_nsm = rev_chars.next();
+                loop {
+                    match last_non_nsm {
                        Some(c) if bidi_class(c) == BidiClass::NSM => {
-                            last = rev_chars.next();
+                            last_non_nsm = rev_chars.next();
                            continue;
                        }
                        _ => { break; },
                    }
                }
+                match last_non_nsm {
+                    Some(c) if bidi_class(c) == BidiClass::L
+                        || bidi_class(c) == BidiClass::EN => {},
+                    Some(_) => { return false; },
+                    _ => {}
+                }

-                // TODO: does not pass for àˇ.\u05D0
-                // match last {
-                //     Some(c) if bidi_class(c) == BidiClass::L
-                //         || bidi_class(c) == BidiClass::EN => {},
-                //     Some(c) => { return false; },
-                //     _ => {}
-                // }
            }

-        } else if class == BidiClass::R || class == BidiClass::AL { // RTL
+            // RTL label
+            BidiClass::R | BidiClass::AL => {
                let mut found_en = false;
                let mut found_an = false;
@@ -196,36 +209,71 @@ fn passes_bidi(label: &str, transitional_processing: bool) -> bool {
                if found_an && found_en {
                    return false;
                }
-        } else {
-            // Rule 2: Should start with L or R/AL
-            return false;
-        }
+            }
+
+            // Rule 1: Should start with L or R/AL
+            _ => {
+                return false;
+            }
+        }
+
    return true;
}

/// http://www.unicode.org/reports/tr46/#Validity_Criteria
-fn validate(label: &str, flags: Flags, errors: &mut Vec<Error>) {
-    if label.nfc().ne(label.chars()) {
+fn validate(label: &str, is_bidi_domain: bool, flags: Flags, errors: &mut Vec<Error>) {
+    let first_char = label.chars().next();
+    if first_char == None {
+        // Empty string, pass
+    }
+
+    // V1: Must be in NFC form.
+    else if label.nfc().ne(label.chars()) {
        errors.push(Error::ValidityCriteria);
    }

-    // Can not contain '.' since the input is from .split('.')
-    // Spec says that the label must not contain a HYPHEN-MINUS character in both the
+    // V2: No U+002D HYPHEN-MINUS in both third and fourth positions.
+    //
+    // NOTE: Spec says that the label must not contain a HYPHEN-MINUS character in both the
    // third and fourth positions. But nobody follows this criteria. See the spec issue below:
    // https://github.com/whatwg/url/issues/53
-    if label.starts_with("-")
-        || label.ends_with("-")
-        || label.chars().next().map_or(false, is_combining_mark)
-        || label.chars().any(|c| match *find_char(c) {
+    //
+    // TODO: Add *CheckHyphens* flag.
+
+    // V3: neither begin nor end with a U+002D HYPHEN-MINUS
+    else if label.starts_with("-") || label.ends_with("-") {
+        errors.push(Error::ValidityCriteria);
+    }
+
+    // V4: not contain a U+002E FULL STOP
+    //
+    // Here, label can't contain '.' since the input is from .split('.')
+
+    // V5: not begin with a GC=Mark
+    else if is_combining_mark(first_char.unwrap()) {
+        errors.push(Error::ValidityCriteria);
+    }
+
+    // V6: Check against Mapping Table
+    else if label.chars().any(|c| match *find_char(c) {
        Mapping::Valid => false,
        Mapping::Deviation(_) => flags.transitional_processing,
        Mapping::DisallowedStd3Valid => flags.use_std3_ascii_rules,
        _ => true,
-        })
-        || !passes_bidi(label, flags.transitional_processing)
-    {
-        errors.push(Error::ValidityCriteria)
+    }) {
+        errors.push(Error::ValidityCriteria);
    }
+
+    // V7: ContextJ rules
+    //
+    // TODO: Implement rules and add *CheckJoiners* flag.
+
+    // V8: Bidi rules
+    //
+    // TODO: Add *CheckBidi* flag
+    else if !passes_bidi(label, is_bidi_domain)
+    {
+        errors.push(Error::ValidityCriteria);
+    }
}
@@ -236,22 +284,51 @@ fn processing(domain: &str, flags: Flags, errors: &mut Vec<Error>) -> String {
map_char(c, flags, &mut mapped, errors)
}
let normalized: String = mapped.nfc().collect();
let mut validated = String::new();

// Find out if it's a Bidi Domain Name
//
// First, check for literal bidi chars
let mut is_bidi_domain = domain.chars().any(|c|
matches!(bidi_class(c), BidiClass::R | BidiClass::AL | BidiClass::AN)
);
if !is_bidi_domain {
// Then check for punycode-encoded bidi chars
for label in normalized.split('.') {
if validated.len() > 0 {
if label.starts_with(PUNYCODE_PREFIX) {
match punycode::decode_to_string(&label[PUNYCODE_PREFIX.len()..]) {
Some(decoded_label) => {
if decoded_label.chars().any(|c|
matches!(bidi_class(c), BidiClass::R | BidiClass::AL | BidiClass::AN)
) {
is_bidi_domain = true;
}
}
None => {
is_bidi_domain = true;
}
}
}
}
}

let mut validated = String::new();
let mut first = true;
for label in normalized.split('.') {
if !first {
validated.push('.');
}
if label.starts_with("xn--") {
match punycode::decode_to_string(&label["xn--".len()..]) {
first = false;
if label.starts_with(PUNYCODE_PREFIX) {
match punycode::decode_to_string(&label[PUNYCODE_PREFIX.len()..]) {
Some(decoded_label) => {
let flags = Flags { transitional_processing: false, ..flags };
validate(&decoded_label, flags, errors);
validate(&decoded_label, is_bidi_domain, flags, errors);
validated.push_str(&decoded_label)
}
None => errors.push(Error::PunycodeError)
}
} else {
validate(label, flags, errors);
validate(label, is_bidi_domain, flags, errors);
validated.push_str(label)
}
}
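A condensed editor's sketch of the Bidi-domain detection introduced above (assuming the `bidi_class`, `BidiClass`, `punycode` and `PUNYCODE_PREFIX` items this file already uses; `domain_is_bidi` is an illustrative name, not the crate's API): a domain counts as a Bidi domain if any label contains an RTL character, including labels that only reveal one after Punycode decoding.

// Editor's sketch, not the crate's code: per-label variant of the check above.
fn label_has_rtl(label: &str) -> bool {
    label.chars().any(|c| match bidi_class(c) {
        BidiClass::R | BidiClass::AL | BidiClass::AN => true,
        _ => false,
    })
}

fn domain_is_bidi(normalized: &str) -> bool {
    normalized.split('.').any(|label| {
        if label.starts_with(PUNYCODE_PREFIX) {
            match punycode::decode_to_string(&label[PUNYCODE_PREFIX.len()..]) {
                Some(decoded) => label_has_rtl(&decoded),
                None => true, // failed decode is treated as Bidi, as above
            }
        } else {
            label_has_rtl(label)
        }
    })
}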
@@ -273,6 +350,7 @@ enum Error {
DissallowedMappedInStd3,
DissallowedCharacter,
TooLongForDns,
TooShortForDns,
}

/// Errors recorded during UTS #46 processing.
@@ -286,16 +364,18 @@ pub struct Errors(Vec<Error>);
pub fn to_ascii(domain: &str, flags: Flags) -> Result<String, Errors> {
let mut errors = Vec::new();
let mut result = String::new();
let mut first = true;
for label in processing(domain, flags, &mut errors).split('.') {
if result.len() > 0 {
if !first {
result.push('.');
}
first = false;
if label.is_ascii() {
result.push_str(label);
} else {
match punycode::encode_str(label) {
Some(x) => {
result.push_str("xn--");
result.push_str(PUNYCODE_PREFIX);
result.push_str(&x);
},
None => errors.push(Error::PunycodeError)
@@ -305,8 +385,10 @@ pub fn to_ascii(domain: &str, flags: Flags) -> Result<String, Errors> {

if flags.verify_dns_length {
let domain = if result.ends_with(".") { &result[..result.len()-1] } else { &*result };
if domain.len() < 1 || domain.len() > 253 ||
domain.split('.').any(|label| label.len() < 1 || label.len() > 63) {
if domain.len() < 1 || domain.split('.').any(|label| label.len() < 1) {
errors.push(Error::TooShortForDns)
}
if domain.len() > 253 || domain.split('.').any(|label| label.len() > 63) {
errors.push(Error::TooLongForDns)
}
}

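A hedged usage sketch of the split DNS-length reporting above, with the same flags the new tests/unit.rs uses further down (`ascii` is just a local helper, not crate API):

// Editor's sketch: how TooShortForDns / TooLongForDns surface through to_ascii.
extern crate idna;
use idna::uts46::{self, Flags};

fn ascii(domain: &str) -> Result<String, uts46::Errors> {
    uts46::to_ascii(domain, Flags {
        transitional_processing: false,
        use_std3_ascii_rules: true,
        verify_dns_length: true,
    })
}

#[test]
fn dns_length_sketch() {
    assert!(ascii("").is_err());              // empty domain: too short
    assert!(ascii("a..com").is_err());        // empty label: too short
    assert!(ascii(&"a".repeat(64)).is_err()); // label over 63 octets: too long
    assert!(ascii("example.com").is_ok());
}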
12742 third_party/rust/idna/src/uts46_mapping_table.rs vendored
File diff suppressed because it is too large
9966 third_party/rust/idna/tests/IdnaTest.txt vendored
File diff suppressed because it is too large
6 third_party/rust/idna/tests/tests.rs vendored
@@ -10,11 +10,7 @@ fn main() {
{
let mut add_test = |name, run| {
tests.push(test::TestDescAndFn {
desc: test::TestDesc {
name: test::DynTestName(name),
ignore: false,
should_panic: test::ShouldPanic::No,
},
desc: test::TestDesc::new(test::DynTestName(name)),
testfn: run,
})
};
40 third_party/rust/idna/tests/unit.rs vendored Normal file
@@ -0,0 +1,40 @@
extern crate idna;
extern crate unicode_normalization;

use idna::uts46;
use unicode_normalization::char::is_combining_mark;


fn _to_ascii(domain: &str) -> Result<String, uts46::Errors> {
uts46::to_ascii(domain, uts46::Flags {
transitional_processing: false,
use_std3_ascii_rules: true,
verify_dns_length: true,
})
}

#[test]
fn test_v5() {
// IdnaTest:784 蔏。𑰺
assert!(is_combining_mark('\u{11C3A}'));
assert!(_to_ascii("\u{11C3A}").is_err());
assert!(_to_ascii("\u{850f}.\u{11C3A}").is_err());
assert!(_to_ascii("\u{850f}\u{ff61}\u{11C3A}").is_err());
}

#[test]
fn test_v8_bidi_rules() {
assert_eq!(_to_ascii("abc").unwrap(), "abc");
assert_eq!(_to_ascii("123").unwrap(), "123");
assert_eq!(_to_ascii("אבּג").unwrap(), "xn--kdb3bdf");
assert_eq!(_to_ascii("ابج").unwrap(), "xn--mgbcm");
assert_eq!(_to_ascii("abc.ابج").unwrap(), "abc.xn--mgbcm");
assert_eq!(_to_ascii("אבּג.ابج").unwrap(), "xn--kdb3bdf.xn--mgbcm");

// Bidi domain names cannot start with digits
assert!(_to_ascii("0a.\u{05D0}").is_err());
assert!(_to_ascii("0à.\u{05D0}").is_err());

// Bidi chars may be punycode-encoded
assert!(_to_ascii("xn--0ca24w").is_err());
}
1 third_party/rust/percent-encoding/.cargo-checksum.json vendored Normal file
@@ -0,0 +1 @@
{"files":{".cargo-ok":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","Cargo.toml":"f6b6226839c4da49df564d7fc44d4477964621778b671d3a5d4cf980e5524d66","lib.rs":"d32a0b432c49053214a4aa51fd5e6b62215dea5a001f229a8ba1a17eb6be20f1"},"package":"de154f638187706bde41d9b4738748933d64e6b37bdbffc0b47a97d16a6ae356"}
0 third_party/rust/percent-encoding/.cargo-ok vendored Normal file
16 third_party/rust/percent-encoding/Cargo.toml vendored Normal file
@@ -0,0 +1,16 @@
[package]
name = "percent-encoding"
version = "1.0.0"
authors = ["The rust-url developers"]
description = "Percent encoding and decoding"
repository = "https://github.com/servo/rust-url/"
license = "MIT/Apache-2.0"

[lib]
doctest = false
test = false
path = "lib.rs"

[dev-dependencies]
rustc-test = "0.1"
rustc-serialize = "0.3"
@@ -6,7 +6,32 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use encoding;
//! URLs use special characters to indicate the parts of the request. For example, a forward slash
//! indicates a path. In order for that character to exist outside of a path separator, that
//! character would need to be encoded.
//!
//! Percent encoding replaces reserved characters with the `%` escape character followed by the hexadecimal
//! ASCII representation. For non-ASCII characters that are percent encoded, a UTF-8 byte sequence
//! becomes percent encoded. A simple example can be seen when the space literal is replaced with
//! `%20`.
//!
//! Percent encoding is further complicated by the fact that different parts of an URL have
//! different encoding requirements. In order to support the variety of encoding requirements,
//! `url::percent_encoding` includes different *encode sets*.
//! See [URL Standard](https://url.spec.whatwg.org/#percent-encoded-bytes) for details.
//!
//! This module provides some `*_ENCODE_SET` constants.
//! If a different set is required, it can be created with
//! the [`define_encode_set!`](../macro.define_encode_set!.html) macro.
//!
//! # Examples
//!
//! ```
//! use url::percent_encoding::{utf8_percent_encode, DEFAULT_ENCODE_SET};
//!
//! assert_eq!(utf8_percent_encode("foo bar?", DEFAULT_ENCODE_SET).to_string(), "foo%20bar%3F");
//! ```

use std::ascii::AsciiExt;
use std::borrow::Cow;
use std::fmt;
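A minimal round-trip sketch against the public items documented above (`utf8_percent_encode`, `percent_decode`, `DEFAULT_ENCODE_SET`); the paths assume the new standalone `percent-encoding` crate rather than `url::percent_encoding`.

// Editor's sketch: encode, then decode back, with the default encode set.
extern crate percent_encoding;
use percent_encoding::{utf8_percent_encode, percent_decode, DEFAULT_ENCODE_SET};

fn main() {
    let encoded = utf8_percent_encode("foo bar?", DEFAULT_ENCODE_SET).to_string();
    assert_eq!(encoded, "foo%20bar%3F");

    let decoded = percent_decode(encoded.as_bytes()).decode_utf8().unwrap();
    assert_eq!(decoded, "foo bar?");
}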
@@ -44,8 +69,8 @@ pub trait EncodeSet: Clone {
/// =======
///
/// ```rust
/// #[macro_use] extern crate url;
/// use url::percent_encoding::{utf8_percent_encode, SIMPLE_ENCODE_SET};
/// #[macro_use] extern crate percent_encoding;
/// use percent_encoding::{utf8_percent_encode, SIMPLE_ENCODE_SET};
/// define_encode_set! {
/// /// This encode set is used in the URL parser for query strings.
/// pub QUERY_ENCODE_SET = [SIMPLE_ENCODE_SET] | {' ', '"', '#', '<', '>'}
@@ -58,11 +83,11 @@ pub trait EncodeSet: Clone {
macro_rules! define_encode_set {
($(#[$attr: meta])* pub $name: ident = [$base_set: expr] | {$($ch: pat),*}) => {
$(#[$attr])*
#[derive(Copy, Clone)]
#[derive(Copy, Clone, Debug)]
#[allow(non_camel_case_types)]
pub struct $name;

impl $crate::percent_encoding::EncodeSet for $name {
impl $crate::EncodeSet for $name {
#[inline]
fn contains(&self, byte: u8) -> bool {
match byte as char {
@@ -77,7 +102,10 @@ macro_rules! define_encode_set {
}

/// This encode set is used for the path of cannot-be-a-base URLs.
#[derive(Copy, Clone)]
///
/// All ASCII characters less than hexadecimal 20 and greater than 7E are encoded. This includes
/// special characters such as line feed, carriage return, NULL, etc.
#[derive(Copy, Clone, Debug)]
#[allow(non_camel_case_types)]
pub struct SIMPLE_ENCODE_SET;

@@ -90,21 +118,39 @@ impl EncodeSet for SIMPLE_ENCODE_SET {

define_encode_set! {
/// This encode set is used in the URL parser for query strings.
///
/// Aside from special characters defined in the [`SIMPLE_ENCODE_SET`](struct.SIMPLE_ENCODE_SET.html),
/// space, double quote ("), hash (#), and inequality qualifiers (<), (>) are encoded.
pub QUERY_ENCODE_SET = [SIMPLE_ENCODE_SET] | {' ', '"', '#', '<', '>'}
}

define_encode_set! {
/// This encode set is used for path components.
///
/// Aside from special characters defined in the [`SIMPLE_ENCODE_SET`](struct.SIMPLE_ENCODE_SET.html),
/// space, double quote ("), hash (#), inequality qualifiers (<), (>), backtick (`),
/// question mark (?), and curly brackets ({), (}) are encoded.
pub DEFAULT_ENCODE_SET = [QUERY_ENCODE_SET] | {'`', '?', '{', '}'}
}

define_encode_set! {
/// This encode set is used for a '/'-separated path segment
///
/// Aside from special characters defined in the [`SIMPLE_ENCODE_SET`](struct.SIMPLE_ENCODE_SET.html),
/// space, double quote ("), hash (#), inequality qualifiers (<), (>), backtick (`),
/// question mark (?), and curly brackets ({), (}), percent sign (%), forward slash (/) are
/// encoded.
pub PATH_SEGMENT_ENCODE_SET = [DEFAULT_ENCODE_SET] | {'%', '/'}
}

define_encode_set! {
/// This encode set is used for username and password.
///
/// Aside from special characters defined in the [`SIMPLE_ENCODE_SET`](struct.SIMPLE_ENCODE_SET.html),
/// space, double quote ("), hash (#), inequality qualifiers (<), (>), backtick (`),
/// question mark (?), and curly brackets ({), (}), forward slash (/), colon (:), semi-colon (;),
/// equality (=), at (@), backslash (\\), square brackets ([), (]), caret (\^), and pipe (|) are
/// encoded.
pub USERINFO_ENCODE_SET = [DEFAULT_ENCODE_SET] | {
'/', ':', ';', '=', '@', '[', '\\', ']', '^', '|'
}
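For completeness, a hedged example of layering a custom set on top of these with the `define_encode_set!` macro shown earlier; `SHELL_ENCODE_SET` is an invented name for illustration, not one of the crate's constants.

// Editor's sketch: a user-defined encode set layered on SIMPLE_ENCODE_SET.
#[macro_use] extern crate percent_encoding;
use percent_encoding::{utf8_percent_encode, SIMPLE_ENCODE_SET};

define_encode_set! {
    /// Also encodes space and a few shell metacharacters.
    pub SHELL_ENCODE_SET = [SIMPLE_ENCODE_SET] | {' ', '"', '$', '`', '\\'}
}

fn main() {
    assert_eq!(
        utf8_percent_encode("echo \"$HOME\"", SHELL_ENCODE_SET).to_string(),
        "echo%20%22%24HOME%22"
    );
}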
@@ -113,6 +159,15 @@ define_encode_set! {
|
||||
/// Return the percent-encoding of the given bytes.
|
||||
///
|
||||
/// This is unconditional, unlike `percent_encode()` which uses an encode set.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use url::percent_encoding::percent_encode_byte;
|
||||
///
|
||||
/// assert_eq!("foo bar".bytes().map(percent_encode_byte).collect::<String>(),
|
||||
/// "%66%6F%6F%20%62%61%72");
|
||||
/// ```
|
||||
pub fn percent_encode_byte(byte: u8) -> &'static str {
|
||||
let index = usize::from(byte) * 3;
|
||||
&"\
|
||||
@@ -146,6 +201,14 @@ pub fn percent_encode_byte(byte: u8) -> &'static str {
|
||||
/// that also implements `Display` and `Into<Cow<str>>`.
|
||||
/// The latter returns `Cow::Borrowed` when none of the bytes in `input`
|
||||
/// are in the given encode set.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use url::percent_encoding::{percent_encode, DEFAULT_ENCODE_SET};
|
||||
///
|
||||
/// assert_eq!(percent_encode(b"foo bar?", DEFAULT_ENCODE_SET).to_string(), "foo%20bar%3F");
|
||||
/// ```
|
||||
#[inline]
|
||||
pub fn percent_encode<E: EncodeSet>(input: &[u8], encode_set: E) -> PercentEncode<E> {
|
||||
PercentEncode {
|
||||
@@ -157,13 +220,21 @@ pub fn percent_encode<E: EncodeSet>(input: &[u8], encode_set: E) -> PercentEncod
|
||||
/// Percent-encode the UTF-8 encoding of the given string.
|
||||
///
|
||||
/// See `percent_encode()` for how to use the return value.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use url::percent_encoding::{utf8_percent_encode, DEFAULT_ENCODE_SET};
|
||||
///
|
||||
/// assert_eq!(utf8_percent_encode("foo bar?", DEFAULT_ENCODE_SET).to_string(), "foo%20bar%3F");
|
||||
/// ```
|
||||
#[inline]
|
||||
pub fn utf8_percent_encode<E: EncodeSet>(input: &str, encode_set: E) -> PercentEncode<E> {
|
||||
percent_encode(input.as_bytes(), encode_set)
|
||||
}
|
||||
|
||||
/// The return type of `percent_encode()` and `utf8_percent_encode()`.
|
||||
#[derive(Clone)]
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct PercentEncode<'a, E: EncodeSet> {
|
||||
bytes: &'a [u8],
|
||||
encode_set: E,
|
||||
@@ -241,6 +312,14 @@ impl<'a, E: EncodeSet> From<PercentEncode<'a, E>> for Cow<'a, str> {
|
||||
/// that also implements `Into<Cow<u8>>`
|
||||
/// (which returns `Cow::Borrowed` when `input` contains no percent-encoded sequence)
|
||||
/// and has `decode_utf8()` and `decode_utf8_lossy()` methods.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use url::percent_encoding::percent_decode;
|
||||
///
|
||||
/// assert_eq!(percent_decode(b"foo%20bar%3F").decode_utf8().unwrap(), "foo bar?");
|
||||
/// ```
|
||||
#[inline]
|
||||
pub fn percent_decode(input: &[u8]) -> PercentDecode {
|
||||
PercentDecode {
|
||||
@@ -249,7 +328,7 @@ pub fn percent_decode(input: &[u8]) -> PercentDecode {
|
||||
}
|
||||
|
||||
/// The return type of `percent_decode()`.
|
||||
#[derive(Clone)]
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct PercentDecode<'a> {
|
||||
bytes: slice::Iter<'a, u8>,
|
||||
}
|
||||
@@ -339,6 +418,25 @@ impl<'a> PercentDecode<'a> {
|
||||
/// Invalid UTF-8 percent-encoded byte sequences will be replaced with U+FFFD,
|
||||
/// the replacement character.
|
||||
pub fn decode_utf8_lossy(self) -> Cow<'a, str> {
|
||||
encoding::decode_utf8_lossy(self.clone().into())
|
||||
decode_utf8_lossy(self.clone().into())
|
||||
}
|
||||
}
|
||||
|
||||
fn decode_utf8_lossy(input: Cow<[u8]>) -> Cow<str> {
|
||||
match input {
|
||||
Cow::Borrowed(bytes) => String::from_utf8_lossy(bytes),
|
||||
Cow::Owned(bytes) => {
|
||||
let raw_utf8: *const [u8];
|
||||
match String::from_utf8_lossy(&bytes) {
|
||||
Cow::Borrowed(utf8) => raw_utf8 = utf8.as_bytes(),
|
||||
Cow::Owned(s) => return s.into(),
|
||||
}
|
||||
// from_utf8_lossy returned a borrow of `bytes` unchanged.
|
||||
debug_assert!(raw_utf8 == &*bytes as *const [u8]);
|
||||
// Reuse the existing `Vec` allocation.
|
||||
unsafe { String::from_utf8_unchecked(bytes) }.into()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"files":{".cargo-ok":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",".travis.yml":"493704d6a0d0f27ad2ad6e950f8bce5f42d9ec4081daeb3c5a48066f1030f467","COPYRIGHT":"23860c2a7b5d96b21569afedf033469bab9fe14a1b24a35068b8641c578ce24d","Cargo.toml":"1898b4a4ea43acc71f3700c57d388b800c47c6f36b34d5baaa9df5cb536fdcec","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7b63ecd5f1902af1b63729947373683c32745c16a10e8e6292e2e2dcd7e90ae0","README.md":"4f0f6696be822c6e05c38ada717f1d99790b18f46d88ba4c029e80be93e735d8","scripts/unicode.py":"64a1b919ab0e251fdb1db8b9c5363d84227fca33ac8375268bb88b74c4462f8f","scripts/unicode_gen_normtests.py":"da891d433fa58068747a1cd121774435b7d486394ce5c85c8079b227d20ea507","src/decompose.rs":"04818a6b0271412ec58508e44535b18c58c80384a5836fe5b2c24e489a5ab4cc","src/lib.rs":"3a2b271fa833f159d899875a88172b053a7a57c0d9786d5c6ac96fd82c6d7efb","src/normalize.rs":"c49af1939734065cd089c75c3612f9fec20063fd63ccb97416d8e894f0910b70","src/recompose.rs":"96b8aea91e09f1fa439467378f2d1fa2aa4c81c86e597d3d36a2a35d7750a0d6","src/tables.rs":"449e09a608ed21b3026e4b60fc728244749bc5347c6c4bc72cd34e6c45357555","src/test.rs":"e47de49aeef9231f5ff177eeb9946618577fc67ed798e889b99710c877e92e1b","src/testdata.rs":"8def8bcd8a24c700881c57eab78c6fdf19295969f4783eb4a138f25616519d75"},"package":"e28fa37426fceeb5cf8f41ee273faa7c82c47dc8fba5853402841e665fcd86ff"}
|
||||
{"files":{".cargo-ok":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",".travis.yml":"493704d6a0d0f27ad2ad6e950f8bce5f42d9ec4081daeb3c5a48066f1030f467","COPYRIGHT":"23860c2a7b5d96b21569afedf033469bab9fe14a1b24a35068b8641c578ce24d","Cargo.toml":"b262a0a92fca7f012aefc1d72eeb8923d93711f0b3a916315a0dc41584ec82fb","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7b63ecd5f1902af1b63729947373683c32745c16a10e8e6292e2e2dcd7e90ae0","README.md":"4f0f6696be822c6e05c38ada717f1d99790b18f46d88ba4c029e80be93e735d8","scripts/unicode.py":"faff0551e945cfb13b345f21256a53c96f85e0b0d81df6e165b8b62aa8eaa8e9","scripts/unicode_gen_normtests.py":"da891d433fa58068747a1cd121774435b7d486394ce5c85c8079b227d20ea507","src/decompose.rs":"19399cb186245a973a5235118cd3a19e1834926cff4709b0d8a9cc9eea594be3","src/lib.rs":"3a2b271fa833f159d899875a88172b053a7a57c0d9786d5c6ac96fd82c6d7efb","src/normalize.rs":"06580af2b630c17da50e36aaafb9f0e3a728d5ee1de45d6ac1f3043ca723e670","src/recompose.rs":"936bf16efe318f06040bd3a8d2085a4c2e68a03c91d98b7e349f090f88752f9f","src/tables.rs":"566c4b764fa9d21abc8668681821c0bcbb3c54b1956795dc58be506f5540ced7","src/test.rs":"83a05c7dd030069cc1baff70c9933ef3ee65b9aeda4ca32cbbcc1d4c1a33979f","src/testdata.rs":"8def8bcd8a24c700881c57eab78c6fdf19295969f4783eb4a138f25616519d75"},"package":"51ccda9ef9efa3f7ef5d91e8f9b83bbe6955f9bf86aec89d5cce2c874625920f"}
|
||||
@@ -1,21 +1,24 @@
|
||||
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
|
||||
#
|
||||
# When uploading crates to the registry Cargo will automatically
|
||||
# "normalize" Cargo.toml files for maximal compatibility
|
||||
# with all versions of Cargo and also rewrite `path` dependencies
|
||||
# to registry (e.g. crates.io) dependencies
|
||||
#
|
||||
# If you believe there's an error in this file please file an
|
||||
# issue against the rust-lang/cargo repository. If you're
|
||||
# editing this file be aware that the upstream Cargo.toml
|
||||
# will likely look very different (and much more reasonable)
|
||||
|
||||
[package]
|
||||
|
||||
name = "unicode-normalization"
|
||||
version = "0.1.4"
|
||||
version = "0.1.5"
|
||||
authors = ["kwantam <kwantam@gmail.com>"]
|
||||
|
||||
homepage = "https://github.com/unicode-rs/unicode-normalization"
|
||||
repository = "https://github.com/unicode-rs/unicode-normalization"
|
||||
documentation = "https://unicode-rs.github.io/unicode-normalization"
|
||||
|
||||
license = "MIT/Apache-2.0"
|
||||
keywords = ["text", "unicode", "normalization", "decomposition", "recomposition"]
|
||||
readme = "README.md"
|
||||
description = """
|
||||
This crate provides functions for normalization of
|
||||
Unicode strings, including Canonical and Compatible
|
||||
Decomposition and Recomposition, as described in
|
||||
Unicode Standard Annex #15.
|
||||
"""
|
||||
|
||||
exclude = ["target/*", "Cargo.lock", "scripts/tmp", "*.txt"]
|
||||
description = "This crate provides functions for normalization of\nUnicode strings, including Canonical and Compatible\nDecomposition and Recomposition, as described in\nUnicode Standard Annex #15.\n"
|
||||
homepage = "https://github.com/unicode-rs/unicode-normalization"
|
||||
documentation = "https://unicode-rs.github.io/unicode-normalization"
|
||||
readme = "README.md"
|
||||
keywords = ["text", "unicode", "normalization", "decomposition", "recomposition"]
|
||||
license = "MIT/Apache-2.0"
|
||||
repository = "https://github.com/unicode-rs/unicode-normalization"
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
# Since this should not require frequent updates, we just store this
|
||||
# out-of-line and check the unicode.rs file into git.
|
||||
|
||||
import fileinput, re, os, sys
|
||||
import fileinput, re, os, sys, collections
|
||||
|
||||
preamble = '''// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
@@ -160,19 +160,9 @@ def to_combines(combs):
|
||||
return combs_out
|
||||
|
||||
def format_table_content(f, content, indent):
|
||||
line = " "*indent
|
||||
first = True
|
||||
for chunk in content.split(","):
|
||||
if len(line) + len(chunk) < 98:
|
||||
if first:
|
||||
line += chunk
|
||||
else:
|
||||
line += ", " + chunk
|
||||
first = False
|
||||
else:
|
||||
f.write(line + ",\n")
|
||||
line = " "*indent + chunk
|
||||
f.write(line)
|
||||
indent = " "*indent
|
||||
for c in content:
|
||||
f.write("%s%s,\n" % (indent, c))
|
||||
|
||||
def load_properties(f, interestingprops):
|
||||
fetch(f)
|
||||
@@ -220,14 +210,44 @@ def emit_table(f, name, t_data, t_type = "&'static [(char, char)]", is_pub=True,
|
||||
if is_pub:
|
||||
pub_string = "pub "
|
||||
f.write(" %sconst %s: %s = &[\n" % (pub_string, name, t_type))
|
||||
data = ""
|
||||
first = True
|
||||
for dat in t_data:
|
||||
if not first:
|
||||
data += ","
|
||||
first = False
|
||||
data += pfun(dat)
|
||||
format_table_content(f, data, 8)
|
||||
format_table_content(f, [pfun(d) for d in t_data], 8)
|
||||
f.write("\n ];\n\n")
|
||||
|
||||
def emit_strtab_table(f, name, keys, vfun, is_pub=True,
|
||||
tab_entry_type='char', slice_element_sfun=escape_char):
|
||||
pub_string = ""
|
||||
if is_pub:
|
||||
pub_string = "pub "
|
||||
f.write(" %s const %s: &'static [(char, Slice)] = &[\n"
|
||||
% (pub_string, name))
|
||||
|
||||
strtab = collections.OrderedDict()
|
||||
strtab_offset = 0
|
||||
|
||||
# TODO: a more sophisticated algorithm here would not only check for the
|
||||
# existence of v in the strtab, but also v in contiguous substrings of
|
||||
# strtab, if that's possible.
|
||||
for k in keys:
|
||||
v = tuple(vfun(k))
|
||||
if v in strtab:
|
||||
item_slice = strtab[v]
|
||||
else:
|
||||
value_len = len(v)
|
||||
item_slice = (strtab_offset, value_len)
|
||||
strtab[v] = item_slice
|
||||
strtab_offset += value_len
|
||||
|
||||
f.write("%s(%s, Slice { offset: %d, length: %d }),\n"
|
||||
% (" "*8, escape_char(k), item_slice[0], item_slice[1]))
|
||||
|
||||
f.write("\n ];\n\n")
|
||||
|
||||
f.write(" %s const %s_STRTAB: &'static [%s] = &[\n"
|
||||
% (pub_string, name, tab_entry_type))
|
||||
|
||||
for (v, _) in strtab.iteritems():
|
||||
f.write("%s%s,\n" % (" "*8, ', '.join(slice_element_sfun(c) for c in v)))
|
||||
|
||||
f.write("\n ];\n\n")
|
||||
|
||||
def emit_norm_module(f, canon, compat, combine, norm_props, general_category_mark):
|
||||
@@ -251,43 +271,38 @@ def emit_norm_module(f, canon, compat, combine, norm_props, general_category_mar
|
||||
canon_comp_keys.sort()
|
||||
|
||||
f.write("pub mod normalization {\n")
|
||||
f.write("""
|
||||
pub struct Slice {
|
||||
pub offset: u16,
|
||||
pub length: u16,
|
||||
}
|
||||
""")
|
||||
|
||||
def mkdata_fun(table):
|
||||
def f(char):
|
||||
data = "(%s,&[" % escape_char(char)
|
||||
first = True
|
||||
for d in table[char]:
|
||||
if not first:
|
||||
data += ","
|
||||
first = False
|
||||
data += escape_char(d)
|
||||
data += "])"
|
||||
return data
|
||||
return table[char]
|
||||
return f
|
||||
|
||||
# TODO: should the strtab of these two tables be of type &'static str, for
|
||||
# smaller data?
|
||||
f.write(" // Canonical decompositions\n")
|
||||
emit_table(f, "canonical_table", canon_keys, "&'static [(char, &'static [char])]",
|
||||
pfun=mkdata_fun(canon))
|
||||
emit_strtab_table(f, "canonical_table", canon_keys,
|
||||
vfun=mkdata_fun(canon))
|
||||
|
||||
f.write(" // Compatibility decompositions\n")
|
||||
emit_table(f, "compatibility_table", compat_keys, "&'static [(char, &'static [char])]",
|
||||
pfun=mkdata_fun(compat))
|
||||
emit_strtab_table(f, "compatibility_table", compat_keys,
|
||||
vfun=mkdata_fun(compat))
|
||||
|
||||
def comp_pfun(char):
|
||||
data = "(%s,&[" % escape_char(char)
|
||||
canon_comp[char].sort(lambda x, y: x[0] - y[0])
|
||||
first = True
|
||||
for pair in canon_comp[char]:
|
||||
if not first:
|
||||
data += ","
|
||||
first = False
|
||||
data += "(%s,%s)" % (escape_char(pair[0]), escape_char(pair[1]))
|
||||
data += "])"
|
||||
return data
|
||||
def comp_vfun(char):
|
||||
return sorted(canon_comp[char], lambda x, y: x[0] - y[0])
|
||||
|
||||
f.write(" // Canonical compositions\n")
|
||||
emit_table(f, "composition_table", canon_comp_keys,
|
||||
"&'static [(char, &'static [(char, char)])]", pfun=comp_pfun)
|
||||
# "&'static [(char, &'static [(char, char)])]", pfun=comp_pfun)
|
||||
emit_strtab_table(f, "composition_table", canon_comp_keys,
|
||||
vfun=comp_vfun,
|
||||
tab_entry_type="(char, char)",
|
||||
slice_element_sfun=lambda pair: "(%s,%s)" % (escape_char(pair[0]),
|
||||
escape_char(pair[1])))
|
||||
|
||||
f.write("""
|
||||
fn bsearch_range_value_table(c: char, r: &'static [(char, char, u8)]) -> u8 {
|
||||
@@ -335,7 +350,7 @@ def emit_norm_module(f, canon, compat, combine, norm_props, general_category_mar
|
||||
|
||||
""")
|
||||
|
||||
emit_table(f, "general_category_mark", combine, "&'static [(char, char)]", is_pub=False,
|
||||
emit_table(f, "general_category_mark", general_category_mark, "&'static [(char, char)]", is_pub=False,
|
||||
pfun=lambda x: "(%s,%s)" % (escape_char(x[0]), escape_char(x[1])))
|
||||
|
||||
f.write("""
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use std::fmt::{self, Write};
|
||||
|
||||
// Helper functions used for Unicode normalization
|
||||
fn canonical_sort(comb: &mut [(char, u8)]) {
|
||||
@@ -133,3 +134,12 @@ impl<I: Iterator<Item=char>> Iterator for Decompositions<I> {
|
||||
(lower, None)
|
||||
}
|
||||
}
|
||||
|
||||
impl<I: Iterator<Item=char> + Clone> fmt::Display for Decompositions<I> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
for c in self.clone() {
|
||||
f.write_char(c)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -12,17 +12,22 @@
|
||||
|
||||
use std::cmp::Ordering::{Equal, Less, Greater};
|
||||
use std::ops::FnMut;
|
||||
use tables::normalization::{canonical_table, compatibility_table, composition_table};
|
||||
use tables::normalization::{canonical_table, canonical_table_STRTAB};
|
||||
use tables::normalization::{compatibility_table, compatibility_table_STRTAB};
|
||||
use tables::normalization::{composition_table, composition_table_STRTAB};
|
||||
use tables::normalization::Slice;
|
||||
|
||||
fn bsearch_table<T>(c: char, r: &'static [(char, &'static [T])]) -> Option<&'static [T]> {
|
||||
fn bsearch_table<T>(c: char, r: &'static [(char, Slice)], strtab: &'static [T]) -> Option<&'static [T]> {
|
||||
match r.binary_search_by(|&(val, _)| {
|
||||
if c == val { Equal }
|
||||
else if val < c { Less }
|
||||
else { Greater }
|
||||
}) {
|
||||
Ok(idx) => {
|
||||
let (_, result) = r[idx];
|
||||
Some(result)
|
||||
let ref slice = r[idx].1;
|
||||
let offset = slice.offset as usize;
|
||||
let length = slice.length as usize;
|
||||
Some(&strtab[offset..(offset + length)])
|
||||
}
|
||||
Err(_) => None
|
||||
}
|
||||
@@ -50,7 +55,7 @@ fn d<F>(c: char, i: &mut F, k: bool) where F: FnMut(char) {
|
||||
}
|
||||
|
||||
// First check the canonical decompositions
|
||||
match bsearch_table(c, canonical_table) {
|
||||
match bsearch_table(c, canonical_table, canonical_table_STRTAB) {
|
||||
Some(canon) => {
|
||||
for x in canon {
|
||||
d(*x, i, k);
|
||||
@@ -64,7 +69,7 @@ fn d<F>(c: char, i: &mut F, k: bool) where F: FnMut(char) {
|
||||
if !k { (*i)(c); return; }
|
||||
|
||||
// Then check the compatibility decompositions
|
||||
match bsearch_table(c, compatibility_table) {
|
||||
match bsearch_table(c, compatibility_table, compatibility_table_STRTAB) {
|
||||
Some(compat) => {
|
||||
for x in compat {
|
||||
d(*x, i, k);
|
||||
@@ -83,7 +88,7 @@ fn d<F>(c: char, i: &mut F, k: bool) where F: FnMut(char) {
|
||||
/// for more information.
|
||||
pub fn compose(a: char, b: char) -> Option<char> {
|
||||
compose_hangul(a, b).or_else(|| {
|
||||
match bsearch_table(a, composition_table) {
|
||||
match bsearch_table(a, composition_table, composition_table_STRTAB) {
|
||||
None => None,
|
||||
Some(candidates) => {
|
||||
match candidates.binary_search_by(|&(val, _)| {
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
// except according to those terms.
|
||||
|
||||
use std::collections::VecDeque;
|
||||
use std::fmt::{self, Write};
|
||||
use decompose::Decompositions;
|
||||
|
||||
#[derive(Clone)]
|
||||
@@ -135,3 +136,12 @@ impl<I: Iterator<Item=char>> Iterator for Recompositions<I> {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<I: Iterator<Item=char> + Clone> fmt::Display for Recompositions<I> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
for c in self.clone() {
|
||||
f.write_char(c)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
13624 third_party/rust/unicode-normalization/src/tables.rs vendored
File diff suppressed because it is too large
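Since the regenerated tables.rs is too large to show here, the following is a hedged editor's miniature (with made-up table contents) of the layout the script now emits: one flat string table plus `(char, Slice)` index entries, looked up the same way the updated `bsearch_table` below does.

// Editor's sketch, not the generated file: miniature of the strtab layout.
pub struct Slice { pub offset: u16, pub length: u16 }

const CANONICAL_TABLE: &'static [(char, Slice)] = &[
    ('\u{C0}', Slice { offset: 0, length: 2 }), // À -> A + combining grave
    ('\u{C8}', Slice { offset: 2, length: 2 }), // È -> E + combining grave
];
const CANONICAL_TABLE_STRTAB: &'static [char] = &['A', '\u{300}', 'E', '\u{300}'];

fn decomposition(c: char) -> Option<&'static [char]> {
    CANONICAL_TABLE.binary_search_by(|&(k, _)| k.cmp(&c)).ok().map(|i| {
        let off = CANONICAL_TABLE[i].1.offset as usize;
        let len = CANONICAL_TABLE[i].1.length as usize;
        &CANONICAL_TABLE_STRTAB[off..off + len]
    })
}

fn main() {
    assert_eq!(decomposition('\u{C0}'), Some(&['A', '\u{300}'][..]));
    assert_eq!(decomposition('x'), None);
}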
@@ -8,14 +8,19 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use UnicodeNormalization;
|
||||
|
||||
use std::char;
|
||||
use super::UnicodeNormalization;
|
||||
use super::char::is_combining_mark;
|
||||
|
||||
|
||||
#[test]
|
||||
fn test_nfd() {
|
||||
macro_rules! t {
|
||||
($input: expr, $expected: expr) => {
|
||||
assert_eq!($input.nfd().collect::<String>(), $expected);
|
||||
// A dummy iterator that is not std::str::Chars directly:
|
||||
assert_eq!($input.nfd().to_string(), $expected);
|
||||
// A dummy iterator that is not std::str::Chars directly;
|
||||
// note that `id_func` is used to ensure `Clone` implementation
|
||||
assert_eq!($input.chars().map(|c| c).nfd().collect::<String>(), $expected);
|
||||
}
|
||||
}
|
||||
@@ -35,7 +40,7 @@ fn test_nfd() {
|
||||
fn test_nfkd() {
|
||||
macro_rules! t {
|
||||
($input: expr, $expected: expr) => {
|
||||
assert_eq!($input.nfkd().collect::<String>(), $expected);
|
||||
assert_eq!($input.nfkd().to_string(), $expected);
|
||||
}
|
||||
}
|
||||
t!("abc", "abc");
|
||||
@@ -54,7 +59,7 @@ fn test_nfkd() {
|
||||
fn test_nfc() {
|
||||
macro_rules! t {
|
||||
($input: expr, $expected: expr) => {
|
||||
assert_eq!($input.nfc().collect::<String>(), $expected);
|
||||
assert_eq!($input.nfc().to_string(), $expected);
|
||||
}
|
||||
}
|
||||
t!("abc", "abc");
|
||||
@@ -74,7 +79,7 @@ fn test_nfc() {
|
||||
fn test_nfkc() {
|
||||
macro_rules! t {
|
||||
($input: expr, $expected: expr) => {
|
||||
assert_eq!($input.nfkc().collect::<String>(), $expected);
|
||||
assert_eq!($input.nfkc().to_string(), $expected);
|
||||
}
|
||||
}
|
||||
t!("abc", "abc");
|
||||
@@ -153,3 +158,24 @@ fn test_official() {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
#[test]
|
||||
fn test_is_combining_mark_ascii() {
|
||||
for cp in 0..0x7f {
|
||||
assert!(!is_combining_mark(char::from_u32(cp).unwrap()));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_combining_mark_misc() {
|
||||
// https://github.com/unicode-rs/unicode-normalization/issues/16
|
||||
// U+11C3A BHAIKSUKI VOWEL SIGN O
|
||||
// Category: Mark, Nonspacing [Mn]
|
||||
assert!(is_combining_mark('\u{11C3A}'));
|
||||
|
||||
// U+11C3F BHAIKSUKI SIGN VIRAMA
|
||||
// Category: Mark, Nonspacing [Mn]
|
||||
assert!(is_combining_mark('\u{11C3F}'));
|
||||
}
|
||||
|
||||
2 third_party/rust/url/.cargo-checksum.json vendored
@@ -1 +1 @@
|
||||
{"files":{".cargo-ok":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",".travis.yml":"7a0722a759238d2a804e715e4afd3cbe7a2696771fd4e5d6ef9cf8d36354d306","Cargo.toml":"9487d35eec356799c7e06355d25c65275900a546769da368c55e1947e3d38d2f","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"20c7855c364d57ea4c97889a5e8d98470a9952dade37bd9248b9a54431670e5e","Makefile":"bffd75d34654b2955d4f005f1a5e85c821c90becf1a8a52cbe10121972f43148","README.md":"eb3f4694003f408cbe3c7f3e9fbbc71241defb940cc55a816981f0f0f144c8eb","UPGRADING.md":"fbcc2d39bdf17db0745793db6626fcd5c909dddd4ce13b27566cfabece22c368","appveyor.yml":"c78486dbfbe6ebbf3d808afb9a19f7ec18c4704ce451c6305f0716999b70a1a6","docs/.nojekyll":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","docs/404.html":"f61e6271c1ea1aa113b64b356e994595fa548f0433f89948d747503ad22195cd","docs/index.html":"f61e6271c1ea1aa113b64b356e994595fa548f0433f89948d747503ad22195cd","github.png":"b432fd855efe7c430fe6a57ccf83935c1996f03a7cdc8d6e1b34154b8c43f6ec","src/encoding.rs":"7fb43e1c109bf9f2a80a05525082f90e79dba8e8056547571c49fba074406d39","src/form_urlencoded.rs":"0778240691a4b501dc1bde94e03b60e933f8f3f163025ef5dcf124fd946845e2","src/host.rs":"76474c04c3b998ce6891d6c7e8ef30fd4680308da8e451c0052655835588bd5d","src/lib.rs":"c433461a2fe3b37ff3f4b1f078ea40ea159d368b315c734f081f4ee770e3998c","src/origin.rs":"76b91ec7522e4a578c3d848541e3c1e08fba8cc0d5a5baabf829797dacc2275d","src/parser.rs":"670f1c36b1e1c2af0456af45196d23fd7b3d83d4f574e23d7ba536eb0003fa73","src/path_segments.rs":"85e16d4e3a7bf4162b2ddd6a14beda0d753395da7c8efe208c52862fc2b21221","src/percent_encoding.rs":"d8f9c2cc18615f705898b4d5e2b8e41506c9639190c74b1615b5f42ea41bafe3","src/quirks.rs":"1231f965e22bb3632c22993e2a8d4c7470bcb4a8de25d049f31784303f0def03","src/slicing.rs":"4e539886b23945a92094625f3e531a4bff40daa44240b5d19ee8577478c4f7fe","tests/data.rs":"c333766897f6492fb6583ab5c8a511973b7a55f58ca550799432343da64d5ca7","tests/setters_tests.json":"ebcbdb52e9a4b5a565f8806d52ebc610d46a34df883e10b0be080d026468ff73","tests/unit.rs":"9cc21b36e7dba8bec4af465cd0b2c1ed3c015cd3c0f85d610aef39109afeb2c4","tests/urltestdata.json":"430c74aa3a31afaa57a92805544e00825f4dffe2def98c1e3c212c3db80268af"},"package":"3e2ba3456fbe5c0098cb877cf08b92b76c3e18e0be9e47c35b487220d377d24e"}
|
||||
{"files":{".cargo-ok":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",".travis.yml":"890af214187ffcba4732acb2d1af30d7adb9aade0679e9fdb06baae363240b8e","Cargo.toml":"ec586106c4d0625919a3591fe3ae915043e82c8bfdd1c9e747171ba5e21047e1","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"20c7855c364d57ea4c97889a5e8d98470a9952dade37bd9248b9a54431670e5e","Makefile":"bffd75d34654b2955d4f005f1a5e85c821c90becf1a8a52cbe10121972f43148","README.md":"eb3f4694003f408cbe3c7f3e9fbbc71241defb940cc55a816981f0f0f144c8eb","UPGRADING.md":"fbcc2d39bdf17db0745793db6626fcd5c909dddd4ce13b27566cfabece22c368","appveyor.yml":"c78486dbfbe6ebbf3d808afb9a19f7ec18c4704ce451c6305f0716999b70a1a6","docs/.nojekyll":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","docs/404.html":"f61e6271c1ea1aa113b64b356e994595fa548f0433f89948d747503ad22195cd","docs/index.html":"f61e6271c1ea1aa113b64b356e994595fa548f0433f89948d747503ad22195cd","github.png":"b432fd855efe7c430fe6a57ccf83935c1996f03a7cdc8d6e1b34154b8c43f6ec","rust-url-todo":"1192cee7b6cedf2133d97dc6074b593a1d19b0ee13fff6f28d6329855044e575","src/encoding.rs":"f3e109ca8ec5a9130da50cdfb3003530aedb6dd5a440f0790d76b71f6981119c","src/form_urlencoded.rs":"7ccaef7148e4bc2577154c50f8705db3a055b641269e24c22770f06222321e1e","src/host.rs":"281165d732ea87b6f01a98f7c68ffcb284c41f84b3ab6ed674fb8e57022d1019","src/lib.rs":"bd156e8bcfbd44f0cd52c8b394e03ec63fea012c0bf5ca554521352714838605","src/origin.rs":"7071dcc1070ccfae84cdcd43586b84a9706e35a9a099ff4dde128da0909bd0bc","src/parser.rs":"9d30868f0900586fec6f122a0322598a08116ab0b4c4d8caf5c35a720381a73a","src/path_segments.rs":"7bd3142eaa568863ef44e2255c181239141f9eeee337f889b9ffaaeab4ca669d","src/quirks.rs":"1231f965e22bb3632c22993e2a8d4c7470bcb4a8de25d049f31784303f0def03","src/slicing.rs":"4e539886b23945a92094625f3e531a4bff40daa44240b5d19ee8577478c4f7fe","tests/data.rs":"c333766897f6492fb6583ab5c8a511973b7a55f58ca550799432343da64d5ca7","tests/setters_tests.json":"ebcbdb52e9a4b5a565f8806d52ebc610d46a34df883e10b0be080d026468ff73","tests/unit.rs":"c2f206f433be619414d761d358a2a4a5a46cfe8a4fea5339adec5e9937d78de2","tests/urltestdata.json":"430c74aa3a31afaa57a92805544e00825f4dffe2def98c1e3c212c3db80268af"},"package":"eeb819346883532a271eb626deb43c4a1bb4c4dd47c519bd78137c3e72a4fe27"}
|
||||
2 third_party/rust/url/.travis.yml vendored
@@ -3,7 +3,7 @@ rust:
|
||||
- nightly
|
||||
- beta
|
||||
- stable
|
||||
- 1.15.0
|
||||
- 1.17.0
|
||||
script: make test
|
||||
notifications:
|
||||
webhooks: http://build.servo.org:54856/travis
|
||||
|
||||
74 third_party/rust/url/Cargo.toml vendored
@@ -1,29 +1,24 @@
|
||||
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
|
||||
#
|
||||
# When uploading crates to the registry Cargo will automatically
|
||||
# "normalize" Cargo.toml files for maximal compatibility
|
||||
# with all versions of Cargo and also rewrite `path` dependencies
|
||||
# to registry (e.g. crates.io) dependencies
|
||||
#
|
||||
# If you believe there's an error in this file please file an
|
||||
# issue against the rust-lang/cargo repository. If you're
|
||||
# editing this file be aware that the upstream Cargo.toml
|
||||
# will likely look very different (and much more reasonable)
|
||||
|
||||
[package]
|
||||
|
||||
name = "url"
|
||||
version = "1.4.1"
|
||||
# When updating version, also modify html_root_url in the lib.rs
|
||||
version = "1.5.1"
|
||||
authors = ["The rust-url developers"]
|
||||
|
||||
description = "URL library for Rust, based on the WHATWG URL Standard"
|
||||
documentation = "https://docs.rs/url"
|
||||
repository = "https://github.com/servo/rust-url"
|
||||
readme = "README.md"
|
||||
keywords = ["url", "parser"]
|
||||
categories = ["parser-implementations", "web-programming", "encoding"]
|
||||
license = "MIT/Apache-2.0"
|
||||
repository = "https://github.com/servo/rust-url"
|
||||
|
||||
[lib]
|
||||
test = false
|
||||
[badges]
|
||||
travis-ci = { repository = "servo/rust-url" }
|
||||
appveyor = { repository = "servo/rust-url" }
|
||||
|
||||
[workspace]
|
||||
members = [".", "idna", "percent_encoding", "url_serde"]
|
||||
|
||||
[[test]]
|
||||
name = "unit"
|
||||
@@ -31,41 +26,24 @@ name = "unit"
|
||||
[[test]]
|
||||
name = "data"
|
||||
harness = false
|
||||
[dependencies.encoding]
|
||||
version = "0.2"
|
||||
optional = true
|
||||
|
||||
[dependencies.heapsize]
|
||||
version = ">=0.1.1, <0.5"
|
||||
optional = true
|
||||
[lib]
|
||||
test = false
|
||||
|
||||
[dependencies.rustc-serialize]
|
||||
version = "0.3"
|
||||
optional = true
|
||||
|
||||
[dependencies.idna]
|
||||
version = "0.1.0"
|
||||
|
||||
[dependencies.serde]
|
||||
version = ">=0.6.1, <0.9"
|
||||
optional = true
|
||||
|
||||
[dependencies.matches]
|
||||
version = "0.1"
|
||||
[dev-dependencies.serde_json]
|
||||
version = ">=0.6.1, <0.9"
|
||||
|
||||
[dev-dependencies.rustc-test]
|
||||
version = "0.1"
|
||||
|
||||
[dev-dependencies.rustc-serialize]
|
||||
version = "0.3"
|
||||
[dev-dependencies]
|
||||
rustc-test = "0.1"
|
||||
rustc-serialize = "0.3"
|
||||
serde_json = ">=0.6.1, <0.9"
|
||||
|
||||
[features]
|
||||
heap_size = ["heapsize"]
|
||||
query_encoding = ["encoding"]
|
||||
[badges.appveyor]
|
||||
repository = "servo/rust-url"
|
||||
heap_size = ["heapsize"]
|
||||
|
||||
[badges.travis-ci]
|
||||
repository = "servo/rust-url"
|
||||
[dependencies]
|
||||
encoding = {version = "0.2", optional = true}
|
||||
heapsize = {version = ">=0.1.1, <0.5", optional = true}
|
||||
idna = { version = "0.1.0", path = "./idna" }
|
||||
matches = "0.1"
|
||||
percent-encoding = { version = "1.0.0", path = "./percent_encoding" }
|
||||
rustc-serialize = {version = "0.3", optional = true}
|
||||
serde = {version = ">=0.6.1, <0.9", optional = true}
|
||||
|
||||
14 third_party/rust/url/rust-url-todo vendored Normal file
@@ -0,0 +1,14 @@
|
||||
* standalone path parsing?
|
||||
* Test setters
|
||||
* Test trim C0/space
|
||||
* Test remove tab & newline
|
||||
|
||||
|
||||
|
||||
#[test]
|
||||
fn test_path_segments() {
|
||||
let mut url = Url::parse("http://example.net").unwrap();
|
||||
url.push_path_segment("foo").unwrap();
|
||||
url.extend_path_segments(&["bar", "b/az"]).unwrap();
|
||||
assert_eq!(url.as_str(), "http://example.net/foo");
|
||||
}
|
||||
13 third_party/rust/url/src/encoding.rs vendored
@@ -13,6 +13,7 @@
|
||||
#[cfg(feature = "query_encoding")] extern crate encoding;
|
||||
|
||||
use std::borrow::Cow;
|
||||
#[cfg(feature = "query_encoding")] use std::fmt::{self, Debug, Formatter};
|
||||
|
||||
#[cfg(feature = "query_encoding")] use self::encoding::types::{DecoderTrap, EncoderTrap};
|
||||
#[cfg(feature = "query_encoding")] use self::encoding::label::encoding_from_whatwg_label;
|
||||
@@ -89,9 +90,19 @@ impl EncodingOverride {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "query_encoding")]
|
||||
impl Debug for EncodingOverride {
|
||||
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||
write!(f, "EncodingOverride {{ encoding: ")?;
|
||||
match self.encoding {
|
||||
Some(e) => write!(f, "{} }}", e.name()),
|
||||
None => write!(f, "None }}")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "query_encoding"))]
|
||||
#[derive(Copy, Clone)]
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub struct EncodingOverride;
|
||||
|
||||
#[cfg(not(feature = "query_encoding"))]
|
||||
|
||||
5 third_party/rust/url/src/form_urlencoded.rs vendored
@@ -81,7 +81,7 @@ pub fn parse_with_encoding<'a>(input: &'a [u8],
|
||||
}
|
||||
|
||||
/// The return type of `parse()`.
|
||||
#[derive(Copy, Clone)]
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub struct Parse<'a> {
|
||||
input: &'a [u8],
|
||||
encoding: EncodingOverride,
|
||||
@@ -145,6 +145,7 @@ impl<'a> Parse<'a> {
|
||||
}
|
||||
|
||||
/// Like `Parse`, but yields pairs of `String` instead of pairs of `Cow<str>`.
|
||||
#[derive(Debug)]
|
||||
pub struct ParseIntoOwned<'a> {
|
||||
inner: Parse<'a>
|
||||
}
|
||||
@@ -168,6 +169,7 @@ pub fn byte_serialize(input: &[u8]) -> ByteSerialize {
|
||||
}
|
||||
|
||||
/// Return value of `byte_serialize()`.
|
||||
#[derive(Debug)]
|
||||
pub struct ByteSerialize<'a> {
|
||||
bytes: &'a [u8],
|
||||
}
|
||||
@@ -209,6 +211,7 @@ impl<'a> Iterator for ByteSerialize<'a> {
|
||||
|
||||
/// The [`application/x-www-form-urlencoded` serializer](
|
||||
/// https://url.spec.whatwg.org/#concept-urlencoded-serializer).
|
||||
#[derive(Debug)]
|
||||
pub struct Serializer<T: Target> {
|
||||
target: Option<T>,
|
||||
start_position: usize,
|
||||
|
||||
13 third_party/rust/url/src/host.rs vendored
@@ -176,7 +176,7 @@ impl<S: AsRef<str>> fmt::Display for Host<S> {
|
||||
|
||||
/// This mostly exists because coherence rules don’t allow us to implement
|
||||
/// `ToSocketAddrs for (Host<S>, u16)`.
|
||||
#[derive(Clone)]
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct HostAndPort<S=String> {
|
||||
pub host: Host<S>,
|
||||
pub port: u16,
|
||||
@@ -192,6 +192,15 @@ impl<'a> HostAndPort<&'a str> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: AsRef<str>> fmt::Display for HostAndPort<S> {
|
||||
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||
self.host.fmt(f)?;
|
||||
f.write_str(":")?;
|
||||
self.port.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
impl<S: AsRef<str>> ToSocketAddrs for HostAndPort<S> {
|
||||
type Iter = SocketAddrs;
|
||||
|
||||
@@ -213,10 +222,12 @@ impl<S: AsRef<str>> ToSocketAddrs for HostAndPort<S> {
|
||||
}
|
||||
|
||||
/// Socket addresses for an URL.
|
||||
#[derive(Debug)]
|
||||
pub struct SocketAddrs {
|
||||
state: SocketAddrsState
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum SocketAddrsState {
|
||||
Domain(vec::IntoIter<SocketAddr>),
|
||||
One(SocketAddr),
|
||||
|
||||
742 third_party/rust/url/src/lib.rs vendored
File diff suppressed because it is too large
4 third_party/rust/url/src/origin.rs vendored
@@ -50,7 +50,7 @@ pub fn url_origin(url: &Url) -> Origin {
|
||||
/// the URL does not have the same origin as any other URL.
|
||||
///
|
||||
/// For more information see https://url.spec.whatwg.org/#origin
|
||||
#[derive(PartialEq, Eq, Clone, Debug)]
|
||||
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
|
||||
pub enum Origin {
|
||||
/// A globally unique identifier
|
||||
Opaque(OpaqueOrigin),
|
||||
@@ -123,7 +123,7 @@ impl Origin {
|
||||
}
|
||||
|
||||
/// Opaque identifier for URLs that have file or other schemes
|
||||
#[derive(Eq, PartialEq, Clone, Debug)]
|
||||
#[derive(Eq, PartialEq, Hash, Clone, Debug)]
|
||||
pub struct OpaqueOrigin(usize);
|
||||
|
||||
#[cfg(feature = "heapsize")]
|
||||
|
||||
3 third_party/rust/url/src/parser.rs vendored
@@ -57,6 +57,9 @@ simple_enum_error! {
|
||||
Overflow => "URLs more than 4 GB are not supported",
|
||||
}
|
||||
|
||||
#[cfg(feature = "heapsize")]
|
||||
known_heap_size!(0, ParseError);
|
||||
|
||||
impl fmt::Display for ParseError {
|
||||
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
|
||||
self.description().fmt(fmt)
|
||||
|
||||
73 third_party/rust/url/src/path_segments.rs vendored
@@ -18,14 +18,22 @@ use Url;
|
||||
/// Examples:
|
||||
///
|
||||
/// ```rust
|
||||
/// # use url::Url;
|
||||
/// let mut url = Url::parse("mailto:me@example.com").unwrap();
|
||||
/// use url::Url;
|
||||
/// # use std::error::Error;
|
||||
///
|
||||
/// # fn run() -> Result<(), Box<Error>> {
|
||||
/// let mut url = Url::parse("mailto:me@example.com")?;
|
||||
/// assert!(url.path_segments_mut().is_err());
|
||||
///
|
||||
/// let mut url = Url::parse("http://example.net/foo/index.html").unwrap();
|
||||
/// url.path_segments_mut().unwrap().pop().push("img").push("2/100%.png");
|
||||
/// let mut url = Url::parse("http://example.net/foo/index.html")?;
|
||||
/// url.path_segments_mut().map_err(|_| "cannot be base")?
|
||||
/// .pop().push("img").push("2/100%.png");
|
||||
/// assert_eq!(url.as_str(), "http://example.net/foo/img/2%2F100%25.png");
|
||||
/// # Ok(())
|
||||
/// # }
|
||||
/// # run().unwrap();
|
||||
/// ```
|
||||
#[derive(Debug)]
|
||||
pub struct PathSegmentsMut<'a> {
|
||||
url: &'a mut Url,
|
||||
after_first_slash: usize,
|
||||
@@ -60,10 +68,17 @@ impl<'a> PathSegmentsMut<'a> {
|
||||
/// Example:
|
||||
///
|
||||
/// ```rust
|
||||
/// # use url::Url;
|
||||
/// let mut url = Url::parse("https://github.com/servo/rust-url/").unwrap();
|
||||
/// url.path_segments_mut().unwrap().clear().push("logout");
|
||||
/// use url::Url;
|
||||
/// # use std::error::Error;
|
||||
///
|
||||
/// # fn run() -> Result<(), Box<Error>> {
|
||||
/// let mut url = Url::parse("https://github.com/servo/rust-url/")?;
|
||||
/// url.path_segments_mut().map_err(|_| "cannot be base")?
|
||||
/// .clear().push("logout");
|
||||
/// assert_eq!(url.as_str(), "https://github.com/logout");
|
||||
/// # Ok(())
|
||||
/// # }
|
||||
/// # run().unwrap();
|
||||
/// ```
|
||||
pub fn clear(&mut self) -> &mut Self {
|
||||
self.url.serialization.truncate(self.after_first_slash);
|
||||
@@ -81,14 +96,22 @@ impl<'a> PathSegmentsMut<'a> {
|
||||
/// Example:
|
||||
///
|
||||
/// ```rust
|
||||
/// # use url::Url;
|
||||
/// let mut url = Url::parse("https://github.com/servo/rust-url/").unwrap();
|
||||
/// url.path_segments_mut().unwrap().push("pulls");
|
||||
/// use url::Url;
|
||||
/// # use std::error::Error;
|
||||
///
|
||||
/// # fn run() -> Result<(), Box<Error>> {
|
||||
/// let mut url = Url::parse("https://github.com/servo/rust-url/")?;
|
||||
/// url.path_segments_mut().map_err(|_| "cannot be base")?
|
||||
/// .push("pulls");
|
||||
/// assert_eq!(url.as_str(), "https://github.com/servo/rust-url//pulls");
|
||||
///
|
||||
/// let mut url = Url::parse("https://github.com/servo/rust-url/").unwrap();
|
||||
/// url.path_segments_mut().unwrap().pop_if_empty().push("pulls");
|
||||
/// let mut url = Url::parse("https://github.com/servo/rust-url/")?;
|
||||
/// url.path_segments_mut().map_err(|_| "cannot be base")?
|
||||
/// .pop_if_empty().push("pulls");
|
||||
/// assert_eq!(url.as_str(), "https://github.com/servo/rust-url/pulls");
|
||||
/// # Ok(())
|
||||
/// # }
|
||||
/// # run().unwrap();
|
||||
/// ```
|
||||
pub fn pop_if_empty(&mut self) -> &mut Self {
|
||||
if self.url.serialization[self.after_first_slash..].ends_with('/') {
|
||||
@@ -138,23 +161,37 @@ impl<'a> PathSegmentsMut<'a> {
|
||||
/// Example:
|
||||
///
|
||||
/// ```rust
|
||||
/// # use url::Url;
|
||||
/// let mut url = Url::parse("https://github.com/").unwrap();
|
||||
/// use url::Url;
|
||||
/// # use std::error::Error;
|
||||
///
|
||||
/// # fn run() -> Result<(), Box<Error>> {
|
||||
/// let mut url = Url::parse("https://github.com/")?;
|
||||
/// let org = "servo";
|
||||
/// let repo = "rust-url";
|
||||
/// let issue_number = "188";
|
||||
/// url.path_segments_mut().unwrap().extend(&[org, repo, "issues", issue_number]);
|
||||
/// url.path_segments_mut().map_err(|_| "cannot be base")?
|
||||
/// .extend(&[org, repo, "issues", issue_number]);
|
||||
/// assert_eq!(url.as_str(), "https://github.com/servo/rust-url/issues/188");
|
||||
/// # Ok(())
|
||||
/// # }
|
||||
/// # run().unwrap();
|
||||
/// ```
|
||||
///
|
||||
/// In order to make sure that parsing the serialization of an URL gives the same URL,
|
||||
/// a segment is ignored if it is `"."` or `".."`:
|
||||
///
|
||||
/// ```rust
|
||||
/// # use url::Url;
|
||||
/// let mut url = Url::parse("https://github.com/servo").unwrap();
|
||||
/// url.path_segments_mut().unwrap().extend(&["..", "rust-url", ".", "pulls"]);
|
||||
/// use url::Url;
|
||||
/// # use std::error::Error;
|
||||
///
|
||||
/// # fn run() -> Result<(), Box<Error>> {
|
||||
/// let mut url = Url::parse("https://github.com/servo")?;
|
||||
/// url.path_segments_mut().map_err(|_| "cannot be base")?
|
||||
/// .extend(&["..", "rust-url", ".", "pulls"]);
|
||||
/// assert_eq!(url.as_str(), "https://github.com/servo/rust-url/pulls");
|
||||
/// # Ok(())
|
||||
/// # }
|
||||
/// # run().unwrap();
|
||||
/// ```
|
||||
pub fn extend<I>(&mut self, segments: I) -> &mut Self
|
||||
where I: IntoIterator, I::Item: AsRef<str> {
|
||||
|
||||
108 third_party/rust/url/tests/unit.rs vendored
@@ -14,7 +14,7 @@ extern crate url;
|
||||
use std::borrow::Cow;
|
||||
use std::net::{Ipv4Addr, Ipv6Addr};
|
||||
use std::path::{Path, PathBuf};
|
||||
use url::{Host, Url, form_urlencoded};
|
||||
use url::{Host, HostAndPort, Url, form_urlencoded};
|
||||
|
||||
#[test]
|
||||
fn size() {
|
||||
@@ -255,6 +255,36 @@ fn test_form_serialize() {
|
||||
assert_eq!(encoded, "foo=%C3%A9%26&bar=&foo=%23");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn host_and_port_display() {
|
||||
assert_eq!(
|
||||
format!(
|
||||
"{}",
|
||||
HostAndPort{ host: Host::Domain("www.mozilla.org"), port: 80}
|
||||
),
|
||||
"www.mozilla.org:80"
|
||||
);
|
||||
assert_eq!(
|
||||
format!(
|
||||
"{}",
|
||||
HostAndPort::<String>{ host: Host::Ipv4(Ipv4Addr::new(1, 35, 33, 49)), port: 65535 }
|
||||
),
|
||||
"1.35.33.49:65535"
|
||||
);
|
||||
assert_eq!(
|
||||
format!(
|
||||
"{}",
|
||||
HostAndPort::<String>{
|
||||
host: Host::Ipv6(Ipv6Addr::new(
|
||||
0x2001, 0x0db8, 0x85a3, 0x08d3, 0x1319, 0x8a2e, 0x0370, 0x7344
|
||||
)),
|
||||
port: 1337
|
||||
})
|
||||
,
|
||||
"[2001:db8:85a3:8d3:1319:8a2e:370:7344]:1337"
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
/// https://github.com/servo/rust-url/issues/25
|
||||
fn issue_25() {
|
||||
@@ -344,6 +374,13 @@ fn test_set_host() {
|
||||
assert_eq!(url.as_str(), "foobar:/hello");
|
||||
}
|
||||
|
||||
#[test]
|
||||
// https://github.com/servo/rust-url/issues/166
|
||||
fn test_leading_dots() {
|
||||
assert_eq!(Host::parse(".org").unwrap(), Host::Domain(".org".to_owned()));
|
||||
assert_eq!(Url::parse("file://./foo").unwrap().domain(), Some("."));
|
||||
}
|
||||
|
||||
// This is testing that the macro produces buildable code when invoked
|
||||
// inside both a module and a function
|
||||
#[test]
|
||||
@@ -372,3 +409,72 @@ fn define_encode_set_scopes() {
|
||||
|
||||
m::test();
|
||||
}
|
||||
|
||||
#[test]
|
||||
/// https://github.com/servo/rust-url/issues/302
|
||||
fn test_origin_hash() {
|
||||
use std::hash::{Hash,Hasher};
|
||||
use std::collections::hash_map::DefaultHasher;
|
||||
|
||||
fn hash<T: Hash>(value: &T) -> u64 {
|
||||
let mut hasher = DefaultHasher::new();
|
||||
value.hash(&mut hasher);
|
||||
hasher.finish()
|
||||
}
|
||||
|
||||
let origin = &Url::parse("http://example.net/").unwrap().origin();
|
||||
|
||||
let origins_to_compare = [
|
||||
Url::parse("http://example.net:80/").unwrap().origin(),
|
||||
Url::parse("http://example.net:81/").unwrap().origin(),
|
||||
Url::parse("http://example.net").unwrap().origin(),
|
||||
Url::parse("http://example.net/hello").unwrap().origin(),
|
||||
Url::parse("https://example.net").unwrap().origin(),
|
||||
Url::parse("ftp://example.net").unwrap().origin(),
|
||||
Url::parse("file://example.net").unwrap().origin(),
|
||||
Url::parse("http://user@example.net/").unwrap().origin(),
|
||||
Url::parse("http://user:pass@example.net/").unwrap().origin(),
|
||||
];
|
||||
|
||||
for origin_to_compare in &origins_to_compare {
|
||||
if origin == origin_to_compare {
|
||||
assert_eq!(hash(origin), hash(origin_to_compare));
|
||||
} else {
|
||||
assert_ne!(hash(origin), hash(origin_to_compare));
|
||||
}
|
||||
}
|
||||
|
||||
let opaque_origin = Url::parse("file://example.net").unwrap().origin();
|
||||
let same_opaque_origin = Url::parse("file://example.net").unwrap().origin();
|
||||
let other_opaque_origin = Url::parse("file://other").unwrap().origin();
|
||||
|
||||
assert_ne!(hash(&opaque_origin), hash(&same_opaque_origin));
|
||||
assert_ne!(hash(&opaque_origin), hash(&other_opaque_origin));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_windows_unc_path() {
|
||||
if !cfg!(windows) {
|
||||
return
|
||||
}
|
||||
|
||||
let url = Url::from_file_path(Path::new(r"\\host\share\path\file.txt")).unwrap();
|
||||
assert_eq!(url.as_str(), "file://host/share/path/file.txt");
|
||||
|
||||
let url = Url::from_file_path(Path::new(r"\\höst\share\path\file.txt")).unwrap();
|
||||
assert_eq!(url.as_str(), "file://xn--hst-sna/share/path/file.txt");
|
||||
|
||||
let url = Url::from_file_path(Path::new(r"\\192.168.0.1\share\path\file.txt")).unwrap();
|
||||
assert_eq!(url.host(), Some(Host::Ipv4(Ipv4Addr::new(192, 168, 0, 1))));
|
||||
|
||||
let path = url.to_file_path().unwrap();
|
||||
assert_eq!(path.to_str(), Some(r"\\192.168.0.1\share\path\file.txt"));
|
||||
|
||||
// Another way to write these:
|
||||
let url = Url::from_file_path(Path::new(r"\\?\UNC\host\share\path\file.txt")).unwrap();
|
||||
assert_eq!(url.as_str(), "file://host/share/path/file.txt");
|
||||
|
||||
// Paths starting with "\\.\" (Local Device Paths) are intentionally not supported.
|
||||
let url = Url::from_file_path(Path::new(r"\\.\some\path\file.txt"));
|
||||
assert!(url.is_err());
|
||||
}
|
||||
|
||||
25 toolkit/library/gtest/rust/Cargo.lock generated
@@ -445,12 +445,12 @@ dependencies = [

[[package]]
name = "idna"
version = "0.1.2"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-bidi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-normalization 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
@@ -653,6 +653,11 @@ name = "peeking_take_while"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"

[[package]]
name = "percent-encoding"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"

[[package]]
name = "phf"
version = "0.7.21"
@@ -811,7 +816,7 @@ dependencies = [
"libc 0.2.24 (registry+https://github.com/rust-lang/crates.io-index)",
"nserror 0.1.0",
"nsstring 0.1.0",
"url 1.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
@@ -1129,7 +1134,7 @@ dependencies = [

[[package]]
name = "unicode-normalization"
version = "0.1.4"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"

[[package]]
@@ -1157,11 +1162,12 @@ dependencies = [

[[package]]
name = "url"
version = "1.4.1"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"idna 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
@@ -1310,7 +1316,7 @@ dependencies = [
"checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
"checksum heapsize 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "5a376f7402b85be6e0ba504243ecbc0709c48019ecc6286d0540c2e359050c88"
"checksum heapsize 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4c7593b1522161003928c959c20a2ca421c68e940d63d75573316a009e48a6d4"
"checksum idna 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2233d4940b1f19f0418c158509cd7396b8d70a5db5705ce410914dc8fa603b37"
"checksum idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "014b298351066f1512874135335d62a789ffe78a9974f94b43ed5621951eaf7d"
"checksum itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4833d6978da405305126af4ac88569b5d71ff758581ce5a987dbfa3755f694fc"
"checksum itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eb2f404fbc66fd9aac13e998248505e7ecb2ad8e44ab6388684c5fb11c6c251c"
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
@@ -1333,6 +1339,7 @@ dependencies = [
"checksum parking_lot_core 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "56a19dcbb5d1e32b6cccb8a9aa1fc2a38418c8699652e735e2bf391a3dc0aa16"
"checksum pdqsort 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ceca1642c89148ca05611cc775a0c383abef355fc4907c4e95f49f7b09d6287c"
"checksum peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
"checksum percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de154f638187706bde41d9b4738748933d64e6b37bdbffc0b47a97d16a6ae356"
"checksum phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "cb325642290f28ee14d8c6201159949a872f220c62af6e110a56ea914fbe42fc"
"checksum phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "d62594c0bb54c464f633175d502038177e90309daf2e0158be42ed5f023ce88f"
"checksum phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "6b07ffcc532ccc85e3afc45865469bf5d9e4ef5bfcf9622e3cfe80c2d275ec03"
@@ -1379,12 +1386,12 @@ dependencies = [
"checksum time 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)" = "211b63c112206356ef1ff9b19355f43740fc3f85960c598a93d3a3d3ba7beade"
"checksum toml 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "736b60249cb25337bc196faa43ee12c705e426f3d55c214d73a4e7be06f92cb4"
"checksum unicode-bidi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a6a2c4e3710edd365cd7e78383153ed739fa31af19f9172f72d3575060f5a43a"
"checksum unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e28fa37426fceeb5cf8f41ee273faa7c82c47dc8fba5853402841e665fcd86ff"
"checksum unicode-normalization 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "51ccda9ef9efa3f7ef5d91e8f9b83bbe6955f9bf86aec89d5cce2c874625920f"
"checksum unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18127285758f0e2c6cf325bb3f3d138a12fee27de4f23e146cd6a179f26c2cf3"
"checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
"checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc"
"checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91"
"checksum url 1.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3e2ba3456fbe5c0098cb877cf08b92b76c3e18e0be9e47c35b487220d377d24e"
"checksum url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eeb819346883532a271eb626deb43c4a1bb4c4dd47c519bd78137c3e72a4fe27"
"checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122"
"checksum vec_map 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "887b5b631c2ad01628bbbaa7dd4c869f80d3186688f8d0b6f58774fbe324988c"
"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
25
toolkit/library/rust/Cargo.lock
generated
@@ -443,12 +443,12 @@ dependencies = [

[[package]]
name = "idna"
version = "0.1.2"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-bidi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-normalization 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
@@ -640,6 +640,11 @@ name = "peeking_take_while"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"

[[package]]
name = "percent-encoding"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"

[[package]]
name = "phf"
version = "0.7.21"
@@ -798,7 +803,7 @@ dependencies = [
"libc 0.2.24 (registry+https://github.com/rust-lang/crates.io-index)",
"nserror 0.1.0",
"nsstring 0.1.0",
"url 1.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
@@ -1116,7 +1121,7 @@ dependencies = [

[[package]]
name = "unicode-normalization"
version = "0.1.4"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"

[[package]]
@@ -1144,11 +1149,12 @@ dependencies = [

[[package]]
name = "url"
version = "1.4.1"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"idna 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
@@ -1297,7 +1303,7 @@ dependencies = [
"checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
"checksum heapsize 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "5a376f7402b85be6e0ba504243ecbc0709c48019ecc6286d0540c2e359050c88"
"checksum heapsize 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4c7593b1522161003928c959c20a2ca421c68e940d63d75573316a009e48a6d4"
"checksum idna 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2233d4940b1f19f0418c158509cd7396b8d70a5db5705ce410914dc8fa603b37"
"checksum idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "014b298351066f1512874135335d62a789ffe78a9974f94b43ed5621951eaf7d"
"checksum itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4833d6978da405305126af4ac88569b5d71ff758581ce5a987dbfa3755f694fc"
"checksum itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eb2f404fbc66fd9aac13e998248505e7ecb2ad8e44ab6388684c5fb11c6c251c"
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
@@ -1320,6 +1326,7 @@ dependencies = [
"checksum parking_lot_core 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "56a19dcbb5d1e32b6cccb8a9aa1fc2a38418c8699652e735e2bf391a3dc0aa16"
"checksum pdqsort 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ceca1642c89148ca05611cc775a0c383abef355fc4907c4e95f49f7b09d6287c"
"checksum peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
"checksum percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de154f638187706bde41d9b4738748933d64e6b37bdbffc0b47a97d16a6ae356"
"checksum phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "cb325642290f28ee14d8c6201159949a872f220c62af6e110a56ea914fbe42fc"
"checksum phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "d62594c0bb54c464f633175d502038177e90309daf2e0158be42ed5f023ce88f"
"checksum phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "6b07ffcc532ccc85e3afc45865469bf5d9e4ef5bfcf9622e3cfe80c2d275ec03"
@@ -1366,12 +1373,12 @@ dependencies = [
"checksum time 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)" = "211b63c112206356ef1ff9b19355f43740fc3f85960c598a93d3a3d3ba7beade"
"checksum toml 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "736b60249cb25337bc196faa43ee12c705e426f3d55c214d73a4e7be06f92cb4"
"checksum unicode-bidi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a6a2c4e3710edd365cd7e78383153ed739fa31af19f9172f72d3575060f5a43a"
"checksum unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e28fa37426fceeb5cf8f41ee273faa7c82c47dc8fba5853402841e665fcd86ff"
"checksum unicode-normalization 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "51ccda9ef9efa3f7ef5d91e8f9b83bbe6955f9bf86aec89d5cce2c874625920f"
"checksum unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18127285758f0e2c6cf325bb3f3d138a12fee27de4f23e146cd6a179f26c2cf3"
"checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
"checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc"
"checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91"
"checksum url 1.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3e2ba3456fbe5c0098cb877cf08b92b76c3e18e0be9e47c35b487220d377d24e"
"checksum url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eeb819346883532a271eb626deb43c4a1bb4c4dd47c519bd78137c3e72a4fe27"
"checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122"
"checksum vec_map 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "887b5b631c2ad01628bbbaa7dd4c869f80d3186688f8d0b6f58774fbe324988c"
"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"