Bug 1383831 - Upgrade rust-url to version 1.5.1 r=froydnj

MozReview-Commit-ID: L3j6K5dDWHi
This commit is contained in:
Valentin Gosu
2017-08-02 13:44:48 +03:00
parent fefc455410
commit bca4013ce3
39 changed files with 34872 additions and 23432 deletions

View File

@@ -8,6 +8,6 @@ name = "rust_url_capi"
[dependencies] [dependencies]
libc = "0.2.0" libc = "0.2.0"
url = "1.4.0" url = "1.5.1"
nsstring = { path = "../../../xpcom/rust/nsstring" } nsstring = { path = "../../../xpcom/rust/nsstring" }
nserror = { path = "../../../xpcom/rust/nserror" } nserror = { path = "../../../xpcom/rust/nserror" }

View File

@@ -181,17 +181,17 @@ dependencies = [
"traitobject 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "traitobject 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"typeable 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "typeable 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"unicase 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "unicase 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.4.1 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
name = "idna" name = "idna"
version = "0.1.2" version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-bidi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-bidi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-normalization 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@@ -364,6 +364,11 @@ dependencies = [
"libc 0.2.24 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.24 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "percent-encoding"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "podio" name = "podio"
version = "0.1.5" version = "0.1.5"
@@ -580,7 +585,7 @@ dependencies = [
[[package]] [[package]]
name = "unicode-normalization" name = "unicode-normalization"
version = "0.1.4" version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
@@ -603,11 +608,12 @@ dependencies = [
[[package]] [[package]]
name = "url" name = "url"
version = "1.4.1" version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"idna 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@@ -646,7 +652,7 @@ dependencies = [
"regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.4.1 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@@ -703,7 +709,7 @@ dependencies = [
"checksum gcc 0.3.42 (registry+https://github.com/rust-lang/crates.io-index)" = "291055c78f59ca3d84c99026c9501c469413d386bb46be1e1cf1d285cd1db3b0" "checksum gcc 0.3.42 (registry+https://github.com/rust-lang/crates.io-index)" = "291055c78f59ca3d84c99026c9501c469413d386bb46be1e1cf1d285cd1db3b0"
"checksum httparse 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "77f756bed9ee3a83ce98774f4155b42a31b787029013f3a7d83eca714e500e21" "checksum httparse 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "77f756bed9ee3a83ce98774f4155b42a31b787029013f3a7d83eca714e500e21"
"checksum hyper 0.10.10 (registry+https://github.com/rust-lang/crates.io-index)" = "36e108e0b1fa2d17491cbaac4bc460dc0956029d10ccf83c913dd0e5db3e7f07" "checksum hyper 0.10.10 (registry+https://github.com/rust-lang/crates.io-index)" = "36e108e0b1fa2d17491cbaac4bc460dc0956029d10ccf83c913dd0e5db3e7f07"
"checksum idna 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2233d4940b1f19f0418c158509cd7396b8d70a5db5705ce410914dc8fa603b37" "checksum idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "014b298351066f1512874135335d62a789ffe78a9974f94b43ed5621951eaf7d"
"checksum isatty 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "fa500db770a99afe2a0f2229be2a3d09c7ed9d7e4e8440bf71253141994e240f" "checksum isatty 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "fa500db770a99afe2a0f2229be2a3d09c7ed9d7e4e8440bf71253141994e240f"
"checksum kernel32-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e014dab1082fd9d80ea1fa6fcb261b47ed3eb511612a14198bb507701add083e" "checksum kernel32-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e014dab1082fd9d80ea1fa6fcb261b47ed3eb511612a14198bb507701add083e"
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
@@ -726,6 +732,7 @@ dependencies = [
"checksum num-iter 0.1.33 (registry+https://github.com/rust-lang/crates.io-index)" = "f7d1891bd7b936f12349b7d1403761c8a0b85a18b148e9da4429d5d102c1a41e" "checksum num-iter 0.1.33 (registry+https://github.com/rust-lang/crates.io-index)" = "f7d1891bd7b936f12349b7d1403761c8a0b85a18b148e9da4429d5d102c1a41e"
"checksum num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "e1cbfa3781f3fe73dc05321bed52a06d2d491eaa764c52335cf4399f046ece99" "checksum num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "e1cbfa3781f3fe73dc05321bed52a06d2d491eaa764c52335cf4399f046ece99"
"checksum num_cpus 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a225d1e2717567599c24f88e49f00856c6e825a12125181ee42c4257e3688d39" "checksum num_cpus 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a225d1e2717567599c24f88e49f00856c6e825a12125181ee42c4257e3688d39"
"checksum percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de154f638187706bde41d9b4738748933d64e6b37bdbffc0b47a97d16a6ae356"
"checksum podio 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e5422a1ee1bc57cc47ae717b0137314258138f38fd5f3cea083f43a9725383a0" "checksum podio 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e5422a1ee1bc57cc47ae717b0137314258138f38fd5f3cea083f43a9725383a0"
"checksum rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "022e0636ec2519ddae48154b028864bdce4eaf7d35226ab8e65c611be97b189d" "checksum rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "022e0636ec2519ddae48154b028864bdce4eaf7d35226ab8e65c611be97b189d"
"checksum redox_syscall 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "8dd35cc9a8bdec562c757e3d43c1526b5c6d2653e23e2315065bc25556550753" "checksum redox_syscall 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "8dd35cc9a8bdec562c757e3d43c1526b5c6d2653e23e2315065bc25556550753"
@@ -754,11 +761,11 @@ dependencies = [
"checksum typeable 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1410f6f91f21d1612654e7cc69193b0334f909dcf2c790c4826254fbb86f8887" "checksum typeable 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1410f6f91f21d1612654e7cc69193b0334f909dcf2c790c4826254fbb86f8887"
"checksum unicase 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "13a5906ca2b98c799f4b1ab4557b76367ebd6ae5ef14930ec841c74aed5f3764" "checksum unicase 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "13a5906ca2b98c799f4b1ab4557b76367ebd6ae5ef14930ec841c74aed5f3764"
"checksum unicode-bidi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a6a2c4e3710edd365cd7e78383153ed739fa31af19f9172f72d3575060f5a43a" "checksum unicode-bidi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a6a2c4e3710edd365cd7e78383153ed739fa31af19f9172f72d3575060f5a43a"
"checksum unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e28fa37426fceeb5cf8f41ee273faa7c82c47dc8fba5853402841e665fcd86ff" "checksum unicode-normalization 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "51ccda9ef9efa3f7ef5d91e8f9b83bbe6955f9bf86aec89d5cce2c874625920f"
"checksum unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18127285758f0e2c6cf325bb3f3d138a12fee27de4f23e146cd6a179f26c2cf3" "checksum unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18127285758f0e2c6cf325bb3f3d138a12fee27de4f23e146cd6a179f26c2cf3"
"checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f" "checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
"checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91" "checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91"
"checksum url 1.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3e2ba3456fbe5c0098cb877cf08b92b76c3e18e0be9e47c35b487220d377d24e" "checksum url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eeb819346883532a271eb626deb43c4a1bb4c4dd47c519bd78137c3e72a4fe27"
"checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122" "checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122"
"checksum uuid 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)" = "78c590b5bd79ed10aad8fb75f078a59d8db445af6c743e55c4a53227fc01c13f" "checksum uuid 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)" = "78c590b5bd79ed10aad8fb75f078a59d8db445af6c743e55c4a53227fc01c13f"
"checksum vec_map 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "887b5b631c2ad01628bbbaa7dd4c869f80d3186688f8d0b6f58774fbe324988c" "checksum vec_map 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "887b5b631c2ad01628bbbaa7dd4c869f80d3186688f8d0b6f58774fbe324988c"

View File

@@ -1 +1 @@
{"files":{".cargo-ok":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","Cargo.toml":"35fb5d8cfa50a27a476e718e437bfd1405ae4d38ddeb87a7d3404af67f8f6baa","src/IdnaMappingTable.txt":"a20be6e70dd1e48c2d15615455fef6098ba104756e5e37427bf8bd919b9d1118","src/lib.rs":"1fbd82781f2402e3dd3017673e7f2b2c40c8272c5a550cef7695f4e59df90c01","src/make_uts46_mapping_table.py":"60201ddefb8a3cb6f344b79808e5f93dfef5e21dcbacd4c8a0a36f172701c486","src/punycode.rs":"efb547848493d735aab32a0d0b2a2c795360ca9706272412524738794a540223","src/uts46.rs":"e6bb573e1469e9c0b6b83353083120696eb36d224821af5e3f39e8c397870877","src/uts46_mapping_table.rs":"88c01d8bcbd32741a9f3f7ea2cfb9e7e9883e3f83ee2de024d03bdc65a62d7f8","tests/IdnaTest.txt":"24817204a6dc010b91e98d899a8df627a94471f8893d703afca1d022f808c887","tests/punycode.rs":"57854e04949a43ed4b6b263c24d7d6502617a31e439cebb5b1cbd8cbce013dfb","tests/punycode_tests.json":"3d4ac0cf25984c37b9ce197f5df680a0136f728fb8ec82bc76624e42139eb3a8","tests/tests.rs":"bb92e129dc5e17e9a86ec6062dd7b3f4c905c4af69e773d7c70efea177654c7b","tests/uts46.rs":"4723a16d52e453b136a763fd883e48db5f198c45476b541c1917ed44725c3c7f"},"package":"2233d4940b1f19f0418c158509cd7396b8d70a5db5705ce410914dc8fa603b37"} 
{"files":{".cargo-ok":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","Cargo.toml":"82f46006a9e4959473d4426a9e4254172c5bb85fc191089dcda0b556e2b8e8be","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"20c7855c364d57ea4c97889a5e8d98470a9952dade37bd9248b9a54431670e5e","src/IdnaMappingTable.txt":"813a8308aeff8bcb9368751e1fd0ad7cc467130965d53ac860f82c4d0d11523f","src/lib.rs":"1fbd82781f2402e3dd3017673e7f2b2c40c8272c5a550cef7695f4e59df90c01","src/make_uts46_mapping_table.py":"3fa8cf34744fa0f531a77bd5d4e2231df85aa29bc82abed6e269fd9d9f33be6b","src/punycode.rs":"efb547848493d735aab32a0d0b2a2c795360ca9706272412524738794a540223","src/uts46.rs":"7ce58359fba57690ecf526ffd9031f76fb413d371a040d4e0a641973dcb32c6c","src/uts46_mapping_table.rs":"25c88d5ea382b8dc0880d5d48205c4007a80186f2a17e563d2f40462f29199e4","tests/IdnaTest.txt":"921c68e5d3fbb631b26140d232af90040fc4df612857d1894641ded319e52822","tests/punycode.rs":"57854e04949a43ed4b6b263c24d7d6502617a31e439cebb5b1cbd8cbce013dfb","tests/punycode_tests.json":"3d4ac0cf25984c37b9ce197f5df680a0136f728fb8ec82bc76624e42139eb3a8","tests/tests.rs":"d9f4ab9d8fc43b2f81031c45fe16f4013a866091797be695e4115478572e3965","tests/unit.rs":"d2993b27bc6242f2c0315c66cfc1875187b329980569571adfc17c302d266d3f","tests/uts46.rs":"4723a16d52e453b136a763fd883e48db5f198c45476b541c1917ed44725c3c7f"},"package":"014b298351066f1512874135335d62a789ffe78a9974f94b43ed5621951eaf7d"}

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "idna" name = "idna"
version = "0.1.2" version = "0.1.4"
authors = ["The rust-url developers"] authors = ["The rust-url developers"]
description = "IDNA (Internationalizing Domain Names in Applications) and Punycode." description = "IDNA (Internationalizing Domain Names in Applications) and Punycode."
repository = "https://github.com/servo/rust-url/" repository = "https://github.com/servo/rust-url/"
@@ -14,11 +14,14 @@ test = false
name = "tests" name = "tests"
harness = false harness = false
[[test]]
name = "unit"
[dev-dependencies] [dev-dependencies]
rustc-test = "0.1" rustc-test = "0.2"
rustc-serialize = "0.3" rustc-serialize = "0.3"
[dependencies] [dependencies]
unicode-bidi = "0.3" unicode-bidi = "0.3"
unicode-normalization = "0.1.3" unicode-normalization = "0.1.5"
matches = "0.1" matches = "0.1"

201
third_party/rust/idna/LICENSE-APACHE vendored Normal file
View File

@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

25
third_party/rust/idna/LICENSE-MIT vendored Normal file
View File

@@ -0,0 +1,25 @@
Copyright (c) 2013-2016 The rust-url developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

File diff suppressed because it is too large Load Diff

View File

@@ -51,7 +51,15 @@ def strtab_slice(s):
return c return c
def rust_slice(s): def rust_slice(s):
return "(StringTableSlice { byte_start: %d, byte_len: %d })" % s start = s[0]
length = s[1]
start_lo = start & 0xff
start_hi = start >> 8
assert length <= 255
assert start_hi <= 255
return "(StringTableSlice { byte_start_lo: %d, byte_start_hi: %d, byte_len: %d })" % (start_lo, start_hi, length)
ranges = []
for line in txt: for line in txt:
# remove comments # remove comments
@@ -66,12 +74,58 @@ for line in txt:
if not last: if not last:
last = first last = first
mapping = fields[1].strip().replace('_', ' ').title().replace(' ', '') mapping = fields[1].strip().replace('_', ' ').title().replace(' ', '')
unicode_str = None
if len(fields) > 2: if len(fields) > 2:
if fields[2].strip(): if fields[2].strip():
unicode_str = u''.join(char(c) for c in fields[2].strip().split(' ')) unicode_str = u''.join(char(c) for c in fields[2].strip().split(' '))
mapping += rust_slice(strtab_slice(unicode_str))
elif mapping == "Deviation": elif mapping == "Deviation":
mapping += rust_slice(strtab_slice('')) unicode_str = u''
ranges.append((first, last, mapping, unicode_str))
def mergeable_key(r):
mapping = r[2]
# These types have associated data, so we should not merge them.
if mapping in ('Mapped', 'Deviation', 'DisallowedStd3Mapped'):
return r
assert mapping in ('Valid', 'Ignored', 'Disallowed', 'DisallowedStd3Valid')
return mapping
grouped_ranges = itertools.groupby(ranges, key=mergeable_key)
optimized_ranges = []
for (k, g) in grouped_ranges:
group = list(g)
if len(group) == 1:
optimized_ranges.append(group[0])
continue
# Assert that nothing in the group has an associated unicode string.
for g in group:
if g[3] is not None and len(g[3]) > 2:
assert not g[3][2].strip()
# Assert that consecutive members of the group don't leave gaps in
# the codepoint space.
a, b = itertools.tee(group)
next(b, None)
for (g1, g2) in itertools.izip(a, b):
last_char = int(g1[1], 16)
next_char = int(g2[0], 16)
if last_char + 1 == next_char:
continue
# There's a gap where surrogates would appear, but we don't have to
# worry about that gap, as surrogates never appear in Rust strings.
# Assert we're seeing the surrogate case here.
assert last_char == 0xd7ff
assert next_char == 0xe000
first = group[0][0]
last = group[-1][1]
mapping = group[0][2]
unicode_str = group[0][3]
optimized_ranges.append((first, last, mapping, unicode_str))
for (first, last, mapping, unicode_str) in optimized_ranges:
if unicode_str is not None:
mapping += rust_slice(strtab_slice(unicode_str))
print(" Range { from: '%s', to: '%s', mapping: %s }," % (escape_char(char(first)), print(" Range { from: '%s', to: '%s', mapping: %s }," % (escape_char(char(first)),
escape_char(char(last)), escape_char(char(last)),
mapping)) mapping))

View File

@@ -12,25 +12,35 @@
use self::Mapping::*; use self::Mapping::*;
use punycode; use punycode;
use std::ascii::AsciiExt; use std::ascii::AsciiExt;
use std::cmp::Ordering::{Equal, Less, Greater};
use unicode_bidi::{BidiClass, bidi_class};
use unicode_normalization::UnicodeNormalization; use unicode_normalization::UnicodeNormalization;
use unicode_normalization::char::is_combining_mark; use unicode_normalization::char::is_combining_mark;
use unicode_bidi::{BidiClass, bidi_class};
include!("uts46_mapping_table.rs"); include!("uts46_mapping_table.rs");
pub static PUNYCODE_PREFIX: &'static str = "xn--";
#[derive(Debug)] #[derive(Debug)]
struct StringTableSlice { struct StringTableSlice {
byte_start: u16, // Store these as separate fields so the structure will have an
byte_len: u16, // alignment of 1 and thus pack better into the Mapping enum, below.
byte_start_lo: u8,
byte_start_hi: u8,
byte_len: u8,
} }
fn decode_slice(slice: &StringTableSlice) -> &'static str { fn decode_slice(slice: &StringTableSlice) -> &'static str {
let start = slice.byte_start as usize; let lo = slice.byte_start_lo as usize;
let hi = slice.byte_start_hi as usize;
let start = (hi << 8) | lo;
let len = slice.byte_len as usize; let len = slice.byte_len as usize;
&STRING_TABLE[start..(start + len)] &STRING_TABLE[start..(start + len)]
} }
#[repr(u16)] #[repr(u8)]
#[derive(Debug)] #[derive(Debug)]
enum Mapping { enum Mapping {
Valid, Valid,
@@ -49,20 +59,16 @@ struct Range {
} }
fn find_char(codepoint: char) -> &'static Mapping { fn find_char(codepoint: char) -> &'static Mapping {
let mut min = 0; let r = TABLE.binary_search_by(|ref range| {
let mut max = TABLE.len() - 1; if codepoint > range.to {
while max > min { Less
let mid = (min + max) >> 1; } else if codepoint < range.from {
if codepoint > TABLE[mid].to { Greater
min = mid;
} else if codepoint < TABLE[mid].from {
max = mid;
} else { } else {
min = mid; Equal
max = mid;
} }
} });
&TABLE[min].mapping r.ok().map(|i| &TABLE[i].mapping).unwrap()
} }
fn map_char(codepoint: char, flags: Flags, output: &mut String, errors: &mut Vec<Error>) { fn map_char(codepoint: char, flags: Flags, output: &mut String, errors: &mut Vec<Error>) {
@@ -97,135 +103,177 @@ fn map_char(codepoint: char, flags: Flags, output: &mut String, errors: &mut Vec
} }
// http://tools.ietf.org/html/rfc5893#section-2 // http://tools.ietf.org/html/rfc5893#section-2
fn passes_bidi(label: &str, transitional_processing: bool) -> bool { fn passes_bidi(label: &str, is_bidi_domain: bool) -> bool {
// Rule 0: Bidi Rules apply to Bidi Domain Names: a name with at least one RTL label. A label
// is RTL if it contains at least one character of bidi class R, AL or AN.
if !is_bidi_domain {
return true;
}
let mut chars = label.chars(); let mut chars = label.chars();
let class = match chars.next() { let first_char_class = match chars.next() {
Some(c) => bidi_class(c), Some(c) => bidi_class(c),
None => return true, // empty string None => return true, // empty string
}; };
if class == BidiClass::L match first_char_class {
|| (class == BidiClass::ON && transitional_processing) // starts with \u200D // LTR label
|| (class == BidiClass::ES && transitional_processing) // hack: 1.35.+33.49 BidiClass::L => {
|| class == BidiClass::EN // hack: starts with number 0à.\u05D0 // Rule 5
{ // LTR loop {
// Rule 5 match chars.next() {
loop { Some(c) => {
match chars.next() { if !matches!(bidi_class(c),
Some(c) => { BidiClass::L | BidiClass::EN |
let c = bidi_class(c); BidiClass::ES | BidiClass::CS |
if !matches!(c, BidiClass::L | BidiClass::EN | BidiClass::ET | BidiClass::ON |
BidiClass::ES | BidiClass::CS | BidiClass::BN | BidiClass::NSM
BidiClass::ET | BidiClass::ON | ) {
BidiClass::BN | BidiClass::NSM) { return false;
return false; }
} },
}, None => { break; },
None => { break; },
}
}
// Rule 6
let mut rev_chars = label.chars().rev();
let mut last = rev_chars.next();
loop { // must end in L or EN followed by 0 or more NSM
match last {
Some(c) if bidi_class(c) == BidiClass::NSM => {
last = rev_chars.next();
continue;
} }
_ => { break; },
} }
}
// TODO: does not pass for àˇ.\u05D0 // Rule 6
// match last { // must end in L or EN followed by 0 or more NSM
// Some(c) if bidi_class(c) == BidiClass::L let mut rev_chars = label.chars().rev();
// || bidi_class(c) == BidiClass::EN => {}, let mut last_non_nsm = rev_chars.next();
// Some(c) => { return false; }, loop {
// _ => {} match last_non_nsm {
// } Some(c) if bidi_class(c) == BidiClass::NSM => {
last_non_nsm = rev_chars.next();
} else if class == BidiClass::R || class == BidiClass::AL { // RTL continue;
let mut found_en = false;
let mut found_an = false;
// Rule 2
loop {
match chars.next() {
Some(c) => {
let char_class = bidi_class(c);
if char_class == BidiClass::EN {
found_en = true;
} }
if char_class == BidiClass::AN { _ => { break; },
found_an = true;
}
if !matches!(char_class, BidiClass::R | BidiClass::AL |
BidiClass::AN | BidiClass::EN |
BidiClass::ES | BidiClass::CS |
BidiClass::ET | BidiClass::ON |
BidiClass::BN | BidiClass::NSM) {
return false;
}
},
None => { break; },
}
}
// Rule 3
let mut rev_chars = label.chars().rev();
let mut last = rev_chars.next();
loop { // must end in L or EN followed by 0 or more NSM
match last {
Some(c) if bidi_class(c) == BidiClass::NSM => {
last = rev_chars.next();
continue;
} }
_ => { break; },
} }
} match last_non_nsm {
match last { Some(c) if bidi_class(c) == BidiClass::L
Some(c) if matches!(bidi_class(c), BidiClass::R | BidiClass::AL | || bidi_class(c) == BidiClass::EN => {},
BidiClass::EN | BidiClass::AN) => {}, Some(_) => { return false; },
_ => { return false; } _ => {}
}
} }
// Rule 4 // RTL label
if found_an && found_en { BidiClass::R | BidiClass::AL => {
let mut found_en = false;
let mut found_an = false;
// Rule 2
loop {
match chars.next() {
Some(c) => {
let char_class = bidi_class(c);
if char_class == BidiClass::EN {
found_en = true;
}
if char_class == BidiClass::AN {
found_an = true;
}
if !matches!(char_class, BidiClass::R | BidiClass::AL |
BidiClass::AN | BidiClass::EN |
BidiClass::ES | BidiClass::CS |
BidiClass::ET | BidiClass::ON |
BidiClass::BN | BidiClass::NSM) {
return false;
}
},
None => { break; },
}
}
// Rule 3
let mut rev_chars = label.chars().rev();
let mut last = rev_chars.next();
loop { // must end in L or EN followed by 0 or more NSM
match last {
Some(c) if bidi_class(c) == BidiClass::NSM => {
last = rev_chars.next();
continue;
}
_ => { break; },
}
}
match last {
Some(c) if matches!(bidi_class(c), BidiClass::R | BidiClass::AL |
BidiClass::EN | BidiClass::AN) => {},
_ => { return false; }
}
// Rule 4
if found_an && found_en {
return false;
}
}
// Rule 1: Should start with L or R/AL
_ => {
return false; return false;
} }
} else {
// Rule 2: Should start with L or R/AL
return false;
} }
return true; return true;
} }
/// http://www.unicode.org/reports/tr46/#Validity_Criteria /// http://www.unicode.org/reports/tr46/#Validity_Criteria
fn validate(label: &str, flags: Flags, errors: &mut Vec<Error>) { fn validate(label: &str, is_bidi_domain: bool, flags: Flags, errors: &mut Vec<Error>) {
if label.nfc().ne(label.chars()) { let first_char = label.chars().next();
if first_char == None {
// Empty string, pass
}
// V1: Must be in NFC form.
else if label.nfc().ne(label.chars()) {
errors.push(Error::ValidityCriteria); errors.push(Error::ValidityCriteria);
} }
// Can not contain '.' since the input is from .split('.') // V2: No U+002D HYPHEN-MINUS in both third and fourth positions.
// Spec says that the label must not contain a HYPHEN-MINUS character in both the //
// NOTE: Spec says that the label must not contain a HYPHEN-MINUS character in both the
// third and fourth positions. But nobody follows this criteria. See the spec issue below: // third and fourth positions. But nobody follows this criteria. See the spec issue below:
// https://github.com/whatwg/url/issues/53 // https://github.com/whatwg/url/issues/53
if label.starts_with("-") //
|| label.ends_with("-") // TODO: Add *CheckHyphens* flag.
|| label.chars().next().map_or(false, is_combining_mark)
|| label.chars().any(|c| match *find_char(c) { // V3: neither begin nor end with a U+002D HYPHEN-MINUS
Mapping::Valid => false, else if label.starts_with("-") || label.ends_with("-") {
Mapping::Deviation(_) => flags.transitional_processing, errors.push(Error::ValidityCriteria);
Mapping::DisallowedStd3Valid => flags.use_std3_ascii_rules, }
_ => true,
}) // V4: not contain a U+002E FULL STOP
|| !passes_bidi(label, flags.transitional_processing) //
// Here, label can't contain '.' since the input is from .split('.')
// V5: not begin with a GC=Mark
else if is_combining_mark(first_char.unwrap()) {
errors.push(Error::ValidityCriteria);
}
// V6: Check against Mapping Table
else if label.chars().any(|c| match *find_char(c) {
Mapping::Valid => false,
Mapping::Deviation(_) => flags.transitional_processing,
Mapping::DisallowedStd3Valid => flags.use_std3_ascii_rules,
_ => true,
}) {
errors.push(Error::ValidityCriteria);
}
// V7: ContextJ rules
//
// TODO: Implement rules and add *CheckJoiners* flag.
// V8: Bidi rules
//
// TODO: Add *CheckBidi* flag
else if !passes_bidi(label, is_bidi_domain)
{ {
errors.push(Error::ValidityCriteria) errors.push(Error::ValidityCriteria);
} }
} }
@@ -236,22 +284,51 @@ fn processing(domain: &str, flags: Flags, errors: &mut Vec<Error>) -> String {
map_char(c, flags, &mut mapped, errors) map_char(c, flags, &mut mapped, errors)
} }
let normalized: String = mapped.nfc().collect(); let normalized: String = mapped.nfc().collect();
// Find out if it's a Bidi Domain Name
//
// First, check for literal bidi chars
let mut is_bidi_domain = domain.chars().any(|c|
matches!(bidi_class(c), BidiClass::R | BidiClass::AL | BidiClass::AN)
);
if !is_bidi_domain {
// Then check for punycode-encoded bidi chars
for label in normalized.split('.') {
if label.starts_with(PUNYCODE_PREFIX) {
match punycode::decode_to_string(&label[PUNYCODE_PREFIX.len()..]) {
Some(decoded_label) => {
if decoded_label.chars().any(|c|
matches!(bidi_class(c), BidiClass::R | BidiClass::AL | BidiClass::AN)
) {
is_bidi_domain = true;
}
}
None => {
is_bidi_domain = true;
}
}
}
}
}
let mut validated = String::new(); let mut validated = String::new();
let mut first = true;
for label in normalized.split('.') { for label in normalized.split('.') {
if validated.len() > 0 { if !first {
validated.push('.'); validated.push('.');
} }
if label.starts_with("xn--") { first = false;
match punycode::decode_to_string(&label["xn--".len()..]) { if label.starts_with(PUNYCODE_PREFIX) {
match punycode::decode_to_string(&label[PUNYCODE_PREFIX.len()..]) {
Some(decoded_label) => { Some(decoded_label) => {
let flags = Flags { transitional_processing: false, ..flags }; let flags = Flags { transitional_processing: false, ..flags };
validate(&decoded_label, flags, errors); validate(&decoded_label, is_bidi_domain, flags, errors);
validated.push_str(&decoded_label) validated.push_str(&decoded_label)
} }
None => errors.push(Error::PunycodeError) None => errors.push(Error::PunycodeError)
} }
} else { } else {
validate(label, flags, errors); validate(label, is_bidi_domain, flags, errors);
validated.push_str(label) validated.push_str(label)
} }
} }
@@ -273,6 +350,7 @@ enum Error {
DissallowedMappedInStd3, DissallowedMappedInStd3,
DissallowedCharacter, DissallowedCharacter,
TooLongForDns, TooLongForDns,
TooShortForDns,
} }
/// Errors recorded during UTS #46 processing. /// Errors recorded during UTS #46 processing.
@@ -286,16 +364,18 @@ pub struct Errors(Vec<Error>);
pub fn to_ascii(domain: &str, flags: Flags) -> Result<String, Errors> { pub fn to_ascii(domain: &str, flags: Flags) -> Result<String, Errors> {
let mut errors = Vec::new(); let mut errors = Vec::new();
let mut result = String::new(); let mut result = String::new();
let mut first = true;
for label in processing(domain, flags, &mut errors).split('.') { for label in processing(domain, flags, &mut errors).split('.') {
if result.len() > 0 { if !first {
result.push('.'); result.push('.');
} }
first = false;
if label.is_ascii() { if label.is_ascii() {
result.push_str(label); result.push_str(label);
} else { } else {
match punycode::encode_str(label) { match punycode::encode_str(label) {
Some(x) => { Some(x) => {
result.push_str("xn--"); result.push_str(PUNYCODE_PREFIX);
result.push_str(&x); result.push_str(&x);
}, },
None => errors.push(Error::PunycodeError) None => errors.push(Error::PunycodeError)
@@ -305,8 +385,10 @@ pub fn to_ascii(domain: &str, flags: Flags) -> Result<String, Errors> {
if flags.verify_dns_length { if flags.verify_dns_length {
let domain = if result.ends_with(".") { &result[..result.len()-1] } else { &*result }; let domain = if result.ends_with(".") { &result[..result.len()-1] } else { &*result };
if domain.len() < 1 || domain.len() > 253 || if domain.len() < 1 || domain.split('.').any(|label| label.len() < 1) {
domain.split('.').any(|label| label.len() < 1 || label.len() > 63) { errors.push(Error::TooShortForDns)
}
if domain.len() > 253 || domain.split('.').any(|label| label.len() > 63) {
errors.push(Error::TooLongForDns) errors.push(Error::TooLongForDns)
} }
} }

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -10,11 +10,7 @@ fn main() {
{ {
let mut add_test = |name, run| { let mut add_test = |name, run| {
tests.push(test::TestDescAndFn { tests.push(test::TestDescAndFn {
desc: test::TestDesc { desc: test::TestDesc::new(test::DynTestName(name)),
name: test::DynTestName(name),
ignore: false,
should_panic: test::ShouldPanic::No,
},
testfn: run, testfn: run,
}) })
}; };

40
third_party/rust/idna/tests/unit.rs vendored Normal file
View File

@@ -0,0 +1,40 @@
extern crate idna;
extern crate unicode_normalization;
use idna::uts46;
use unicode_normalization::char::is_combining_mark;
fn _to_ascii(domain: &str) -> Result<String, uts46::Errors> {
uts46::to_ascii(domain, uts46::Flags {
transitional_processing: false,
use_std3_ascii_rules: true,
verify_dns_length: true,
})
}
#[test]
fn test_v5() {
// IdnaTest:784 蔏。𑰺
assert!(is_combining_mark('\u{11C3A}'));
assert!(_to_ascii("\u{11C3A}").is_err());
assert!(_to_ascii("\u{850f}.\u{11C3A}").is_err());
assert!(_to_ascii("\u{850f}\u{ff61}\u{11C3A}").is_err());
}
#[test]
fn test_v8_bidi_rules() {
assert_eq!(_to_ascii("abc").unwrap(), "abc");
assert_eq!(_to_ascii("123").unwrap(), "123");
assert_eq!(_to_ascii("אבּג").unwrap(), "xn--kdb3bdf");
assert_eq!(_to_ascii("ابج").unwrap(), "xn--mgbcm");
assert_eq!(_to_ascii("abc.ابج").unwrap(), "abc.xn--mgbcm");
assert_eq!(_to_ascii("אבּג.ابج").unwrap(), "xn--kdb3bdf.xn--mgbcm");
// Bidi domain names cannot start with digits
assert!(_to_ascii("0a.\u{05D0}").is_err());
assert!(_to_ascii("0à.\u{05D0}").is_err());
// Bidi chars may be punycode-encoded
assert!(_to_ascii("xn--0ca24w").is_err());
}

View File

@@ -0,0 +1 @@
{"files":{".cargo-ok":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","Cargo.toml":"f6b6226839c4da49df564d7fc44d4477964621778b671d3a5d4cf980e5524d66","lib.rs":"d32a0b432c49053214a4aa51fd5e6b62215dea5a001f229a8ba1a17eb6be20f1"},"package":"de154f638187706bde41d9b4738748933d64e6b37bdbffc0b47a97d16a6ae356"}

View File

View File

@@ -0,0 +1,16 @@
[package]
name = "percent-encoding"
version = "1.0.0"
authors = ["The rust-url developers"]
description = "Percent encoding and decoding"
repository = "https://github.com/servo/rust-url/"
license = "MIT/Apache-2.0"
[lib]
doctest = false
test = false
path = "lib.rs"
[dev-dependencies]
rustc-test = "0.1"
rustc-serialize = "0.3"

View File

@@ -6,7 +6,32 @@
// option. This file may not be copied, modified, or distributed // option. This file may not be copied, modified, or distributed
// except according to those terms. // except according to those terms.
use encoding; //! URLs use special chacters to indicate the parts of the request. For example, a forward slash
//! indicates a path. In order for that charcter to exist outside of a path separator, that
//! charcter would need to be encoded.
//!
//! Percent encoding replaces reserved charcters with the `%` escape charcter followed by hexidecimal
//! ASCII representaton. For non-ASCII charcters that are percent encoded, a UTF-8 byte sequence
//! becomes percent encoded. A simple example can be seen when the space literal is replaced with
//! `%20`.
//!
//! Percent encoding is further complicated by the fact that different parts of an URL have
//! different encoding requirements. In order to support the variety of encoding requirements,
//! `url::percent_encoding` includes different *encode sets*.
//! See [URL Standard](https://url.spec.whatwg.org/#percent-encoded-bytes) for details.
//!
//! This module provides some `*_ENCODE_SET` constants.
//! If a different set is required, it can be created with
//! the [`define_encode_set!`](../macro.define_encode_set!.html) macro.
//!
//! # Examples
//!
//! ```
//! use url::percent_encoding::{utf8_percent_encode, DEFAULT_ENCODE_SET};
//!
//! assert_eq!(utf8_percent_encode("foo bar?", DEFAULT_ENCODE_SET).to_string(), "foo%20bar%3F");
//! ```
use std::ascii::AsciiExt; use std::ascii::AsciiExt;
use std::borrow::Cow; use std::borrow::Cow;
use std::fmt; use std::fmt;
@@ -44,8 +69,8 @@ pub trait EncodeSet: Clone {
/// ======= /// =======
/// ///
/// ```rust /// ```rust
/// #[macro_use] extern crate url; /// #[macro_use] extern crate percent_encoding;
/// use url::percent_encoding::{utf8_percent_encode, SIMPLE_ENCODE_SET}; /// use percent_encoding::{utf8_percent_encode, SIMPLE_ENCODE_SET};
/// define_encode_set! { /// define_encode_set! {
/// /// This encode set is used in the URL parser for query strings. /// /// This encode set is used in the URL parser for query strings.
/// pub QUERY_ENCODE_SET = [SIMPLE_ENCODE_SET] | {' ', '"', '#', '<', '>'} /// pub QUERY_ENCODE_SET = [SIMPLE_ENCODE_SET] | {' ', '"', '#', '<', '>'}
@@ -58,11 +83,11 @@ pub trait EncodeSet: Clone {
macro_rules! define_encode_set { macro_rules! define_encode_set {
($(#[$attr: meta])* pub $name: ident = [$base_set: expr] | {$($ch: pat),*}) => { ($(#[$attr: meta])* pub $name: ident = [$base_set: expr] | {$($ch: pat),*}) => {
$(#[$attr])* $(#[$attr])*
#[derive(Copy, Clone)] #[derive(Copy, Clone, Debug)]
#[allow(non_camel_case_types)] #[allow(non_camel_case_types)]
pub struct $name; pub struct $name;
impl $crate::percent_encoding::EncodeSet for $name { impl $crate::EncodeSet for $name {
#[inline] #[inline]
fn contains(&self, byte: u8) -> bool { fn contains(&self, byte: u8) -> bool {
match byte as char { match byte as char {
@@ -77,7 +102,10 @@ macro_rules! define_encode_set {
} }
/// This encode set is used for the path of cannot-be-a-base URLs. /// This encode set is used for the path of cannot-be-a-base URLs.
#[derive(Copy, Clone)] ///
/// All ASCII charcters less than hexidecimal 20 and greater than 7E are encoded. This includes
/// special charcters such as line feed, carriage return, NULL, etc.
#[derive(Copy, Clone, Debug)]
#[allow(non_camel_case_types)] #[allow(non_camel_case_types)]
pub struct SIMPLE_ENCODE_SET; pub struct SIMPLE_ENCODE_SET;
@@ -90,21 +118,39 @@ impl EncodeSet for SIMPLE_ENCODE_SET {
define_encode_set! { define_encode_set! {
/// This encode set is used in the URL parser for query strings. /// This encode set is used in the URL parser for query strings.
///
/// Aside from special chacters defined in the [`SIMPLE_ENCODE_SET`](struct.SIMPLE_ENCODE_SET.html),
/// space, double quote ("), hash (#), and inequality qualifiers (<), (>) are encoded.
pub QUERY_ENCODE_SET = [SIMPLE_ENCODE_SET] | {' ', '"', '#', '<', '>'} pub QUERY_ENCODE_SET = [SIMPLE_ENCODE_SET] | {' ', '"', '#', '<', '>'}
} }
define_encode_set! { define_encode_set! {
/// This encode set is used for path components. /// This encode set is used for path components.
///
/// Aside from special chacters defined in the [`SIMPLE_ENCODE_SET`](struct.SIMPLE_ENCODE_SET.html),
/// space, double quote ("), hash (#), inequality qualifiers (<), (>), backtick (`),
/// question mark (?), and curly brackets ({), (}) are encoded.
pub DEFAULT_ENCODE_SET = [QUERY_ENCODE_SET] | {'`', '?', '{', '}'} pub DEFAULT_ENCODE_SET = [QUERY_ENCODE_SET] | {'`', '?', '{', '}'}
} }
define_encode_set! { define_encode_set! {
/// This encode set is used for on '/'-separated path segment /// This encode set is used for on '/'-separated path segment
///
/// Aside from special chacters defined in the [`SIMPLE_ENCODE_SET`](struct.SIMPLE_ENCODE_SET.html),
/// space, double quote ("), hash (#), inequality qualifiers (<), (>), backtick (`),
/// question mark (?), and curly brackets ({), (}), percent sign (%), forward slash (/) are
/// encoded.
pub PATH_SEGMENT_ENCODE_SET = [DEFAULT_ENCODE_SET] | {'%', '/'} pub PATH_SEGMENT_ENCODE_SET = [DEFAULT_ENCODE_SET] | {'%', '/'}
} }
define_encode_set! { define_encode_set! {
/// This encode set is used for username and password. /// This encode set is used for username and password.
///
/// Aside from special chacters defined in the [`SIMPLE_ENCODE_SET`](struct.SIMPLE_ENCODE_SET.html),
/// space, double quote ("), hash (#), inequality qualifiers (<), (>), backtick (`),
/// question mark (?), and curly brackets ({), (}), forward slash (/), colon (:), semi-colon (;),
/// equality (=), at (@), backslash (\\), square brackets ([), (]), caret (\^), and pipe (|) are
/// encoded.
pub USERINFO_ENCODE_SET = [DEFAULT_ENCODE_SET] | { pub USERINFO_ENCODE_SET = [DEFAULT_ENCODE_SET] | {
'/', ':', ';', '=', '@', '[', '\\', ']', '^', '|' '/', ':', ';', '=', '@', '[', '\\', ']', '^', '|'
} }
@@ -113,6 +159,15 @@ define_encode_set! {
/// Return the percent-encoding of the given bytes. /// Return the percent-encoding of the given bytes.
/// ///
/// This is unconditional, unlike `percent_encode()` which uses an encode set. /// This is unconditional, unlike `percent_encode()` which uses an encode set.
///
/// # Examples
///
/// ```
/// use url::percent_encoding::percent_encode_byte;
///
/// assert_eq!("foo bar".bytes().map(percent_encode_byte).collect::<String>(),
/// "%66%6F%6F%20%62%61%72");
/// ```
pub fn percent_encode_byte(byte: u8) -> &'static str { pub fn percent_encode_byte(byte: u8) -> &'static str {
let index = usize::from(byte) * 3; let index = usize::from(byte) * 3;
&"\ &"\
@@ -146,6 +201,14 @@ pub fn percent_encode_byte(byte: u8) -> &'static str {
/// that also implements `Display` and `Into<Cow<str>>`. /// that also implements `Display` and `Into<Cow<str>>`.
/// The latter returns `Cow::Borrowed` when none of the bytes in `input` /// The latter returns `Cow::Borrowed` when none of the bytes in `input`
/// are in the given encode set. /// are in the given encode set.
///
/// # Examples
///
/// ```
/// use url::percent_encoding::{percent_encode, DEFAULT_ENCODE_SET};
///
/// assert_eq!(percent_encode(b"foo bar?", DEFAULT_ENCODE_SET).to_string(), "foo%20bar%3F");
/// ```
#[inline] #[inline]
pub fn percent_encode<E: EncodeSet>(input: &[u8], encode_set: E) -> PercentEncode<E> { pub fn percent_encode<E: EncodeSet>(input: &[u8], encode_set: E) -> PercentEncode<E> {
PercentEncode { PercentEncode {
@@ -157,13 +220,21 @@ pub fn percent_encode<E: EncodeSet>(input: &[u8], encode_set: E) -> PercentEncod
/// Percent-encode the UTF-8 encoding of the given string. /// Percent-encode the UTF-8 encoding of the given string.
/// ///
/// See `percent_encode()` for how to use the return value. /// See `percent_encode()` for how to use the return value.
///
/// # Examples
///
/// ```
/// use url::percent_encoding::{utf8_percent_encode, DEFAULT_ENCODE_SET};
///
/// assert_eq!(utf8_percent_encode("foo bar?", DEFAULT_ENCODE_SET).to_string(), "foo%20bar%3F");
/// ```
#[inline] #[inline]
pub fn utf8_percent_encode<E: EncodeSet>(input: &str, encode_set: E) -> PercentEncode<E> { pub fn utf8_percent_encode<E: EncodeSet>(input: &str, encode_set: E) -> PercentEncode<E> {
percent_encode(input.as_bytes(), encode_set) percent_encode(input.as_bytes(), encode_set)
} }
/// The return type of `percent_encode()` and `utf8_percent_encode()`. /// The return type of `percent_encode()` and `utf8_percent_encode()`.
#[derive(Clone)] #[derive(Clone, Debug)]
pub struct PercentEncode<'a, E: EncodeSet> { pub struct PercentEncode<'a, E: EncodeSet> {
bytes: &'a [u8], bytes: &'a [u8],
encode_set: E, encode_set: E,
@@ -241,6 +312,14 @@ impl<'a, E: EncodeSet> From<PercentEncode<'a, E>> for Cow<'a, str> {
/// that also implements `Into<Cow<u8>>` /// that also implements `Into<Cow<u8>>`
/// (which returns `Cow::Borrowed` when `input` contains no percent-encoded sequence) /// (which returns `Cow::Borrowed` when `input` contains no percent-encoded sequence)
/// and has `decode_utf8()` and `decode_utf8_lossy()` methods. /// and has `decode_utf8()` and `decode_utf8_lossy()` methods.
///
/// # Examples
///
/// ```
/// use url::percent_encoding::percent_decode;
///
/// assert_eq!(percent_decode(b"foo%20bar%3F").decode_utf8().unwrap(), "foo bar?");
/// ```
#[inline] #[inline]
pub fn percent_decode(input: &[u8]) -> PercentDecode { pub fn percent_decode(input: &[u8]) -> PercentDecode {
PercentDecode { PercentDecode {
@@ -249,7 +328,7 @@ pub fn percent_decode(input: &[u8]) -> PercentDecode {
} }
/// The return type of `percent_decode()`. /// The return type of `percent_decode()`.
#[derive(Clone)] #[derive(Clone, Debug)]
pub struct PercentDecode<'a> { pub struct PercentDecode<'a> {
bytes: slice::Iter<'a, u8>, bytes: slice::Iter<'a, u8>,
} }
@@ -339,6 +418,25 @@ impl<'a> PercentDecode<'a> {
/// Invalid UTF-8 percent-encoded byte sequences will be replaced <20> U+FFFD, /// Invalid UTF-8 percent-encoded byte sequences will be replaced <20> U+FFFD,
/// the replacement character. /// the replacement character.
pub fn decode_utf8_lossy(self) -> Cow<'a, str> { pub fn decode_utf8_lossy(self) -> Cow<'a, str> {
encoding::decode_utf8_lossy(self.clone().into()) decode_utf8_lossy(self.clone().into())
} }
} }
fn decode_utf8_lossy(input: Cow<[u8]>) -> Cow<str> {
match input {
Cow::Borrowed(bytes) => String::from_utf8_lossy(bytes),
Cow::Owned(bytes) => {
let raw_utf8: *const [u8];
match String::from_utf8_lossy(&bytes) {
Cow::Borrowed(utf8) => raw_utf8 = utf8.as_bytes(),
Cow::Owned(s) => return s.into(),
}
// from_utf8_lossy returned a borrow of `bytes` unchanged.
debug_assert!(raw_utf8 == &*bytes as *const [u8]);
// Reuse the existing `Vec` allocation.
unsafe { String::from_utf8_unchecked(bytes) }.into()
}
}
}

View File

@@ -1 +1 @@
{"files":{".cargo-ok":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",".travis.yml":"493704d6a0d0f27ad2ad6e950f8bce5f42d9ec4081daeb3c5a48066f1030f467","COPYRIGHT":"23860c2a7b5d96b21569afedf033469bab9fe14a1b24a35068b8641c578ce24d","Cargo.toml":"1898b4a4ea43acc71f3700c57d388b800c47c6f36b34d5baaa9df5cb536fdcec","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7b63ecd5f1902af1b63729947373683c32745c16a10e8e6292e2e2dcd7e90ae0","README.md":"4f0f6696be822c6e05c38ada717f1d99790b18f46d88ba4c029e80be93e735d8","scripts/unicode.py":"64a1b919ab0e251fdb1db8b9c5363d84227fca33ac8375268bb88b74c4462f8f","scripts/unicode_gen_normtests.py":"da891d433fa58068747a1cd121774435b7d486394ce5c85c8079b227d20ea507","src/decompose.rs":"04818a6b0271412ec58508e44535b18c58c80384a5836fe5b2c24e489a5ab4cc","src/lib.rs":"3a2b271fa833f159d899875a88172b053a7a57c0d9786d5c6ac96fd82c6d7efb","src/normalize.rs":"c49af1939734065cd089c75c3612f9fec20063fd63ccb97416d8e894f0910b70","src/recompose.rs":"96b8aea91e09f1fa439467378f2d1fa2aa4c81c86e597d3d36a2a35d7750a0d6","src/tables.rs":"449e09a608ed21b3026e4b60fc728244749bc5347c6c4bc72cd34e6c45357555","src/test.rs":"e47de49aeef9231f5ff177eeb9946618577fc67ed798e889b99710c877e92e1b","src/testdata.rs":"8def8bcd8a24c700881c57eab78c6fdf19295969f4783eb4a138f25616519d75"},"package":"e28fa37426fceeb5cf8f41ee273faa7c82c47dc8fba5853402841e665fcd86ff"} 
{"files":{".cargo-ok":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",".travis.yml":"493704d6a0d0f27ad2ad6e950f8bce5f42d9ec4081daeb3c5a48066f1030f467","COPYRIGHT":"23860c2a7b5d96b21569afedf033469bab9fe14a1b24a35068b8641c578ce24d","Cargo.toml":"b262a0a92fca7f012aefc1d72eeb8923d93711f0b3a916315a0dc41584ec82fb","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7b63ecd5f1902af1b63729947373683c32745c16a10e8e6292e2e2dcd7e90ae0","README.md":"4f0f6696be822c6e05c38ada717f1d99790b18f46d88ba4c029e80be93e735d8","scripts/unicode.py":"faff0551e945cfb13b345f21256a53c96f85e0b0d81df6e165b8b62aa8eaa8e9","scripts/unicode_gen_normtests.py":"da891d433fa58068747a1cd121774435b7d486394ce5c85c8079b227d20ea507","src/decompose.rs":"19399cb186245a973a5235118cd3a19e1834926cff4709b0d8a9cc9eea594be3","src/lib.rs":"3a2b271fa833f159d899875a88172b053a7a57c0d9786d5c6ac96fd82c6d7efb","src/normalize.rs":"06580af2b630c17da50e36aaafb9f0e3a728d5ee1de45d6ac1f3043ca723e670","src/recompose.rs":"936bf16efe318f06040bd3a8d2085a4c2e68a03c91d98b7e349f090f88752f9f","src/tables.rs":"566c4b764fa9d21abc8668681821c0bcbb3c54b1956795dc58be506f5540ced7","src/test.rs":"83a05c7dd030069cc1baff70c9933ef3ee65b9aeda4ca32cbbcc1d4c1a33979f","src/testdata.rs":"8def8bcd8a24c700881c57eab78c6fdf19295969f4783eb4a138f25616519d75"},"package":"51ccda9ef9efa3f7ef5d91e8f9b83bbe6955f9bf86aec89d5cce2c874625920f"}

View File

@@ -1,21 +1,24 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package] [package]
name = "unicode-normalization" name = "unicode-normalization"
version = "0.1.4" version = "0.1.5"
authors = ["kwantam <kwantam@gmail.com>"] authors = ["kwantam <kwantam@gmail.com>"]
exclude = ["target/*", "Cargo.lock", "scripts/tmp", "*.txt"]
description = "This crate provides functions for normalization of\nUnicode strings, including Canonical and Compatible\nDecomposition and Recomposition, as described in\nUnicode Standard Annex #15.\n"
homepage = "https://github.com/unicode-rs/unicode-normalization" homepage = "https://github.com/unicode-rs/unicode-normalization"
repository = "https://github.com/unicode-rs/unicode-normalization"
documentation = "https://unicode-rs.github.io/unicode-normalization" documentation = "https://unicode-rs.github.io/unicode-normalization"
license = "MIT/Apache-2.0"
keywords = ["text", "unicode", "normalization", "decomposition", "recomposition"]
readme = "README.md" readme = "README.md"
description = """ keywords = ["text", "unicode", "normalization", "decomposition", "recomposition"]
This crate provides functions for normalization of license = "MIT/Apache-2.0"
Unicode strings, including Canonical and Compatible repository = "https://github.com/unicode-rs/unicode-normalization"
Decomposition and Recomposition, as described in
Unicode Standard Annex #15.
"""
exclude = [ "target/*", "Cargo.lock", "scripts/tmp", "*.txt" ]

View File

@@ -18,7 +18,7 @@
# Since this should not require frequent updates, we just store this # Since this should not require frequent updates, we just store this
# out-of-line and check the unicode.rs file into git. # out-of-line and check the unicode.rs file into git.
import fileinput, re, os, sys import fileinput, re, os, sys, collections
preamble = '''// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT preamble = '''// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at // file at the top-level directory of this distribution and at
@@ -160,19 +160,9 @@ def to_combines(combs):
return combs_out return combs_out
def format_table_content(f, content, indent): def format_table_content(f, content, indent):
line = " "*indent indent = " "*indent
first = True for c in content:
for chunk in content.split(","): f.write("%s%s,\n" % (indent, c))
if len(line) + len(chunk) < 98:
if first:
line += chunk
else:
line += ", " + chunk
first = False
else:
f.write(line + ",\n")
line = " "*indent + chunk
f.write(line)
def load_properties(f, interestingprops): def load_properties(f, interestingprops):
fetch(f) fetch(f)
@@ -220,14 +210,44 @@ def emit_table(f, name, t_data, t_type = "&'static [(char, char)]", is_pub=True,
if is_pub: if is_pub:
pub_string = "pub " pub_string = "pub "
f.write(" %sconst %s: %s = &[\n" % (pub_string, name, t_type)) f.write(" %sconst %s: %s = &[\n" % (pub_string, name, t_type))
data = "" format_table_content(f, [pfun(d) for d in t_data], 8)
first = True f.write("\n ];\n\n")
for dat in t_data:
if not first: def emit_strtab_table(f, name, keys, vfun, is_pub=True,
data += "," tab_entry_type='char', slice_element_sfun=escape_char):
first = False pub_string = ""
data += pfun(dat) if is_pub:
format_table_content(f, data, 8) pub_string = "pub "
f.write(" %s const %s: &'static [(char, Slice)] = &[\n"
% (pub_string, name))
strtab = collections.OrderedDict()
strtab_offset = 0
# TODO: a more sophisticated algorithm here would not only check for the
# existence of v in the strtab, but also v in contiguous substrings of
# strtab, if that's possible.
for k in keys:
v = tuple(vfun(k))
if v in strtab:
item_slice = strtab[v]
else:
value_len = len(v)
item_slice = (strtab_offset, value_len)
strtab[v] = item_slice
strtab_offset += value_len
f.write("%s(%s, Slice { offset: %d, length: %d }),\n"
% (" "*8, escape_char(k), item_slice[0], item_slice[1]))
f.write("\n ];\n\n")
f.write(" %s const %s_STRTAB: &'static [%s] = &[\n"
% (pub_string, name, tab_entry_type))
for (v, _) in strtab.iteritems():
f.write("%s%s,\n" % (" "*8, ', '.join(slice_element_sfun(c) for c in v)))
f.write("\n ];\n\n") f.write("\n ];\n\n")
def emit_norm_module(f, canon, compat, combine, norm_props, general_category_mark): def emit_norm_module(f, canon, compat, combine, norm_props, general_category_mark):
@@ -251,43 +271,38 @@ def emit_norm_module(f, canon, compat, combine, norm_props, general_category_mar
canon_comp_keys.sort() canon_comp_keys.sort()
f.write("pub mod normalization {\n") f.write("pub mod normalization {\n")
f.write("""
pub struct Slice {
pub offset: u16,
pub length: u16,
}
""")
def mkdata_fun(table): def mkdata_fun(table):
def f(char): def f(char):
data = "(%s,&[" % escape_char(char) return table[char]
first = True
for d in table[char]:
if not first:
data += ","
first = False
data += escape_char(d)
data += "])"
return data
return f return f
# TODO: should the strtab of these two tables be of type &'static str, for
# smaller data?
f.write(" // Canonical decompositions\n") f.write(" // Canonical decompositions\n")
emit_table(f, "canonical_table", canon_keys, "&'static [(char, &'static [char])]", emit_strtab_table(f, "canonical_table", canon_keys,
pfun=mkdata_fun(canon)) vfun=mkdata_fun(canon))
f.write(" // Compatibility decompositions\n") f.write(" // Compatibility decompositions\n")
emit_table(f, "compatibility_table", compat_keys, "&'static [(char, &'static [char])]", emit_strtab_table(f, "compatibility_table", compat_keys,
pfun=mkdata_fun(compat)) vfun=mkdata_fun(compat))
def comp_pfun(char): def comp_vfun(char):
data = "(%s,&[" % escape_char(char) return sorted(canon_comp[char], lambda x, y: x[0] - y[0])
canon_comp[char].sort(lambda x, y: x[0] - y[0])
first = True
for pair in canon_comp[char]:
if not first:
data += ","
first = False
data += "(%s,%s)" % (escape_char(pair[0]), escape_char(pair[1]))
data += "])"
return data
f.write(" // Canonical compositions\n") f.write(" // Canonical compositions\n")
emit_table(f, "composition_table", canon_comp_keys, # "&'static [(char, &'static [(char, char)])]", pfun=comp_pfun)
"&'static [(char, &'static [(char, char)])]", pfun=comp_pfun) emit_strtab_table(f, "composition_table", canon_comp_keys,
vfun=comp_vfun,
tab_entry_type="(char, char)",
slice_element_sfun=lambda pair: "(%s,%s)" % (escape_char(pair[0]),
escape_char(pair[1])))
f.write(""" f.write("""
fn bsearch_range_value_table(c: char, r: &'static [(char, char, u8)]) -> u8 { fn bsearch_range_value_table(c: char, r: &'static [(char, char, u8)]) -> u8 {
@@ -335,7 +350,7 @@ def emit_norm_module(f, canon, compat, combine, norm_props, general_category_mar
""") """)
emit_table(f, "general_category_mark", combine, "&'static [(char, char)]", is_pub=False, emit_table(f, "general_category_mark", general_category_mark, "&'static [(char, char)]", is_pub=False,
pfun=lambda x: "(%s,%s)" % (escape_char(x[0]), escape_char(x[1]))) pfun=lambda x: "(%s,%s)" % (escape_char(x[0]), escape_char(x[1])))
f.write(""" f.write("""

View File

@@ -8,6 +8,7 @@
// option. This file may not be copied, modified, or distributed // option. This file may not be copied, modified, or distributed
// except according to those terms. // except according to those terms.
use std::fmt::{self, Write};
// Helper functions used for Unicode normalization // Helper functions used for Unicode normalization
fn canonical_sort(comb: &mut [(char, u8)]) { fn canonical_sort(comb: &mut [(char, u8)]) {
@@ -133,3 +134,12 @@ impl<I: Iterator<Item=char>> Iterator for Decompositions<I> {
(lower, None) (lower, None)
} }
} }
impl<I: Iterator<Item=char> + Clone> fmt::Display for Decompositions<I> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for c in self.clone() {
f.write_char(c)?;
}
Ok(())
}
}

View File

@@ -12,17 +12,22 @@
use std::cmp::Ordering::{Equal, Less, Greater}; use std::cmp::Ordering::{Equal, Less, Greater};
use std::ops::FnMut; use std::ops::FnMut;
use tables::normalization::{canonical_table, compatibility_table, composition_table}; use tables::normalization::{canonical_table, canonical_table_STRTAB};
use tables::normalization::{compatibility_table, compatibility_table_STRTAB};
use tables::normalization::{composition_table, composition_table_STRTAB};
use tables::normalization::Slice;
fn bsearch_table<T>(c: char, r: &'static [(char, &'static [T])]) -> Option<&'static [T]> { fn bsearch_table<T>(c: char, r: &'static [(char, Slice)], strtab: &'static [T]) -> Option<&'static [T]> {
match r.binary_search_by(|&(val, _)| { match r.binary_search_by(|&(val, _)| {
if c == val { Equal } if c == val { Equal }
else if val < c { Less } else if val < c { Less }
else { Greater } else { Greater }
}) { }) {
Ok(idx) => { Ok(idx) => {
let (_, result) = r[idx]; let ref slice = r[idx].1;
Some(result) let offset = slice.offset as usize;
let length = slice.length as usize;
Some(&strtab[offset..(offset + length)])
} }
Err(_) => None Err(_) => None
} }
@@ -50,7 +55,7 @@ fn d<F>(c: char, i: &mut F, k: bool) where F: FnMut(char) {
} }
// First check the canonical decompositions // First check the canonical decompositions
match bsearch_table(c, canonical_table) { match bsearch_table(c, canonical_table, canonical_table_STRTAB) {
Some(canon) => { Some(canon) => {
for x in canon { for x in canon {
d(*x, i, k); d(*x, i, k);
@@ -64,7 +69,7 @@ fn d<F>(c: char, i: &mut F, k: bool) where F: FnMut(char) {
if !k { (*i)(c); return; } if !k { (*i)(c); return; }
// Then check the compatibility decompositions // Then check the compatibility decompositions
match bsearch_table(c, compatibility_table) { match bsearch_table(c, compatibility_table, compatibility_table_STRTAB) {
Some(compat) => { Some(compat) => {
for x in compat { for x in compat {
d(*x, i, k); d(*x, i, k);
@@ -83,7 +88,7 @@ fn d<F>(c: char, i: &mut F, k: bool) where F: FnMut(char) {
/// for more information. /// for more information.
pub fn compose(a: char, b: char) -> Option<char> { pub fn compose(a: char, b: char) -> Option<char> {
compose_hangul(a, b).or_else(|| { compose_hangul(a, b).or_else(|| {
match bsearch_table(a, composition_table) { match bsearch_table(a, composition_table, composition_table_STRTAB) {
None => None, None => None,
Some(candidates) => { Some(candidates) => {
match candidates.binary_search_by(|&(val, _)| { match candidates.binary_search_by(|&(val, _)| {

View File

@@ -9,6 +9,7 @@
// except according to those terms. // except according to those terms.
use std::collections::VecDeque; use std::collections::VecDeque;
use std::fmt::{self, Write};
use decompose::Decompositions; use decompose::Decompositions;
#[derive(Clone)] #[derive(Clone)]
@@ -135,3 +136,12 @@ impl<I: Iterator<Item=char>> Iterator for Recompositions<I> {
} }
} }
} }
impl<I: Iterator<Item=char> + Clone> fmt::Display for Recompositions<I> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for c in self.clone() {
f.write_char(c)?;
}
Ok(())
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -8,14 +8,19 @@
// option. This file may not be copied, modified, or distributed // option. This file may not be copied, modified, or distributed
// except according to those terms. // except according to those terms.
use UnicodeNormalization;
use std::char;
use super::UnicodeNormalization;
use super::char::is_combining_mark;
#[test] #[test]
fn test_nfd() { fn test_nfd() {
macro_rules! t { macro_rules! t {
($input: expr, $expected: expr) => { ($input: expr, $expected: expr) => {
assert_eq!($input.nfd().collect::<String>(), $expected); assert_eq!($input.nfd().to_string(), $expected);
// A dummy iterator that is not std::str::Chars directly: // A dummy iterator that is not std::str::Chars directly;
// note that `id_func` is used to ensure `Clone` implementation
assert_eq!($input.chars().map(|c| c).nfd().collect::<String>(), $expected); assert_eq!($input.chars().map(|c| c).nfd().collect::<String>(), $expected);
} }
} }
@@ -35,7 +40,7 @@ fn test_nfd() {
fn test_nfkd() { fn test_nfkd() {
macro_rules! t { macro_rules! t {
($input: expr, $expected: expr) => { ($input: expr, $expected: expr) => {
assert_eq!($input.nfkd().collect::<String>(), $expected); assert_eq!($input.nfkd().to_string(), $expected);
} }
} }
t!("abc", "abc"); t!("abc", "abc");
@@ -54,7 +59,7 @@ fn test_nfkd() {
fn test_nfc() { fn test_nfc() {
macro_rules! t { macro_rules! t {
($input: expr, $expected: expr) => { ($input: expr, $expected: expr) => {
assert_eq!($input.nfc().collect::<String>(), $expected); assert_eq!($input.nfc().to_string(), $expected);
} }
} }
t!("abc", "abc"); t!("abc", "abc");
@@ -74,7 +79,7 @@ fn test_nfc() {
fn test_nfkc() { fn test_nfkc() {
macro_rules! t { macro_rules! t {
($input: expr, $expected: expr) => { ($input: expr, $expected: expr) => {
assert_eq!($input.nfkc().collect::<String>(), $expected); assert_eq!($input.nfkc().to_string(), $expected);
} }
} }
t!("abc", "abc"); t!("abc", "abc");
@@ -153,3 +158,24 @@ fn test_official() {
} }
} }
} }
#[test]
fn test_is_combining_mark_ascii() {
for cp in 0..0x7f {
assert!(!is_combining_mark(char::from_u32(cp).unwrap()));
}
}
#[test]
fn test_is_combining_mark_misc() {
// https://github.com/unicode-rs/unicode-normalization/issues/16
// U+11C3A BHAIKSUKI VOWEL SIGN O
// Category: Mark, Nonspacing [Mn]
assert!(is_combining_mark('\u{11C3A}'));
// U+11C3F BHAIKSUKI SIGN VIRAMA
// Category: Mark, Nonspacing [Mn]
assert!(is_combining_mark('\u{11C3F}'));
}

View File

@@ -1 +1 @@
{"files":{".cargo-ok":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",".travis.yml":"7a0722a759238d2a804e715e4afd3cbe7a2696771fd4e5d6ef9cf8d36354d306","Cargo.toml":"9487d35eec356799c7e06355d25c65275900a546769da368c55e1947e3d38d2f","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"20c7855c364d57ea4c97889a5e8d98470a9952dade37bd9248b9a54431670e5e","Makefile":"bffd75d34654b2955d4f005f1a5e85c821c90becf1a8a52cbe10121972f43148","README.md":"eb3f4694003f408cbe3c7f3e9fbbc71241defb940cc55a816981f0f0f144c8eb","UPGRADING.md":"fbcc2d39bdf17db0745793db6626fcd5c909dddd4ce13b27566cfabece22c368","appveyor.yml":"c78486dbfbe6ebbf3d808afb9a19f7ec18c4704ce451c6305f0716999b70a1a6","docs/.nojekyll":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","docs/404.html":"f61e6271c1ea1aa113b64b356e994595fa548f0433f89948d747503ad22195cd","docs/index.html":"f61e6271c1ea1aa113b64b356e994595fa548f0433f89948d747503ad22195cd","github.png":"b432fd855efe7c430fe6a57ccf83935c1996f03a7cdc8d6e1b34154b8c43f6ec","src/encoding.rs":"7fb43e1c109bf9f2a80a05525082f90e79dba8e8056547571c49fba074406d39","src/form_urlencoded.rs":"0778240691a4b501dc1bde94e03b60e933f8f3f163025ef5dcf124fd946845e2","src/host.rs":"76474c04c3b998ce6891d6c7e8ef30fd4680308da8e451c0052655835588bd5d","src/lib.rs":"c433461a2fe3b37ff3f4b1f078ea40ea159d368b315c734f081f4ee770e3998c","src/origin.rs":"76b91ec7522e4a578c3d848541e3c1e08fba8cc0d5a5baabf829797dacc2275d","src/parser.rs":"670f1c36b1e1c2af0456af45196d23fd7b3d83d4f574e23d7ba536eb0003fa73","src/path_segments.rs":"85e16d4e3a7bf4162b2ddd6a14beda0d753395da7c8efe208c52862fc2b21221","src/percent_encoding.rs":"d8f9c2cc18615f705898b4d5e2b8e41506c9639190c74b1615b5f42ea41bafe3","src/quirks.rs":"1231f965e22bb3632c22993e2a8d4c7470bcb4a8de25d049f31784303f0def03","src/slicing.rs":"4e539886b23945a92094625f3e531a4bff40daa44240b5d19ee8577478c4f7fe","tests/data.rs":"c333766897f6492fb6583ab5c8a511973b7a55f58ca550799432343da64d5
ca7","tests/setters_tests.json":"ebcbdb52e9a4b5a565f8806d52ebc610d46a34df883e10b0be080d026468ff73","tests/unit.rs":"9cc21b36e7dba8bec4af465cd0b2c1ed3c015cd3c0f85d610aef39109afeb2c4","tests/urltestdata.json":"430c74aa3a31afaa57a92805544e00825f4dffe2def98c1e3c212c3db80268af"},"package":"3e2ba3456fbe5c0098cb877cf08b92b76c3e18e0be9e47c35b487220d377d24e"} {"files":{".cargo-ok":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",".travis.yml":"890af214187ffcba4732acb2d1af30d7adb9aade0679e9fdb06baae363240b8e","Cargo.toml":"ec586106c4d0625919a3591fe3ae915043e82c8bfdd1c9e747171ba5e21047e1","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"20c7855c364d57ea4c97889a5e8d98470a9952dade37bd9248b9a54431670e5e","Makefile":"bffd75d34654b2955d4f005f1a5e85c821c90becf1a8a52cbe10121972f43148","README.md":"eb3f4694003f408cbe3c7f3e9fbbc71241defb940cc55a816981f0f0f144c8eb","UPGRADING.md":"fbcc2d39bdf17db0745793db6626fcd5c909dddd4ce13b27566cfabece22c368","appveyor.yml":"c78486dbfbe6ebbf3d808afb9a19f7ec18c4704ce451c6305f0716999b70a1a6","docs/.nojekyll":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","docs/404.html":"f61e6271c1ea1aa113b64b356e994595fa548f0433f89948d747503ad22195cd","docs/index.html":"f61e6271c1ea1aa113b64b356e994595fa548f0433f89948d747503ad22195cd","github.png":"b432fd855efe7c430fe6a57ccf83935c1996f03a7cdc8d6e1b34154b8c43f6ec","rust-url-todo":"1192cee7b6cedf2133d97dc6074b593a1d19b0ee13fff6f28d6329855044e575","src/encoding.rs":"f3e109ca8ec5a9130da50cdfb3003530aedb6dd5a440f0790d76b71f6981119c","src/form_urlencoded.rs":"7ccaef7148e4bc2577154c50f8705db3a055b641269e24c22770f06222321e1e","src/host.rs":"281165d732ea87b6f01a98f7c68ffcb284c41f84b3ab6ed674fb8e57022d1019","src/lib.rs":"bd156e8bcfbd44f0cd52c8b394e03ec63fea012c0bf5ca554521352714838605","src/origin.rs":"7071dcc1070ccfae84cdcd43586b84a9706e35a9a099ff4dde128da0909bd0bc","src/parser.rs":"9d30868f0900586fec6f122a0322598a08116ab0b4c4d8caf5c35a7203
81a73a","src/path_segments.rs":"7bd3142eaa568863ef44e2255c181239141f9eeee337f889b9ffaaeab4ca669d","src/quirks.rs":"1231f965e22bb3632c22993e2a8d4c7470bcb4a8de25d049f31784303f0def03","src/slicing.rs":"4e539886b23945a92094625f3e531a4bff40daa44240b5d19ee8577478c4f7fe","tests/data.rs":"c333766897f6492fb6583ab5c8a511973b7a55f58ca550799432343da64d5ca7","tests/setters_tests.json":"ebcbdb52e9a4b5a565f8806d52ebc610d46a34df883e10b0be080d026468ff73","tests/unit.rs":"c2f206f433be619414d761d358a2a4a5a46cfe8a4fea5339adec5e9937d78de2","tests/urltestdata.json":"430c74aa3a31afaa57a92805544e00825f4dffe2def98c1e3c212c3db80268af"},"package":"eeb819346883532a271eb626deb43c4a1bb4c4dd47c519bd78137c3e72a4fe27"}

View File

@@ -3,7 +3,7 @@ rust:
- nightly - nightly
- beta - beta
- stable - stable
- 1.15.0 - 1.17.0
script: make test script: make test
notifications: notifications:
webhooks: http://build.servo.org:54856/travis webhooks: http://build.servo.org:54856/travis

View File

@@ -1,29 +1,24 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package] [package]
name = "url" name = "url"
version = "1.4.1" # When updating version, also modify html_root_url in the lib.rs
version = "1.5.1"
authors = ["The rust-url developers"] authors = ["The rust-url developers"]
description = "URL library for Rust, based on the WHATWG URL Standard" description = "URL library for Rust, based on the WHATWG URL Standard"
documentation = "https://docs.rs/url" documentation = "https://docs.rs/url"
repository = "https://github.com/servo/rust-url"
readme = "README.md" readme = "README.md"
keywords = ["url", "parser"] keywords = ["url", "parser"]
categories = ["parser-implementations", "web-programming", "encoding"] categories = ["parser-implementations", "web-programming", "encoding"]
license = "MIT/Apache-2.0" license = "MIT/Apache-2.0"
repository = "https://github.com/servo/rust-url"
[lib] [badges]
test = false travis-ci = { repository = "servo/rust-url" }
appveyor = { repository = "servo/rust-url" }
[workspace]
members = [".", "idna", "percent_encoding", "url_serde"]
[[test]] [[test]]
name = "unit" name = "unit"
@@ -31,41 +26,24 @@ name = "unit"
[[test]] [[test]]
name = "data" name = "data"
harness = false harness = false
[dependencies.encoding]
version = "0.2"
optional = true
[dependencies.heapsize] [lib]
version = ">=0.1.1, <0.5" test = false
optional = true
[dependencies.rustc-serialize] [dev-dependencies]
version = "0.3" rustc-test = "0.1"
optional = true rustc-serialize = "0.3"
serde_json = ">=0.6.1, <0.9"
[dependencies.idna]
version = "0.1.0"
[dependencies.serde]
version = ">=0.6.1, <0.9"
optional = true
[dependencies.matches]
version = "0.1"
[dev-dependencies.serde_json]
version = ">=0.6.1, <0.9"
[dev-dependencies.rustc-test]
version = "0.1"
[dev-dependencies.rustc-serialize]
version = "0.3"
[features] [features]
heap_size = ["heapsize"]
query_encoding = ["encoding"] query_encoding = ["encoding"]
[badges.appveyor] heap_size = ["heapsize"]
repository = "servo/rust-url"
[badges.travis-ci] [dependencies]
repository = "servo/rust-url" encoding = {version = "0.2", optional = true}
heapsize = {version = ">=0.1.1, <0.5", optional = true}
idna = { version = "0.1.0", path = "./idna" }
matches = "0.1"
percent-encoding = { version = "1.0.0", path = "./percent_encoding" }
rustc-serialize = {version = "0.3", optional = true}
serde = {version = ">=0.6.1, <0.9", optional = true}

14
third_party/rust/url/rust-url-todo vendored Normal file
View File

@@ -0,0 +1,14 @@
* standalone path parsing?
* Test setters
* Test trim C0/space
* Test remove tab & newline
#[test]
fn test_path_segments() {
let mut url = Url::parse("http://example.net").unwrap();
url.push_path_segment("foo").unwrap();
url.extend_path_segments(&["bar", "b/az"]).unwrap();
assert_eq!(url.as_str(), "http://example.net/foo");
}

View File

@@ -13,6 +13,7 @@
#[cfg(feature = "query_encoding")] extern crate encoding; #[cfg(feature = "query_encoding")] extern crate encoding;
use std::borrow::Cow; use std::borrow::Cow;
#[cfg(feature = "query_encoding")] use std::fmt::{self, Debug, Formatter};
#[cfg(feature = "query_encoding")] use self::encoding::types::{DecoderTrap, EncoderTrap}; #[cfg(feature = "query_encoding")] use self::encoding::types::{DecoderTrap, EncoderTrap};
#[cfg(feature = "query_encoding")] use self::encoding::label::encoding_from_whatwg_label; #[cfg(feature = "query_encoding")] use self::encoding::label::encoding_from_whatwg_label;
@@ -89,9 +90,19 @@ impl EncodingOverride {
} }
} }
#[cfg(feature = "query_encoding")]
impl Debug for EncodingOverride {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "EncodingOverride {{ encoding: ")?;
match self.encoding {
Some(e) => write!(f, "{} }}", e.name()),
None => write!(f, "None }}")
}
}
}
#[cfg(not(feature = "query_encoding"))] #[cfg(not(feature = "query_encoding"))]
#[derive(Copy, Clone)] #[derive(Copy, Clone, Debug)]
pub struct EncodingOverride; pub struct EncodingOverride;
#[cfg(not(feature = "query_encoding"))] #[cfg(not(feature = "query_encoding"))]

View File

@@ -81,7 +81,7 @@ pub fn parse_with_encoding<'a>(input: &'a [u8],
} }
/// The return type of `parse()`. /// The return type of `parse()`.
#[derive(Copy, Clone)] #[derive(Copy, Clone, Debug)]
pub struct Parse<'a> { pub struct Parse<'a> {
input: &'a [u8], input: &'a [u8],
encoding: EncodingOverride, encoding: EncodingOverride,
@@ -145,6 +145,7 @@ impl<'a> Parse<'a> {
} }
/// Like `Parse`, but yields pairs of `String` instead of pairs of `Cow<str>`. /// Like `Parse`, but yields pairs of `String` instead of pairs of `Cow<str>`.
#[derive(Debug)]
pub struct ParseIntoOwned<'a> { pub struct ParseIntoOwned<'a> {
inner: Parse<'a> inner: Parse<'a>
} }
@@ -168,6 +169,7 @@ pub fn byte_serialize(input: &[u8]) -> ByteSerialize {
} }
/// Return value of `byte_serialize()`. /// Return value of `byte_serialize()`.
#[derive(Debug)]
pub struct ByteSerialize<'a> { pub struct ByteSerialize<'a> {
bytes: &'a [u8], bytes: &'a [u8],
} }
@@ -209,6 +211,7 @@ impl<'a> Iterator for ByteSerialize<'a> {
/// The [`application/x-www-form-urlencoded` serializer]( /// The [`application/x-www-form-urlencoded` serializer](
/// https://url.spec.whatwg.org/#concept-urlencoded-serializer). /// https://url.spec.whatwg.org/#concept-urlencoded-serializer).
#[derive(Debug)]
pub struct Serializer<T: Target> { pub struct Serializer<T: Target> {
target: Option<T>, target: Option<T>,
start_position: usize, start_position: usize,

View File

@@ -176,7 +176,7 @@ impl<S: AsRef<str>> fmt::Display for Host<S> {
/// This mostly exists because coherence rules dont allow us to implement /// This mostly exists because coherence rules dont allow us to implement
/// `ToSocketAddrs for (Host<S>, u16)`. /// `ToSocketAddrs for (Host<S>, u16)`.
#[derive(Clone)] #[derive(Clone, Debug)]
pub struct HostAndPort<S=String> { pub struct HostAndPort<S=String> {
pub host: Host<S>, pub host: Host<S>,
pub port: u16, pub port: u16,
@@ -192,6 +192,15 @@ impl<'a> HostAndPort<&'a str> {
} }
} }
impl<S: AsRef<str>> fmt::Display for HostAndPort<S> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
self.host.fmt(f)?;
f.write_str(":")?;
self.port.fmt(f)
}
}
impl<S: AsRef<str>> ToSocketAddrs for HostAndPort<S> { impl<S: AsRef<str>> ToSocketAddrs for HostAndPort<S> {
type Iter = SocketAddrs; type Iter = SocketAddrs;
@@ -213,10 +222,12 @@ impl<S: AsRef<str>> ToSocketAddrs for HostAndPort<S> {
} }
/// Socket addresses for an URL. /// Socket addresses for an URL.
#[derive(Debug)]
pub struct SocketAddrs { pub struct SocketAddrs {
state: SocketAddrsState state: SocketAddrsState
} }
#[derive(Debug)]
enum SocketAddrsState { enum SocketAddrsState {
Domain(vec::IntoIter<SocketAddr>), Domain(vec::IntoIter<SocketAddr>),
One(SocketAddr), One(SocketAddr),

File diff suppressed because it is too large Load Diff

View File

@@ -50,7 +50,7 @@ pub fn url_origin(url: &Url) -> Origin {
/// the URL does not have the same origin as any other URL. /// the URL does not have the same origin as any other URL.
/// ///
/// For more information see https://url.spec.whatwg.org/#origin /// For more information see https://url.spec.whatwg.org/#origin
#[derive(PartialEq, Eq, Clone, Debug)] #[derive(PartialEq, Eq, Hash, Clone, Debug)]
pub enum Origin { pub enum Origin {
/// A globally unique identifier /// A globally unique identifier
Opaque(OpaqueOrigin), Opaque(OpaqueOrigin),
@@ -123,7 +123,7 @@ impl Origin {
} }
/// Opaque identifier for URLs that have file or other schemes /// Opaque identifier for URLs that have file or other schemes
#[derive(Eq, PartialEq, Clone, Debug)] #[derive(Eq, PartialEq, Hash, Clone, Debug)]
pub struct OpaqueOrigin(usize); pub struct OpaqueOrigin(usize);
#[cfg(feature = "heapsize")] #[cfg(feature = "heapsize")]

View File

@@ -57,6 +57,9 @@ simple_enum_error! {
Overflow => "URLs more than 4 GB are not supported", Overflow => "URLs more than 4 GB are not supported",
} }
#[cfg(feature = "heapsize")]
known_heap_size!(0, ParseError);
impl fmt::Display for ParseError { impl fmt::Display for ParseError {
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
self.description().fmt(fmt) self.description().fmt(fmt)

View File

@@ -18,14 +18,22 @@ use Url;
/// Examples: /// Examples:
/// ///
/// ```rust /// ```rust
/// # use url::Url; /// use url::Url;
/// let mut url = Url::parse("mailto:me@example.com").unwrap(); /// # use std::error::Error;
///
/// # fn run() -> Result<(), Box<Error>> {
/// let mut url = Url::parse("mailto:me@example.com")?;
/// assert!(url.path_segments_mut().is_err()); /// assert!(url.path_segments_mut().is_err());
/// ///
/// let mut url = Url::parse("http://example.net/foo/index.html").unwrap(); /// let mut url = Url::parse("http://example.net/foo/index.html")?;
/// url.path_segments_mut().unwrap().pop().push("img").push("2/100%.png"); /// url.path_segments_mut().map_err(|_| "cannot be base")?
/// .pop().push("img").push("2/100%.png");
/// assert_eq!(url.as_str(), "http://example.net/foo/img/2%2F100%25.png"); /// assert_eq!(url.as_str(), "http://example.net/foo/img/2%2F100%25.png");
/// # Ok(())
/// # }
/// # run().unwrap();
/// ``` /// ```
#[derive(Debug)]
pub struct PathSegmentsMut<'a> { pub struct PathSegmentsMut<'a> {
url: &'a mut Url, url: &'a mut Url,
after_first_slash: usize, after_first_slash: usize,
@@ -60,10 +68,17 @@ impl<'a> PathSegmentsMut<'a> {
/// Example: /// Example:
/// ///
/// ```rust /// ```rust
/// # use url::Url; /// use url::Url;
/// let mut url = Url::parse("https://github.com/servo/rust-url/").unwrap(); /// # use std::error::Error;
/// url.path_segments_mut().unwrap().clear().push("logout"); ///
/// # fn run() -> Result<(), Box<Error>> {
/// let mut url = Url::parse("https://github.com/servo/rust-url/")?;
/// url.path_segments_mut().map_err(|_| "cannot be base")?
/// .clear().push("logout");
/// assert_eq!(url.as_str(), "https://github.com/logout"); /// assert_eq!(url.as_str(), "https://github.com/logout");
/// # Ok(())
/// # }
/// # run().unwrap();
/// ``` /// ```
pub fn clear(&mut self) -> &mut Self { pub fn clear(&mut self) -> &mut Self {
self.url.serialization.truncate(self.after_first_slash); self.url.serialization.truncate(self.after_first_slash);
@@ -81,14 +96,22 @@ impl<'a> PathSegmentsMut<'a> {
/// Example: /// Example:
/// ///
/// ```rust /// ```rust
/// # use url::Url; /// use url::Url;
/// let mut url = Url::parse("https://github.com/servo/rust-url/").unwrap(); /// # use std::error::Error;
/// url.path_segments_mut().unwrap().push("pulls"); ///
/// # fn run() -> Result<(), Box<Error>> {
/// let mut url = Url::parse("https://github.com/servo/rust-url/")?;
/// url.path_segments_mut().map_err(|_| "cannot be base")?
/// .push("pulls");
/// assert_eq!(url.as_str(), "https://github.com/servo/rust-url//pulls"); /// assert_eq!(url.as_str(), "https://github.com/servo/rust-url//pulls");
/// ///
/// let mut url = Url::parse("https://github.com/servo/rust-url/").unwrap(); /// let mut url = Url::parse("https://github.com/servo/rust-url/")?;
/// url.path_segments_mut().unwrap().pop_if_empty().push("pulls"); /// url.path_segments_mut().map_err(|_| "cannot be base")?
/// .pop_if_empty().push("pulls");
/// assert_eq!(url.as_str(), "https://github.com/servo/rust-url/pulls"); /// assert_eq!(url.as_str(), "https://github.com/servo/rust-url/pulls");
/// # Ok(())
/// # }
/// # run().unwrap();
/// ``` /// ```
pub fn pop_if_empty(&mut self) -> &mut Self { pub fn pop_if_empty(&mut self) -> &mut Self {
if self.url.serialization[self.after_first_slash..].ends_with('/') { if self.url.serialization[self.after_first_slash..].ends_with('/') {
@@ -138,23 +161,37 @@ impl<'a> PathSegmentsMut<'a> {
/// Example: /// Example:
/// ///
/// ```rust /// ```rust
/// # use url::Url; /// use url::Url;
/// let mut url = Url::parse("https://github.com/").unwrap(); /// # use std::error::Error;
///
/// # fn run() -> Result<(), Box<Error>> {
/// let mut url = Url::parse("https://github.com/")?;
/// let org = "servo"; /// let org = "servo";
/// let repo = "rust-url"; /// let repo = "rust-url";
/// let issue_number = "188"; /// let issue_number = "188";
/// url.path_segments_mut().unwrap().extend(&[org, repo, "issues", issue_number]); /// url.path_segments_mut().map_err(|_| "cannot be base")?
/// .extend(&[org, repo, "issues", issue_number]);
/// assert_eq!(url.as_str(), "https://github.com/servo/rust-url/issues/188"); /// assert_eq!(url.as_str(), "https://github.com/servo/rust-url/issues/188");
/// # Ok(())
/// # }
/// # run().unwrap();
/// ``` /// ```
/// ///
/// In order to make sure that parsing the serialization of an URL gives the same URL, /// In order to make sure that parsing the serialization of an URL gives the same URL,
/// a segment is ignored if it is `"."` or `".."`: /// a segment is ignored if it is `"."` or `".."`:
/// ///
/// ```rust /// ```rust
/// # use url::Url; /// use url::Url;
/// let mut url = Url::parse("https://github.com/servo").unwrap(); /// # use std::error::Error;
/// url.path_segments_mut().unwrap().extend(&["..", "rust-url", ".", "pulls"]); ///
/// # fn run() -> Result<(), Box<Error>> {
/// let mut url = Url::parse("https://github.com/servo")?;
/// url.path_segments_mut().map_err(|_| "cannot be base")?
/// .extend(&["..", "rust-url", ".", "pulls"]);
/// assert_eq!(url.as_str(), "https://github.com/servo/rust-url/pulls"); /// assert_eq!(url.as_str(), "https://github.com/servo/rust-url/pulls");
/// # Ok(())
/// # }
/// # run().unwrap();
/// ``` /// ```
pub fn extend<I>(&mut self, segments: I) -> &mut Self pub fn extend<I>(&mut self, segments: I) -> &mut Self
where I: IntoIterator, I::Item: AsRef<str> { where I: IntoIterator, I::Item: AsRef<str> {

View File

@@ -14,7 +14,7 @@ extern crate url;
use std::borrow::Cow; use std::borrow::Cow;
use std::net::{Ipv4Addr, Ipv6Addr}; use std::net::{Ipv4Addr, Ipv6Addr};
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use url::{Host, Url, form_urlencoded}; use url::{Host, HostAndPort, Url, form_urlencoded};
#[test] #[test]
fn size() { fn size() {
@@ -255,6 +255,36 @@ fn test_form_serialize() {
assert_eq!(encoded, "foo=%C3%A9%26&bar=&foo=%23"); assert_eq!(encoded, "foo=%C3%A9%26&bar=&foo=%23");
} }
#[test]
fn host_and_port_display() {
assert_eq!(
format!(
"{}",
HostAndPort{ host: Host::Domain("www.mozilla.org"), port: 80}
),
"www.mozilla.org:80"
);
assert_eq!(
format!(
"{}",
HostAndPort::<String>{ host: Host::Ipv4(Ipv4Addr::new(1, 35, 33, 49)), port: 65535 }
),
"1.35.33.49:65535"
);
assert_eq!(
format!(
"{}",
HostAndPort::<String>{
host: Host::Ipv6(Ipv6Addr::new(
0x2001, 0x0db8, 0x85a3, 0x08d3, 0x1319, 0x8a2e, 0x0370, 0x7344
)),
port: 1337
})
,
"[2001:db8:85a3:8d3:1319:8a2e:370:7344]:1337"
)
}
#[test] #[test]
/// https://github.com/servo/rust-url/issues/25 /// https://github.com/servo/rust-url/issues/25
fn issue_25() { fn issue_25() {
@@ -344,6 +374,13 @@ fn test_set_host() {
assert_eq!(url.as_str(), "foobar:/hello"); assert_eq!(url.as_str(), "foobar:/hello");
} }
#[test]
// https://github.com/servo/rust-url/issues/166
fn test_leading_dots() {
assert_eq!(Host::parse(".org").unwrap(), Host::Domain(".org".to_owned()));
assert_eq!(Url::parse("file://./foo").unwrap().domain(), Some("."));
}
// This is testing that the macro produces buildable code when invoked // This is testing that the macro produces buildable code when invoked
// inside both a module and a function // inside both a module and a function
#[test] #[test]
@@ -372,3 +409,72 @@ fn define_encode_set_scopes() {
m::test(); m::test();
} }
#[test]
/// https://github.com/servo/rust-url/issues/302
fn test_origin_hash() {
use std::hash::{Hash,Hasher};
use std::collections::hash_map::DefaultHasher;
fn hash<T: Hash>(value: &T) -> u64 {
let mut hasher = DefaultHasher::new();
value.hash(&mut hasher);
hasher.finish()
}
let origin = &Url::parse("http://example.net/").unwrap().origin();
let origins_to_compare = [
Url::parse("http://example.net:80/").unwrap().origin(),
Url::parse("http://example.net:81/").unwrap().origin(),
Url::parse("http://example.net").unwrap().origin(),
Url::parse("http://example.net/hello").unwrap().origin(),
Url::parse("https://example.net").unwrap().origin(),
Url::parse("ftp://example.net").unwrap().origin(),
Url::parse("file://example.net").unwrap().origin(),
Url::parse("http://user@example.net/").unwrap().origin(),
Url::parse("http://user:pass@example.net/").unwrap().origin(),
];
for origin_to_compare in &origins_to_compare {
if origin == origin_to_compare {
assert_eq!(hash(origin), hash(origin_to_compare));
} else {
assert_ne!(hash(origin), hash(origin_to_compare));
}
}
let opaque_origin = Url::parse("file://example.net").unwrap().origin();
let same_opaque_origin = Url::parse("file://example.net").unwrap().origin();
let other_opaque_origin = Url::parse("file://other").unwrap().origin();
assert_ne!(hash(&opaque_origin), hash(&same_opaque_origin));
assert_ne!(hash(&opaque_origin), hash(&other_opaque_origin));
}
#[test]
fn test_windows_unc_path() {
    // UNC paths are Windows-only; this test is a no-op on other platforms.
    if cfg!(not(windows)) {
        return;
    }

    // A plain UNC path: the server name becomes the file-URL host.
    let plain = Url::from_file_path(Path::new(r"\\host\share\path\file.txt")).unwrap();
    assert_eq!(plain.as_str(), "file://host/share/path/file.txt");

    // A non-ASCII server name is IDNA (punycode) encoded in the host.
    let nonascii = Url::from_file_path(Path::new(r"\\höst\share\path\file.txt")).unwrap();
    assert_eq!(nonascii.as_str(), "file://xn--hst-sna/share/path/file.txt");

    // An IPv4 server name parses as an IP host and round-trips back to
    // the identical UNC path.
    let by_ip = Url::from_file_path(Path::new(r"\\192.168.0.1\share\path\file.txt")).unwrap();
    assert_eq!(by_ip.host(), Some(Host::Ipv4(Ipv4Addr::new(192, 168, 0, 1))));
    let roundtrip = by_ip.to_file_path().unwrap();
    assert_eq!(roundtrip.to_str(), Some(r"\\192.168.0.1\share\path\file.txt"));

    // Another way to write these: the extended-length "\\?\UNC\" prefix
    // produces the same URL as the plain form.
    let extended = Url::from_file_path(Path::new(r"\\?\UNC\host\share\path\file.txt")).unwrap();
    assert_eq!(extended.as_str(), "file://host/share/path/file.txt");

    // Paths starting with "\\.\" (Local Device Paths) are intentionally not supported.
    assert!(Url::from_file_path(Path::new(r"\\.\some\path\file.txt")).is_err());
}

View File

@@ -445,12 +445,12 @@ dependencies = [
[[package]] [[package]]
name = "idna" name = "idna"
version = "0.1.2" version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-bidi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-bidi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-normalization 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@@ -653,6 +653,11 @@ name = "peeking_take_while"
version = "0.1.2" version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "percent-encoding"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "phf" name = "phf"
version = "0.7.21" version = "0.7.21"
@@ -811,7 +816,7 @@ dependencies = [
"libc 0.2.24 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.24 (registry+https://github.com/rust-lang/crates.io-index)",
"nserror 0.1.0", "nserror 0.1.0",
"nsstring 0.1.0", "nsstring 0.1.0",
"url 1.4.1 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@@ -1129,7 +1134,7 @@ dependencies = [
[[package]] [[package]]
name = "unicode-normalization" name = "unicode-normalization"
version = "0.1.4" version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
@@ -1157,11 +1162,12 @@ dependencies = [
[[package]] [[package]]
name = "url" name = "url"
version = "1.4.1" version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"idna 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@@ -1310,7 +1316,7 @@ dependencies = [
"checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb" "checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
"checksum heapsize 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "5a376f7402b85be6e0ba504243ecbc0709c48019ecc6286d0540c2e359050c88" "checksum heapsize 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "5a376f7402b85be6e0ba504243ecbc0709c48019ecc6286d0540c2e359050c88"
"checksum heapsize 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4c7593b1522161003928c959c20a2ca421c68e940d63d75573316a009e48a6d4" "checksum heapsize 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4c7593b1522161003928c959c20a2ca421c68e940d63d75573316a009e48a6d4"
"checksum idna 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2233d4940b1f19f0418c158509cd7396b8d70a5db5705ce410914dc8fa603b37" "checksum idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "014b298351066f1512874135335d62a789ffe78a9974f94b43ed5621951eaf7d"
"checksum itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4833d6978da405305126af4ac88569b5d71ff758581ce5a987dbfa3755f694fc" "checksum itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4833d6978da405305126af4ac88569b5d71ff758581ce5a987dbfa3755f694fc"
"checksum itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eb2f404fbc66fd9aac13e998248505e7ecb2ad8e44ab6388684c5fb11c6c251c" "checksum itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eb2f404fbc66fd9aac13e998248505e7ecb2ad8e44ab6388684c5fb11c6c251c"
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
@@ -1333,6 +1339,7 @@ dependencies = [
"checksum parking_lot_core 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "56a19dcbb5d1e32b6cccb8a9aa1fc2a38418c8699652e735e2bf391a3dc0aa16" "checksum parking_lot_core 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "56a19dcbb5d1e32b6cccb8a9aa1fc2a38418c8699652e735e2bf391a3dc0aa16"
"checksum pdqsort 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ceca1642c89148ca05611cc775a0c383abef355fc4907c4e95f49f7b09d6287c" "checksum pdqsort 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ceca1642c89148ca05611cc775a0c383abef355fc4907c4e95f49f7b09d6287c"
"checksum peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" "checksum peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
"checksum percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de154f638187706bde41d9b4738748933d64e6b37bdbffc0b47a97d16a6ae356"
"checksum phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "cb325642290f28ee14d8c6201159949a872f220c62af6e110a56ea914fbe42fc" "checksum phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "cb325642290f28ee14d8c6201159949a872f220c62af6e110a56ea914fbe42fc"
"checksum phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "d62594c0bb54c464f633175d502038177e90309daf2e0158be42ed5f023ce88f" "checksum phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "d62594c0bb54c464f633175d502038177e90309daf2e0158be42ed5f023ce88f"
"checksum phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "6b07ffcc532ccc85e3afc45865469bf5d9e4ef5bfcf9622e3cfe80c2d275ec03" "checksum phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "6b07ffcc532ccc85e3afc45865469bf5d9e4ef5bfcf9622e3cfe80c2d275ec03"
@@ -1379,12 +1386,12 @@ dependencies = [
"checksum time 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)" = "211b63c112206356ef1ff9b19355f43740fc3f85960c598a93d3a3d3ba7beade" "checksum time 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)" = "211b63c112206356ef1ff9b19355f43740fc3f85960c598a93d3a3d3ba7beade"
"checksum toml 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "736b60249cb25337bc196faa43ee12c705e426f3d55c214d73a4e7be06f92cb4" "checksum toml 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "736b60249cb25337bc196faa43ee12c705e426f3d55c214d73a4e7be06f92cb4"
"checksum unicode-bidi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a6a2c4e3710edd365cd7e78383153ed739fa31af19f9172f72d3575060f5a43a" "checksum unicode-bidi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a6a2c4e3710edd365cd7e78383153ed739fa31af19f9172f72d3575060f5a43a"
"checksum unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e28fa37426fceeb5cf8f41ee273faa7c82c47dc8fba5853402841e665fcd86ff" "checksum unicode-normalization 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "51ccda9ef9efa3f7ef5d91e8f9b83bbe6955f9bf86aec89d5cce2c874625920f"
"checksum unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18127285758f0e2c6cf325bb3f3d138a12fee27de4f23e146cd6a179f26c2cf3" "checksum unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18127285758f0e2c6cf325bb3f3d138a12fee27de4f23e146cd6a179f26c2cf3"
"checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f" "checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
"checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc" "checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc"
"checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91" "checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91"
"checksum url 1.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3e2ba3456fbe5c0098cb877cf08b92b76c3e18e0be9e47c35b487220d377d24e" "checksum url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eeb819346883532a271eb626deb43c4a1bb4c4dd47c519bd78137c3e72a4fe27"
"checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122" "checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122"
"checksum vec_map 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "887b5b631c2ad01628bbbaa7dd4c869f80d3186688f8d0b6f58774fbe324988c" "checksum vec_map 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "887b5b631c2ad01628bbbaa7dd4c869f80d3186688f8d0b6f58774fbe324988c"
"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" "checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"

View File

@@ -443,12 +443,12 @@ dependencies = [
[[package]] [[package]]
name = "idna" name = "idna"
version = "0.1.2" version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-bidi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-bidi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-normalization 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@@ -640,6 +640,11 @@ name = "peeking_take_while"
version = "0.1.2" version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "percent-encoding"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "phf" name = "phf"
version = "0.7.21" version = "0.7.21"
@@ -798,7 +803,7 @@ dependencies = [
"libc 0.2.24 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.24 (registry+https://github.com/rust-lang/crates.io-index)",
"nserror 0.1.0", "nserror 0.1.0",
"nsstring 0.1.0", "nsstring 0.1.0",
"url 1.4.1 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@@ -1116,7 +1121,7 @@ dependencies = [
[[package]] [[package]]
name = "unicode-normalization" name = "unicode-normalization"
version = "0.1.4" version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
@@ -1144,11 +1149,12 @@ dependencies = [
[[package]] [[package]]
name = "url" name = "url"
version = "1.4.1" version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"idna 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@@ -1297,7 +1303,7 @@ dependencies = [
"checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb" "checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
"checksum heapsize 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "5a376f7402b85be6e0ba504243ecbc0709c48019ecc6286d0540c2e359050c88" "checksum heapsize 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "5a376f7402b85be6e0ba504243ecbc0709c48019ecc6286d0540c2e359050c88"
"checksum heapsize 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4c7593b1522161003928c959c20a2ca421c68e940d63d75573316a009e48a6d4" "checksum heapsize 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4c7593b1522161003928c959c20a2ca421c68e940d63d75573316a009e48a6d4"
"checksum idna 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2233d4940b1f19f0418c158509cd7396b8d70a5db5705ce410914dc8fa603b37" "checksum idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "014b298351066f1512874135335d62a789ffe78a9974f94b43ed5621951eaf7d"
"checksum itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4833d6978da405305126af4ac88569b5d71ff758581ce5a987dbfa3755f694fc" "checksum itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4833d6978da405305126af4ac88569b5d71ff758581ce5a987dbfa3755f694fc"
"checksum itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eb2f404fbc66fd9aac13e998248505e7ecb2ad8e44ab6388684c5fb11c6c251c" "checksum itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eb2f404fbc66fd9aac13e998248505e7ecb2ad8e44ab6388684c5fb11c6c251c"
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
@@ -1320,6 +1326,7 @@ dependencies = [
"checksum parking_lot_core 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "56a19dcbb5d1e32b6cccb8a9aa1fc2a38418c8699652e735e2bf391a3dc0aa16" "checksum parking_lot_core 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "56a19dcbb5d1e32b6cccb8a9aa1fc2a38418c8699652e735e2bf391a3dc0aa16"
"checksum pdqsort 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ceca1642c89148ca05611cc775a0c383abef355fc4907c4e95f49f7b09d6287c" "checksum pdqsort 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ceca1642c89148ca05611cc775a0c383abef355fc4907c4e95f49f7b09d6287c"
"checksum peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" "checksum peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
"checksum percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de154f638187706bde41d9b4738748933d64e6b37bdbffc0b47a97d16a6ae356"
"checksum phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "cb325642290f28ee14d8c6201159949a872f220c62af6e110a56ea914fbe42fc" "checksum phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "cb325642290f28ee14d8c6201159949a872f220c62af6e110a56ea914fbe42fc"
"checksum phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "d62594c0bb54c464f633175d502038177e90309daf2e0158be42ed5f023ce88f" "checksum phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "d62594c0bb54c464f633175d502038177e90309daf2e0158be42ed5f023ce88f"
"checksum phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "6b07ffcc532ccc85e3afc45865469bf5d9e4ef5bfcf9622e3cfe80c2d275ec03" "checksum phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "6b07ffcc532ccc85e3afc45865469bf5d9e4ef5bfcf9622e3cfe80c2d275ec03"
@@ -1366,12 +1373,12 @@ dependencies = [
"checksum time 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)" = "211b63c112206356ef1ff9b19355f43740fc3f85960c598a93d3a3d3ba7beade" "checksum time 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)" = "211b63c112206356ef1ff9b19355f43740fc3f85960c598a93d3a3d3ba7beade"
"checksum toml 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "736b60249cb25337bc196faa43ee12c705e426f3d55c214d73a4e7be06f92cb4" "checksum toml 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "736b60249cb25337bc196faa43ee12c705e426f3d55c214d73a4e7be06f92cb4"
"checksum unicode-bidi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a6a2c4e3710edd365cd7e78383153ed739fa31af19f9172f72d3575060f5a43a" "checksum unicode-bidi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a6a2c4e3710edd365cd7e78383153ed739fa31af19f9172f72d3575060f5a43a"
"checksum unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e28fa37426fceeb5cf8f41ee273faa7c82c47dc8fba5853402841e665fcd86ff" "checksum unicode-normalization 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "51ccda9ef9efa3f7ef5d91e8f9b83bbe6955f9bf86aec89d5cce2c874625920f"
"checksum unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18127285758f0e2c6cf325bb3f3d138a12fee27de4f23e146cd6a179f26c2cf3" "checksum unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18127285758f0e2c6cf325bb3f3d138a12fee27de4f23e146cd6a179f26c2cf3"
"checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f" "checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
"checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc" "checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc"
"checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91" "checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91"
"checksum url 1.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3e2ba3456fbe5c0098cb877cf08b92b76c3e18e0be9e47c35b487220d377d24e" "checksum url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eeb819346883532a271eb626deb43c4a1bb4c4dd47c519bd78137c3e72a4fe27"
"checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122" "checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122"
"checksum vec_map 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "887b5b631c2ad01628bbbaa7dd4c869f80d3186688f8d0b6f58774fbe324988c" "checksum vec_map 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "887b5b631c2ad01628bbbaa7dd4c869f80d3186688f8d0b6f58774fbe324988c"
"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" "checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"