Bug 1953191 - Vendor Rust and Python changes of the previous commits r=chutten,supply-chain-reviewers,mach-reviewers,ahal

Split out because the upgrades are spread across multiple commits, but we
require a single vendoring step to avoid duplicated packages.

Differential Revision: https://phabricator.services.mozilla.com/D241959
Jan-Erik Rediger
2025-04-07 20:39:50 +00:00
parent 7605226bfd
commit 3eb9ddea6a
387 changed files with 24263 additions and 10656 deletions


@@ -70,9 +70,9 @@ git = "https://github.com/jfkthame/mapped_hyph.git"
rev = "eff105f6ad7ec9b79816cfc1985a28e5340ad14b"
replace-with = "vendored-sources"
[source."git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a"]
[source."git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94"]
git = "https://github.com/mozilla/application-services"
rev = "d773da92641d92930b7308300e9fc2746a05ce6a"
rev = "8e84c588a5cc2686973c5026ecd240d6275d7e94"
replace-with = "vendored-sources"
[source."git+https://github.com/mozilla/audioipc?rev=e6f44a2bd1e57d11dfc737632a9e849077632330"]

Cargo.lock (generated)

@@ -189,38 +189,6 @@ dependencies = [
"libc",
]
[[package]]
name = "askama"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47cbc3cf73fa8d9833727bbee4835ba5c421a0d65b72daf9a7b5d0e0f9cfb57e"
dependencies = [
"askama_derive",
"askama_escape",
]
[[package]]
name = "askama_derive"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c22fbe0413545c098358e56966ff22cdd039e10215ae213cfbd65032b119fc94"
dependencies = [
"basic-toml",
"mime",
"mime_guess",
"nom",
"proc-macro2",
"quote",
"serde",
"syn",
]
[[package]]
name = "askama_escape"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "619743e34b5ba4e9703bba34deac3427c72507c7159f5fd030aea8cac0cfe341"
[[package]]
name = "async-task"
version = "4.3.0"
@@ -467,7 +435,7 @@ dependencies = [
"proc-macro2",
"quote",
"regex",
"rustc-hash",
"rustc-hash 1.999.999",
"shlex",
"syn",
]
@@ -1836,7 +1804,7 @@ dependencies = [
[[package]]
name = "error-support"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
dependencies = [
"error-support-macros",
"lazy_static",
@@ -1848,7 +1816,7 @@ dependencies = [
[[package]]
name = "error-support-macros"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
dependencies = [
"proc-macro2",
"quote",
@@ -1965,7 +1933,7 @@ dependencies = [
[[package]]
name = "firefox-versioning"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
dependencies = [
"serde_json",
"thiserror 1.999.999",
@@ -2017,7 +1985,7 @@ dependencies = [
"fluent-syntax",
"intl-memoizer",
"intl_pluralrules",
"rustc-hash",
"rustc-hash 1.999.999",
"self_cell",
"smallvec",
"unic-langid",
@@ -2034,7 +2002,7 @@ dependencies = [
"fluent-bundle",
"futures",
"once_cell",
"rustc-hash",
"rustc-hash 1.999.999",
"unic-langid",
]
@@ -2618,9 +2586,9 @@ dependencies = [
[[package]]
name = "glean"
version = "63.1.0"
version = "64.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2afa6754943cac5243099efd0d26e89cc8e06f1585776ba14ab0c6ee99e1f71"
checksum = "251b9cb685554b96dcf785dba69ce90447006dd6d9229db783336c981c3777e1"
dependencies = [
"crossbeam-channel",
"glean-core",
@@ -2632,9 +2600,9 @@ dependencies = [
[[package]]
name = "glean-core"
version = "63.1.0"
version = "64.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53cd53bb7a3b89b17d3989e95dd808b137ff47c504d1d19f14cb0d820cc2f42e"
checksum = "a49d1d62648ddeed8cb996373046ea45de93f1d1ff956aba054b9304bc305753"
dependencies = [
"android_logger",
"bincode",
@@ -3264,7 +3232,7 @@ dependencies = [
[[package]]
name = "interrupt-support"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
dependencies = [
"lazy_static",
"parking_lot",
@@ -3346,9 +3314,9 @@ dependencies = [
[[package]]
name = "itoa"
version = "1.0.5"
version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
[[package]]
name = "jexl-eval"
@@ -3484,7 +3452,7 @@ dependencies = [
"futures",
"pin-project-lite",
"replace_with",
"rustc-hash",
"rustc-hash 1.999.999",
"unic-langid",
]
@@ -4541,7 +4509,7 @@ dependencies = [
"indexmap",
"log",
"num-traits",
"rustc-hash",
"rustc-hash 1.999.999",
"serde",
"spirv",
"strum",
@@ -4970,7 +4938,7 @@ checksum = "d01a5bd0424d00070b0098dd17ebca6f961a959dead1dbcbbbc1d1cd8d3deeba"
[[package]]
name = "payload-support"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
dependencies = [
"serde",
"serde_derive",
@@ -5466,7 +5434,7 @@ checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da"
[[package]]
name = "relevancy"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
dependencies = [
"anyhow",
"base64 0.21.999",
@@ -5491,7 +5459,7 @@ dependencies = [
[[package]]
name = "remote_settings"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
dependencies = [
"anyhow",
"camino",
@@ -5536,6 +5504,45 @@ dependencies = [
"cache-padded",
]
[[package]]
name = "rinja"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3dc4940d00595430b3d7d5a01f6222b5e5b51395d1120bdb28d854bb8abb17a5"
dependencies = [
"itoa",
"rinja_derive",
]
[[package]]
name = "rinja_derive"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d9ed0146aef6e2825f1b1515f074510549efba38d71f4554eec32eb36ba18b"
dependencies = [
"basic-toml",
"memchr",
"mime",
"mime_guess",
"proc-macro2",
"quote",
"rinja_parser",
"rustc-hash 2.1.1",
"serde",
"syn",
]
[[package]]
name = "rinja_parser"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93f9a866e2e00a7a1fb27e46e9e324a6f7c0e7edc4543cae1d38f4e4a100c610"
dependencies = [
"memchr",
"nom",
"serde",
]
[[package]]
name = "rkv"
version = "0.19.0"
@@ -5688,9 +5695,16 @@ checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342"
[[package]]
name = "rustc-hash"
version = "1.1.0"
version = "1.999.999"
dependencies = [
"rustc-hash 2.1.1",
]
[[package]]
name = "rustc-hash"
version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
[[package]]
name = "rustc_version"
@@ -5776,7 +5790,7 @@ dependencies = [
[[package]]
name = "search"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
dependencies = [
"error-support",
"firefox-versioning",
@@ -6066,7 +6080,7 @@ dependencies = [
[[package]]
name = "sql-support"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
dependencies = [
"interrupt-support",
"lazy_static",
@@ -6265,7 +6279,7 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
[[package]]
name = "suggest"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
dependencies = [
"anyhow",
"chrono",
@@ -6317,7 +6331,7 @@ dependencies = [
[[package]]
name = "sync-guid"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
dependencies = [
"base64 0.21.999",
"rand",
@@ -6328,7 +6342,7 @@ dependencies = [
[[package]]
name = "sync15"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
dependencies = [
"anyhow",
"error-support",
@@ -6368,7 +6382,7 @@ dependencies = [
[[package]]
name = "tabs"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
dependencies = [
"anyhow",
"error-support",
@@ -6694,7 +6708,7 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6d3364c5e96cb2ad1603037ab253ddd34d7fb72a58bdddf4b7350760fc69a46"
dependencies = [
"rustc-hash",
"rustc-hash 1.999.999",
]
[[package]]
@@ -6712,7 +6726,7 @@ checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
[[package]]
name = "types"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
dependencies = [
"rusqlite",
"serde",
@@ -6802,9 +6816,9 @@ checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd"
[[package]]
name = "uniffi"
version = "0.28.2"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51ce6280c581045879e11b400bae14686a819df22b97171215d15549efa04ddb"
checksum = "fe34585ac0275accf6c284d0080cc2840f3898c551cda869ec291b5a4218712c"
dependencies = [
"anyhow",
"cargo_metadata",
@@ -6819,12 +6833,12 @@ name = "uniffi-bindgen-gecko-js"
version = "0.1.0"
dependencies = [
"anyhow",
"askama",
"camino",
"cargo_metadata",
"clap",
"extend",
"heck",
"rinja",
"serde",
"textwrap",
"toml",
@@ -6924,12 +6938,11 @@ dependencies = [
[[package]]
name = "uniffi_bindgen"
version = "0.28.2"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e9f25730c9db2e878521d606f54e921edb719cdd94d735e7f97705d6796d024"
checksum = "1a792af1424cc8b3c43b44c1a6cb7935ed1fbe5584a74f70e8bab9799740266d"
dependencies = [
"anyhow",
"askama",
"camino",
"cargo_metadata",
"fs-err",
@@ -6938,6 +6951,7 @@ dependencies = [
"heck",
"once_cell",
"paste",
"rinja",
"serde",
"textwrap",
"toml",
@@ -6948,9 +6962,9 @@ dependencies = [
[[package]]
name = "uniffi_build"
version = "0.28.2"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88dba57ac699bd8ec53d6a352c8dd0e479b33f698c5659831bb1e4ce468c07bd"
checksum = "00c4138211f2ae951018fcce6a978e1fcd1a47c3fd0bc0d5472a520520060db1"
dependencies = [
"anyhow",
"camino",
@@ -6958,36 +6972,33 @@ dependencies = [
]
[[package]]
name = "uniffi_checksum_derive"
version = "0.28.2"
name = "uniffi_core"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2c801f0f05b06df456a2da4c41b9c2c4fdccc6b9916643c6c67275c4c9e4d07"
checksum = "c18baace68a52666d33d12d73ca335ecf27a302202cefb53b1f974512bb72417"
dependencies = [
"anyhow",
"bytes",
"once_cell",
"static_assertions",
]
[[package]]
name = "uniffi_internal_macros"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9902d4ed16c65e6c0222241024dd0bfeed07ea3deb7c470eb175e5f5ef406cd"
dependencies = [
"quote",
"syn",
]
[[package]]
name = "uniffi_core"
version = "0.28.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61049e4db6212d0ede80982adf0e1d6fa224e6118387324c5cfbe3083dfb2252"
dependencies = [
"anyhow",
"bytes",
"log",
"once_cell",
"paste",
"static_assertions",
]
[[package]]
name = "uniffi_macros"
version = "0.28.2"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b40fd2249e0c5dcbd2bfa3c263db1ec981f7273dca7f4132bf06a272359a586c"
checksum = "9d82c82ef945c51082d8763635334b994e63e77650f09d0fae6d28dd08b1de83"
dependencies = [
"bincode",
"camino",
"fs-err",
"once_cell",
@@ -7001,21 +7012,20 @@ dependencies = [
[[package]]
name = "uniffi_meta"
version = "0.28.2"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c9ad57039b4fafdbf77428d74fff40e0908e5a1731e023c19cfe538f6d4a8ed6"
checksum = "8d6027b971c2aa86350dd180aee9819729c7b99bacd381534511ff29d2c09cea"
dependencies = [
"anyhow",
"bytes",
"siphasher",
"uniffi_checksum_derive",
"uniffi_internal_macros",
]
[[package]]
name = "uniffi_testing"
version = "0.28.2"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21fa171d4d258dc51bbd01893cc9608c1b62273d2f9ea55fb64f639e77824567"
checksum = "6301bcb50098dabcd304485318ba73f0f4db5e5d9d3c385c60b967810344ce90"
dependencies = [
"anyhow",
"camino",
@@ -7026,14 +7036,13 @@ dependencies = [
[[package]]
name = "uniffi_udl"
version = "0.28.2"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f52299e247419e7e2934bef2f94d7cccb0e6566f3248b1d48b160d8f369a2668"
checksum = "52300b7a4ab02dc159a038a13d5bfe27aefbad300d91b0b501b3dda094c1e0a2"
dependencies = [
"anyhow",
"textwrap",
"uniffi_meta",
"uniffi_testing",
"weedle2",
]
@@ -7099,7 +7108,7 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "viaduct"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
dependencies = [
"ffi-support",
"log",
@@ -7269,7 +7278,7 @@ dependencies = [
[[package]]
name = "webext-storage"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
dependencies = [
"anyhow",
"error-support",
@@ -7425,7 +7434,7 @@ dependencies = [
"parking_lot",
"profiling",
"ron",
"rustc-hash",
"rustc-hash 1.999.999",
"serde",
"smallvec",
"thiserror 2.0.9",

gfx/wr/Cargo.lock (generated)

@@ -87,38 +87,6 @@ dependencies = [
"term",
]
[[package]]
name = "askama"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47cbc3cf73fa8d9833727bbee4835ba5c421a0d65b72daf9a7b5d0e0f9cfb57e"
dependencies = [
"askama_derive",
"askama_escape",
]
[[package]]
name = "askama_derive"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c22fbe0413545c098358e56966ff22cdd039e10215ae213cfbd65032b119fc94"
dependencies = [
"basic-toml",
"mime",
"mime_guess",
"nom 7.1.1",
"proc-macro2",
"quote",
"serde",
"syn 2.0.32",
]
[[package]]
name = "askama_escape"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "619743e34b5ba4e9703bba34deac3427c72507c7159f5fd030aea8cac0cfe341"
[[package]]
name = "atty"
version = "0.2.14"
@@ -181,7 +149,7 @@ dependencies = [
"proc-macro2",
"quote",
"regex",
"rustc-hash",
"rustc-hash 1.1.0",
"shlex",
"syn 2.0.32",
"which",
@@ -285,32 +253,6 @@ name = "camino"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "869119e97797867fd90f5e22af7d0bd274bd4635ebb9eb68c04f3f513ae6c412"
dependencies = [
"serde",
]
[[package]]
name = "cargo-platform"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cbdb825da8a5df079a43676dbe042702f1707b1109f713a01420fbb4cc71fa27"
dependencies = [
"serde",
]
[[package]]
name = "cargo_metadata"
version = "0.15.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "982a0cf6a99c350d7246035613882e376d58cebe571785abc5da4f648d53ac0a"
dependencies = [
"camino",
"cargo-platform",
"semver",
"serde",
"serde_json",
"thiserror",
]
[[package]]
name = "cbitset"
@@ -1040,9 +982,9 @@ dependencies = [
[[package]]
name = "glean"
version = "63.1.0"
version = "64.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2afa6754943cac5243099efd0d26e89cc8e06f1585776ba14ab0c6ee99e1f71"
checksum = "251b9cb685554b96dcf785dba69ce90447006dd6d9229db783336c981c3777e1"
dependencies = [
"crossbeam-channel",
"glean-core",
@@ -1054,9 +996,9 @@ dependencies = [
[[package]]
name = "glean-core"
version = "63.1.0"
version = "64.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53cd53bb7a3b89b17d3989e95dd808b137ff47c504d1d19f14cb0d820cc2f42e"
checksum = "a49d1d62648ddeed8cb996373046ea45de93f1d1ff956aba054b9304bc305753"
dependencies = [
"android_logger",
"bincode",
@@ -1369,9 +1311,9 @@ dependencies = [
[[package]]
name = "itoa"
version = "1.0.2"
version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d"
checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
[[package]]
name = "jni-sys"
@@ -2328,6 +2270,45 @@ version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
[[package]]
name = "rinja"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3dc4940d00595430b3d7d5a01f6222b5e5b51395d1120bdb28d854bb8abb17a5"
dependencies = [
"itoa",
"rinja_derive",
]
[[package]]
name = "rinja_derive"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d9ed0146aef6e2825f1b1515f074510549efba38d71f4554eec32eb36ba18b"
dependencies = [
"basic-toml",
"memchr",
"mime",
"mime_guess",
"proc-macro2",
"quote",
"rinja_parser",
"rustc-hash 2.1.1",
"serde",
"syn 2.0.32",
]
[[package]]
name = "rinja_parser"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93f9a866e2e00a7a1fb27e46e9e324a6f7c0e7edc4543cae1d38f4e4a100c610"
dependencies = [
"memchr",
"nom 7.1.1",
"serde",
]
[[package]]
name = "rkv"
version = "0.19.0"
@@ -2372,7 +2353,7 @@ dependencies = [
"countme",
"hashbrown 0.12.3",
"memoffset",
"rustc-hash",
"rustc-hash 1.1.0",
"text-size",
]
@@ -2382,6 +2363,12 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
[[package]]
name = "rustc-hash"
version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
[[package]]
name = "rustix"
version = "0.38.38"
@@ -2453,9 +2440,6 @@ name = "semver"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2333e6df6d6598f2b1974829f853c2b4c5f4a6e503c10af918081aa6f8564e1"
dependencies = [
"serde",
]
[[package]]
name = "serde"
@@ -2846,9 +2830,9 @@ checksum = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
[[package]]
name = "uniffi"
version = "0.28.1"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2db87def739fe4183947f8419d572d1849a4a09355eba4e988a2105cfd0ac6a7"
checksum = "fe34585ac0275accf6c284d0080cc2840f3898c551cda869ec291b5a4218712c"
dependencies = [
"anyhow",
"uniffi_build",
@@ -2858,12 +2842,11 @@ dependencies = [
[[package]]
name = "uniffi_bindgen"
version = "0.28.1"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a112599c9556d1581e4a3d72019a74c2c3e122cc27f4af12577a429c4d5e614"
checksum = "1a792af1424cc8b3c43b44c1a6cb7935ed1fbe5584a74f70e8bab9799740266d"
dependencies = [
"anyhow",
"askama",
"camino",
"fs-err",
"glob",
@@ -2871,6 +2854,7 @@ dependencies = [
"heck",
"once_cell",
"paste",
"rinja",
"serde",
"textwrap 0.16.1",
"toml",
@@ -2880,9 +2864,9 @@ dependencies = [
[[package]]
name = "uniffi_build"
version = "0.28.1"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2b12684401d2a8508ca9c72a95bbc45906417e42fc80942abaf033bbf01aa33"
checksum = "00c4138211f2ae951018fcce6a978e1fcd1a47c3fd0bc0d5472a520520060db1"
dependencies = [
"anyhow",
"camino",
@@ -2890,37 +2874,33 @@ dependencies = [
]
[[package]]
name = "uniffi_checksum_derive"
version = "0.28.1"
name = "uniffi_core"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a22dbe67c1c957ac6e7611bdf605a6218aa86b0eebeb8be58b70ae85ad7d73dc"
checksum = "c18baace68a52666d33d12d73ca335ecf27a302202cefb53b1f974512bb72417"
dependencies = [
"anyhow",
"bytes",
"once_cell",
"static_assertions",
]
[[package]]
name = "uniffi_internal_macros"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9902d4ed16c65e6c0222241024dd0bfeed07ea3deb7c470eb175e5f5ef406cd"
dependencies = [
"quote",
"syn 2.0.32",
]
[[package]]
name = "uniffi_core"
version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a0c35aaad30e3a9e6d4fe34e358d64dbc92ee09045b48591b05fc9f12e0905b"
dependencies = [
"anyhow",
"bytes",
"camino",
"log",
"once_cell",
"paste",
"static_assertions",
]
[[package]]
name = "uniffi_macros"
version = "0.28.1"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db66474c5c61b0f7afc3b4995fecf9b72b340daa5ca0ef3da7778d75eb5482ea"
checksum = "9d82c82ef945c51082d8763635334b994e63e77650f09d0fae6d28dd08b1de83"
dependencies = [
"bincode",
"camino",
"fs-err",
"once_cell",
@@ -2934,39 +2914,24 @@ dependencies = [
[[package]]
name = "uniffi_meta"
version = "0.28.1"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d898893f102e0e39b8bcb7e3d2188f4156ba280db32db9e8af1f122d057e9526"
checksum = "8d6027b971c2aa86350dd180aee9819729c7b99bacd381534511ff29d2c09cea"
dependencies = [
"anyhow",
"bytes",
"siphasher",
"uniffi_checksum_derive",
]
[[package]]
name = "uniffi_testing"
version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c6aa4f0cf9d12172d84fc00a35a6c1f3522b526daad05ae739f709f6941b9b6"
dependencies = [
"anyhow",
"camino",
"cargo_metadata",
"fs-err",
"once_cell",
"uniffi_internal_macros",
]
[[package]]
name = "uniffi_udl"
version = "0.28.1"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b044e9c519e0bb51e516ab6f6d8f4f4dcf900ce30d5ad07c03f924e2824f28e"
checksum = "52300b7a4ab02dc159a038a13d5bfe27aefbad300d91b0b501b3dda094c1e0a2"
dependencies = [
"anyhow",
"textwrap 0.16.1",
"uniffi_meta",
"uniffi_testing",
"weedle2",
]


@@ -28,6 +28,8 @@ packages = [
"parking_lot_core",
"rand",
"rand_core",
# rinja (dependency of uniffi) requires 2.1, rowan requires 1.1
"rustc-hash",
# transition to syn 2 is underway.
"syn",
"synstructure",


@@ -531,6 +531,13 @@ start = "2023-06-21"
end = "2026-02-01"
notes = "Maintained by the Glean and Application Services teams"
[[wildcard-audits.uniffi_internal_macros]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
user-id = 111105 # Mark Hammond (mhammond)
start = "2025-03-18"
end = "2026-03-25"
[[wildcard-audits.uniffi_macros]]
who = "Ben Dean-Kawamura <bdk@mozilla.com>"
criteria = "safe-to-deploy"
@@ -6326,12 +6333,6 @@ non-1-byte-aligned type, however right now that is not the case
(submitted https://github.com/zip-rs/zip2/issues/198).
"""
[[audits.zlib-rs]]
who = "Mike Hommey <mh+mozilla@glandium.org>"
criteria = "safe-to-deploy"
delta = "0.2.1 -> 0.2.1@git:4aa430ccb77537d0d60dab8db993ca51bb1194c5"
importable = false
[[trusted.aho-corasick]]
criteria = "safe-to-deploy"
user-id = 189 # Andrew Gallant (BurntSushi)


@@ -297,14 +297,6 @@ criteria = "safe-to-deploy"
version = "0.2.0"
criteria = "safe-to-deploy"
[[exemptions.askama_derive]]
version = "0.11.2"
criteria = "safe-to-deploy"
[[exemptions.askama_escape]]
version = "0.10.3"
criteria = "safe-to-deploy"
[[exemptions.async-task]]
version = "4.0.3"
criteria = "safe-to-deploy"
@@ -526,10 +518,6 @@ criteria = "safe-to-deploy"
version = "0.10.3"
criteria = "safe-to-deploy"
[[exemptions.khronos-egl]]
version = "4.1.0"
criteria = "safe-to-deploy"
[[exemptions.khronos_api]]
version = "3.1.0"
criteria = "safe-to-deploy"
@@ -642,10 +630,6 @@ criteria = "safe-to-deploy"
version = "1.2.3"
criteria = "safe-to-deploy"
[[exemptions.rand]]
version = "0.8.5"
criteria = "safe-to-deploy"
[[exemptions.remove_dir_all]]
version = "0.5.3"
criteria = "safe-to-deploy"


@@ -258,15 +258,15 @@ user-login = "jrmuizel"
user-name = "Jeff Muizelaar"
[[publisher.glean]]
version = "63.1.0"
when = "2025-01-30"
version = "64.0.1"
when = "2025-04-01"
user-id = 48
user-login = "badboy"
user-name = "Jan-Erik Rediger"
[[publisher.glean-core]]
version = "63.1.0"
when = "2025-01-30"
version = "64.0.1"
when = "2025-04-01"
user-id = 48
user-login = "badboy"
user-name = "Jan-Erik Rediger"
@@ -335,8 +335,8 @@ user-login = "carllerche"
user-name = "Carl Lerche"
[[publisher.itoa]]
version = "1.0.5"
when = "2022-12-17"
version = "1.0.11"
when = "2024-03-26"
user-id = 3618
user-login = "dtolnay"
user-name = "David Tolnay"
@@ -711,58 +711,67 @@ user-login = "Manishearth"
user-name = "Manish Goregaokar"
[[publisher.uniffi]]
version = "0.28.2"
when = "2024-10-08"
user-id = 127697
user-login = "bendk"
version = "0.29.1"
when = "2025-03-18"
user-id = 111105
user-login = "mhammond"
user-name = "Mark Hammond"
[[publisher.uniffi_bindgen]]
version = "0.28.2"
when = "2024-10-08"
user-id = 127697
user-login = "bendk"
version = "0.29.1"
when = "2025-03-18"
user-id = 111105
user-login = "mhammond"
user-name = "Mark Hammond"
[[publisher.uniffi_build]]
version = "0.28.2"
when = "2024-10-08"
user-id = 127697
user-login = "bendk"
[[publisher.uniffi_checksum_derive]]
version = "0.28.2"
when = "2024-10-08"
user-id = 127697
user-login = "bendk"
version = "0.29.1"
when = "2025-03-18"
user-id = 111105
user-login = "mhammond"
user-name = "Mark Hammond"
[[publisher.uniffi_core]]
version = "0.28.2"
when = "2024-10-08"
user-id = 127697
user-login = "bendk"
version = "0.29.1"
when = "2025-03-18"
user-id = 111105
user-login = "mhammond"
user-name = "Mark Hammond"
[[publisher.uniffi_internal_macros]]
version = "0.29.1"
when = "2025-03-18"
user-id = 111105
user-login = "mhammond"
user-name = "Mark Hammond"
[[publisher.uniffi_macros]]
version = "0.28.2"
when = "2024-10-08"
user-id = 127697
user-login = "bendk"
version = "0.29.1"
when = "2025-03-18"
user-id = 111105
user-login = "mhammond"
user-name = "Mark Hammond"
[[publisher.uniffi_meta]]
version = "0.28.2"
when = "2024-10-08"
user-id = 127697
user-login = "bendk"
version = "0.29.1"
when = "2025-03-18"
user-id = 111105
user-login = "mhammond"
user-name = "Mark Hammond"
[[publisher.uniffi_testing]]
version = "0.28.2"
when = "2024-10-08"
user-id = 127697
user-login = "bendk"
version = "0.29.1"
when = "2025-03-18"
user-id = 111105
user-login = "mhammond"
user-name = "Mark Hammond"
[[publisher.uniffi_udl]]
version = "0.28.2"
when = "2024-10-08"
user-id = 127697
user-login = "bendk"
version = "0.29.1"
when = "2025-03-18"
user-id = 111105
user-login = "mhammond"
user-name = "Mark Hammond"
[[publisher.utf8_iter]]
version = "1.0.4"
@@ -1203,6 +1212,11 @@ crate is broadly used throughout the ecosystem and does not contain anything
suspicious.
"""
[[audits.bytecode-alliance.audits.itoa]]
who = "Dan Gohman <dev@sunfishcode.online>"
criteria = "safe-to-deploy"
delta = "1.0.11 -> 1.0.14"
[[audits.bytecode-alliance.audits.jobserver]]
who = "Alex Crichton <alex@alexcrichton.com>"
criteria = "safe-to-deploy"
@@ -1670,6 +1684,13 @@ criteria = "safe-to-run"
version = "0.14.20"
aggregated-from = "https://chromium.googlesource.com/chromiumos/third_party/rust_crates/+/refs/heads/main/cargo-vet/audits.toml?format=TEXT"
[[audits.google.audits.itoa]]
who = "Daniel Cheng <dcheng@chromium.org>"
criteria = "safe-to-deploy"
delta = "1.0.14 -> 1.0.15"
notes = "Only minor rustdoc changes."
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
[[audits.google.audits.nom]]
who = "danakj@chromium.org"
criteria = "safe-to-deploy"
@@ -1724,6 +1745,15 @@ Config-related changes in `test_size.rs`.
"""
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
[[audits.google.audits.rand]]
who = "Lukasz Anforowicz <lukasza@chromium.org>"
criteria = "safe-to-deploy"
version = "0.8.5"
notes = """
For more detailed unsafe review notes please see https://crrev.com/c/6362797
"""
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
[[audits.google.audits.rustversion]]
who = "Lukasz Anforowicz <lukasza@chromium.org>"
criteria = "safe-to-deploy"
@@ -2075,6 +2105,70 @@ who = "Ameer Ghani <inahga@divviup.org>"
criteria = "safe-to-deploy"
delta = "0.4.1 -> 0.4.2"
[[audits.mozilla.wildcard-audits.uniffi]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
user-id = 111105 # Mark Hammond (mhammond)
start = "2021-11-22"
end = "2026-01-13"
aggregated-from = "https://raw.githubusercontent.com/mozilla/glean/main/supply-chain/audits.toml"
[[audits.mozilla.wildcard-audits.uniffi_bindgen]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
user-id = 111105 # Mark Hammond (mhammond)
start = "2021-11-22"
end = "2026-01-13"
aggregated-from = "https://raw.githubusercontent.com/mozilla/glean/main/supply-chain/audits.toml"
[[audits.mozilla.wildcard-audits.uniffi_build]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
user-id = 111105 # Mark Hammond (mhammond)
start = "2021-11-22"
end = "2026-01-13"
aggregated-from = "https://raw.githubusercontent.com/mozilla/glean/main/supply-chain/audits.toml"
[[audits.mozilla.wildcard-audits.uniffi_core]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
user-id = 111105 # Mark Hammond (mhammond)
start = "2023-11-20"
end = "2026-01-13"
aggregated-from = "https://raw.githubusercontent.com/mozilla/glean/main/supply-chain/audits.toml"
[[audits.mozilla.wildcard-audits.uniffi_macros]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
user-id = 111105 # Mark Hammond (mhammond)
start = "2021-11-22"
end = "2026-01-13"
aggregated-from = "https://raw.githubusercontent.com/mozilla/glean/main/supply-chain/audits.toml"
[[audits.mozilla.wildcard-audits.uniffi_meta]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
user-id = 111105 # Mark Hammond (mhammond)
start = "2023-11-20"
end = "2026-01-13"
aggregated-from = "https://raw.githubusercontent.com/mozilla/glean/main/supply-chain/audits.toml"
[[audits.mozilla.wildcard-audits.uniffi_testing]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
user-id = 111105 # Mark Hammond (mhammond)
start = "2023-11-20"
end = "2026-01-13"
aggregated-from = "https://raw.githubusercontent.com/mozilla/glean/main/supply-chain/audits.toml"
[[audits.mozilla.wildcard-audits.uniffi_udl]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
user-id = 111105 # Mark Hammond (mhammond)
start = "2023-11-20"
end = "2026-01-13"
aggregated-from = "https://raw.githubusercontent.com/mozilla/glean/main/supply-chain/audits.toml"
[[audits.mozilla.wildcard-audits.weedle2]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
@@ -2093,20 +2187,6 @@ end = "2025-08-05"
notes = "Maintained by me"
aggregated-from = "https://raw.githubusercontent.com/mozilla/glean/main/supply-chain/audits.toml"
[[audits.mozilla.audits.askama]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
delta = "0.11.1 -> 0.12.0"
notes = "No new unsafe usage, mostly dependency updates and smaller API changes"
aggregated-from = "https://raw.githubusercontent.com/mozilla/glean/main/supply-chain/audits.toml"
[[audits.mozilla.audits.askama_derive]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
delta = "0.11.2 -> 0.12.1"
notes = "Dependency updates, a new toml dependency and some API changes. No unsafe use."
aggregated-from = "https://raw.githubusercontent.com/mozilla/glean/main/supply-chain/audits.toml"
[[audits.mozilla.audits.basic-toml]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"


@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: glean_parser
Version: 16.2.0
Version: 17.0.1
Summary: Parser tools for Mozilla's Glean telemetry
Home-page: https://github.com/mozilla/glean_parser
Author: The Glean Team
@@ -79,6 +79,14 @@ $ glean_parser check < ping.json
## Unreleased
## 17.0.1
- BUGFIX: Fix missing `ping_arg` "`uploader_capabilities`" in util.py ([#786](https://github.com/mozilla/glean_parser/pull/786))
## 17.0.0
- BREAKING CHANGE: Support `uploader_capabilities` for pings ([bug 1920732](https://bugzilla.mozilla.org/show_bug.cgi?id=1920732))
## 16.2.0
- New lint: error when there are metrics whose names are too similar ([bug 1934099](https://bugzilla.mozilla.org/show_bug.cgi?id=1934099))


@@ -10,7 +10,7 @@ glean_parser/lint.py,sha256=ktdkR2GjR0wuR4IpLTiZ-q17vI4dk_Nebp4XU3pqzsk,21103
glean_parser/markdown.py,sha256=GkCr1CrV6mnRQseT6FO1-JJ7Eup8X3lxUfRMBTxXpe4,9066
glean_parser/metrics.py,sha256=3_ERPI63CsH_QvXVKNBVKIQTv4KWir2SfSbtn6J8a9Q,15842
glean_parser/parser.py,sha256=3bJwUGYhnzIHYJ7UBdO63Oi0_n1_Twvr2IOUUe_koME,18132
glean_parser/pings.py,sha256=xUgAunjluLbdLtcSQiUL6HDO5aLYM75MAoIT9H8-lOY,3729
glean_parser/pings.py,sha256=l4hKmnKigS46vlHFI4aWueKVHcZQL36QfhK0VC8OiFA,3924
glean_parser/python_server.py,sha256=ERpYcbSwF19xKFagxX0mZAvlR1y6D7Ah5DSvW8LipCY,4791
glean_parser/ruby_server.py,sha256=e5lkfcLQAUMUBQDCjqNU82LkdUzT5x-G6HOnsUInbsU,5190
glean_parser/rust.py,sha256=u1IeluyxFj6NrZZrBQwwa0nWz0TABv93lYxVBx0aN3I,7334
@@ -19,12 +19,12 @@ glean_parser/swift.py,sha256=paUzF6tItdktFwIQYCKsYpqXfn8zxR2coU_jMYrmwlc,8957
glean_parser/tags.py,sha256=bemKYvcbMO4JrghiNSe-A4BNNDtx_FlUPkgrPPJy84Y,1391
glean_parser/translate.py,sha256=itObn41X63koLYjdppLiywIFzPWDvPEx7C13efWpDSE,8444
glean_parser/translation_options.py,sha256=Lxzr6G7MP0tC_ZYlZXftS4j0SLiqO-5mGVTEc7ggXis,2037
glean_parser/util.py,sha256=YigUFMhzbXucNx3_bU-SAFWSnnyKS73pQWMBqxRGNn8,16551
glean_parser/util.py,sha256=yTx_-Q8w8rNNSZ_xbno0B90WR7pZZptG2bUWU0sCHZk,16580
glean_parser/validate_ping.py,sha256=0TNvILH6dtzJDys3W8Kqorw6kk03me73OCUDtpoHcXU,2118
glean_parser/schemas/metrics.1-0-0.schema.yaml,sha256=cND3cvi6iBfPUVmtfIBQfGJV9AALpbvN7nu8E33_J-o,19566
glean_parser/schemas/metrics.2-0-0.schema.yaml,sha256=ieFMxezBuySCvUorx8eGqXRUcoeTql4Z9FxkbkG9XFQ,26715
glean_parser/schemas/pings.1-0-0.schema.yaml,sha256=hwCnsKpEysmrmVp-QHGBArEkVY3vaU1rVsxlTwhAzws,4315
glean_parser/schemas/pings.2-0-0.schema.yaml,sha256=5k_OKfNkxHed4j1kMFGkxW1yDpvkqst-Vogig_W_JJU,6420
glean_parser/schemas/pings.2-0-0.schema.yaml,sha256=FQBsEt8Eg_ypBUnhJ1THZWXIgtuiYfyXcp_J9pGJUnE,7001
glean_parser/schemas/tags.1-0-0.schema.yaml,sha256=OGXIJlvvVW1vaqB_NVZnwKeZ-sLlfH57vjBSHbj6DNI,1231
glean_parser/templates/data_review.jinja2,sha256=jeYU29T1zLSyu9fKBBFu5BFPfIw8_hmOUXw8RXhRXK8,3287
glean_parser/templates/go_server.jinja2,sha256=t9ZS5AF5JwoVExkSxDoRQdBoTQv1bchJ7oTRB9eP5FI,9241
@@ -37,13 +37,13 @@ glean_parser/templates/markdown.jinja2,sha256=vAHHGGm28HRDPd3zO_wQMAUZIuxE9uQ7hl
glean_parser/templates/python_server.jinja2,sha256=gu2C1rkn760IqBCG2SWaK7o32T1ify94wDEsudLPUg8,7260
glean_parser/templates/qmldir.jinja2,sha256=m6IGsp-tgTiOfQ7VN8XW6GqX0gJqJkt3B6Pkaul6FVo,156
glean_parser/templates/ruby_server.jinja2,sha256=B0pbuld3j_0s7uMjoaCo8_6ehJUZeTXZlZZ9QRS4J_8,6252
glean_parser/templates/rust.jinja2,sha256=mdYEsldHLMb2Hkzly-NJzkFINg7qMZo7MjDI_2ZqS3U,7247
glean_parser/templates/rust.jinja2,sha256=hX8p5HXQNEeVz_sF6SDIyUNus6CcaCG9KWLl6uQLiOU,7285
glean_parser/templates/rust_server.jinja2,sha256=JJdeU5jiWx9aWpF0qiXIYztJ14OQKxV3VFdAbCrtR_0,12841
glean_parser/templates/swift.jinja2,sha256=EAenC__ReGA2A4tn-ui3n849PVHxE5IndXUivXDh8AU,6841
glean_parser-16.2.0.dist-info/AUTHORS.md,sha256=yxgj8MioO4wUnrh0gmfb8l3DJJrf-l4HmmEDbQsbbNI,455
glean_parser-16.2.0.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
glean_parser-16.2.0.dist-info/METADATA,sha256=rY8wmuwWXN1DnL3-VguHQr08H2WUoNlzvPgrfYIkVkU,36477
glean_parser-16.2.0.dist-info/WHEEL,sha256=cVxcB9AmuTcXqmwrtPhNK88dr7IR_b6qagTj0UvIEbY,91
glean_parser-16.2.0.dist-info/entry_points.txt,sha256=mf9d3sv8BwSjjR58x9KDnpVkONCnv3fPQC2NjJl15Xg,68
glean_parser-16.2.0.dist-info/top_level.txt,sha256=q7T3duD-9tYZFyDry6Wv2LcdMsK2jGnzdDFhxWcT2Z8,13
glean_parser-16.2.0.dist-info/RECORD,,
glean_parser/templates/swift.jinja2,sha256=L_JpwGLVzmOf1FYLoCzFu_RnGTExCIDup7iR1tWzD3o,6912
glean_parser-17.0.1.dist-info/AUTHORS.md,sha256=yxgj8MioO4wUnrh0gmfb8l3DJJrf-l4HmmEDbQsbbNI,455
glean_parser-17.0.1.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
glean_parser-17.0.1.dist-info/METADATA,sha256=fX3p807Z0tgo8pjTGNX4Fxw3gPXT6dJrW_Cw7lnAE_4,36761
glean_parser-17.0.1.dist-info/WHEEL,sha256=cVxcB9AmuTcXqmwrtPhNK88dr7IR_b6qagTj0UvIEbY,91
glean_parser-17.0.1.dist-info/entry_points.txt,sha256=mf9d3sv8BwSjjR58x9KDnpVkONCnv3fPQC2NjJl15Xg,68
glean_parser-17.0.1.dist-info/top_level.txt,sha256=q7T3duD-9tYZFyDry6Wv2LcdMsK2jGnzdDFhxWcT2Z8,13
glean_parser-17.0.1.dist-info/RECORD,,


@@ -26,6 +26,7 @@ class Ping:
notification_emails: List[str],
metadata: Optional[Dict] = None,
data_reviews: Optional[List[str]] = None,
uploader_capabilities: Optional[List[str]] = None,
include_client_id: bool = False,
send_if_empty: bool = False,
reasons: Optional[Dict[str, str]] = None,
@@ -57,6 +58,9 @@ class Ping:
if data_reviews is None:
data_reviews = []
self.data_reviews = data_reviews
if not uploader_capabilities:
uploader_capabilities = []
self.uploader_capabilities = uploader_capabilities
self.include_client_id = include_client_id
self.send_if_empty = send_if_empty
if reasons is None:


@@ -127,6 +127,21 @@ additionalProperties:
only takes effect when `metadata.include_info_sections` is `true`.
type: boolean
uploader_capabilities:
title: Uploader Capabilities
description: |
**Optional.**
An optional list of capability strings that the ping uploader must be
capable of supporting in order to upload this ping.
These are supplied exactly as defined (including order) to the uploader
every time upload is attempted for this ping.
The uploader must only attempt upload if it satisfies the supplied
capabilities. If not, it must refuse to upload the ping.
type: [array, "null"]
items:
type: string
send_if_empty:
title: Send if empty
description: |
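
The contract spelled out above (upload only if every listed capability is satisfied, otherwise refuse) amounts to a subset check on the uploader side. A minimal sketch, with all names and capability strings purely illustrative:

```rust
use std::collections::HashSet;

/// Illustrative gate for the rule described in the schema: a ping may only
/// be uploaded if the uploader supports every capability the ping requires.
fn may_upload(required: &[String], supported: &HashSet<String>) -> bool {
    required.iter().all(|cap| supported.contains(cap.as_str()))
}

fn main() {
    let supported: HashSet<String> =
        ["some-capability".to_string()].into_iter().collect();
    // No required capabilities: upload may always be attempted.
    assert!(may_upload(&[], &supported));
    // An unsupported capability: the ping must be refused.
    assert!(!may_upload(&["other-capability".to_string()], &supported));
}
```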


@@ -97,7 +97,7 @@ CommonMetricData {
/// {{ obj.description|wordwrap() | replace('\n', '\n/// ') }}
#[rustfmt::skip]
pub static {{ obj.name|snake_case }}: ::glean::private::__export::Lazy<::glean::private::PingType> =
::glean::private::__export::Lazy::new(|| ::glean::private::PingType::new("{{ obj.name }}", {{ obj.include_client_id|rust }}, {{ obj.send_if_empty|rust }}, {{ obj.precise_timestamps|rust }}, {{ obj.include_info_sections|rust }}, {{ obj.enabled|rust }}, {{ obj.schedules_pings|rust }}, {{ obj.reason_codes|rust }}, {{ obj.follows_collection_enabled|rust }}));
::glean::private::__export::Lazy::new(|| ::glean::private::PingType::new("{{ obj.name }}", {{ obj.include_client_id|rust }}, {{ obj.send_if_empty|rust }}, {{ obj.precise_timestamps|rust }}, {{ obj.include_info_sections|rust }}, {{ obj.enabled|rust }}, {{ obj.schedules_pings|rust }}, {{ obj.reason_codes|rust }}, {{ obj.follows_collection_enabled|rust }}, {{ obj.uploader_capabilities|rust }}));
{% endfor %}
{% else %}
pub mod {{ category.name|snake_case }} {
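
The template change above appends `uploader_capabilities` as the final argument of the generated `PingType::new` call. Assuming a hypothetical ping named `my-ping` and that the `rust` filter renders a string list as a `vec![...]` literal (both assumptions for illustration), the rendered output would look roughly like:

```rust
// Hypothetical rendering of the updated rust.jinja2; the ping name, the
// capability value, and the exact literal syntax are illustrative.
#[rustfmt::skip]
pub static my_ping: ::glean::private::__export::Lazy<::glean::private::PingType> =
    ::glean::private::__export::Lazy::new(|| ::glean::private::PingType::new(
        "my-ping",
        /* include_client_id */ true,
        /* send_if_empty */ false,
        /* precise_timestamps */ true,
        /* include_info_sections */ true,
        /* enabled */ true,
        /* schedules_pings */ vec![],
        /* reason_codes */ vec![],
        /* follows_collection_enabled */ true,
        /* uploader_capabilities */ vec!["required-capability".to_string()],
    ));
```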


@@ -144,7 +144,8 @@ extension {{ namespace }} {
enabled: {{obj.enabled|swift}},
schedulesPings: {{obj.schedules_pings|swift}},
reasonCodes: {{obj.reason_codes|swift}},
followsCollectionEnabled: {{obj.follows_collection_enabled|swift}}
followsCollectionEnabled: {{obj.follows_collection_enabled|swift}},
uploaderCapabilities: {{obj.uploader_capabilities|swift}}
)
{% endfor %}


@@ -546,6 +546,7 @@ ping_args = [
"schedules_pings",
"reason_codes",
"follows_collection_enabled",
"uploader_capabilities",
]


@@ -358,9 +358,9 @@ gitignorant==0.3.1 \
giturlparse==0.12.0 \
--hash=sha256:c0fff7c21acc435491b1779566e038757a205c1ffdcb47e4f81ea52ad8c3859a \
--hash=sha256:412b74f2855f1da2fefa89fd8dde62df48476077a72fc19b62039554d27360eb
glean-parser==16.2.0 \
--hash=sha256:4f6794b41b6e69cbceaee2a5b835a74cdfe443d1fbf4e2656ac40ba72cc27458 \
--hash=sha256:dc521d87b6d9c04f2006509be0aa2cdf0e923338521d9acad221d4e23caaace8
glean-parser==17.0.1 \
--hash=sha256:764a3b5aaa22d6100100d97a6ce58515687f4e0d06660180024e3101a507f580 \
--hash=sha256:922b75be353461875802a50dfd052215414dbc2965d153b95ff31d85081c40b7
idna==3.10 \
--hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \
--hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3

third_party/python/uv.lock (generated, vendored)

@@ -598,7 +598,7 @@ wheels = [
[[package]]
name = "glean-parser"
version = "16.2.0"
version = "17.0.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
@@ -608,9 +608,9 @@ dependencies = [
{ name = "platformdirs" },
{ name = "pyyaml" },
]
sdist = { url = "https://files.pythonhosted.org/packages/88/74/c9d3ca070ca08399b5ee32d4f85adab9fe95faf44785655fbd283c20f4cb/glean_parser-16.2.0.tar.gz", hash = "sha256:4f6794b41b6e69cbceaee2a5b835a74cdfe443d1fbf4e2656ac40ba72cc27458", size = 289721 }
sdist = { url = "https://files.pythonhosted.org/packages/7d/fd/895160c0fbc1ced0803bd19a2c2473f537efd8e6afa38aae2af12d5535b9/glean_parser-17.0.1.tar.gz", hash = "sha256:764a3b5aaa22d6100100d97a6ce58515687f4e0d06660180024e3101a507f580", size = 290273 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7c/2b/b7cedca86929673c89f4350e257ab720a6ea014a684b99c2814ad279b716/glean_parser-16.2.0-py3-none-any.whl", hash = "sha256:dc521d87b6d9c04f2006509be0aa2cdf0e923338521d9acad221d4e23caaace8", size = 123785 },
{ url = "https://files.pythonhosted.org/packages/2d/02/dcc2f155ef74fb2c83d51b7170236f4648adb49f645bac4e5786dc3ac77c/glean_parser-17.0.1-py3-none-any.whl", hash = "sha256:922b75be353461875802a50dfd052215414dbc2965d153b95ff31d85081c40b7", size = 124106 },
]
[[package]]
@@ -925,7 +925,7 @@ requires-dist = [
{ name = "filelock", specifier = "~=3.6" },
{ name = "fluent-migrate", specifier = "==0.13.2" },
{ name = "fluent-syntax", specifier = "==0.19.0" },
{ name = "glean-parser", specifier = "==16.2.0" },
{ name = "glean-parser", specifier = "==17.0.1" },
{ name = "importlib-metadata", specifier = "==6.0.0" },
{ name = "jinja2", specifier = "==3.1.2" },
{ name = "jsmin", specifier = "==3.0.0" },


@@ -1 +1 @@
dafec412f602d87682118ce75e91a25ed9ad9d9841b6667a3e9727dad246839b
6811e7a47781ee2d8b491fa59de0ad7a40375d04beeab0f203d5e423f0db5e48


@@ -1 +0,0 @@
{"files":{"Cargo.toml":"fbab611fc3ba2204942300a534b4f030460f33b0606fa50b9ad08ea567ba81e8","LICENSE-APACHE":"87cb0d734c723c083e51c825930ff42bce28596b52dee15567f6b28f19c195e3","LICENSE-MIT":"df20e0180764bf5bd76f74d47bc9e8c0069a666401629c390003a1d5eba99c92","README.md":"6a4430cf614ff9d36ba01463a8f94085ed4b0889fd719793fa914568247acce2","src/error.rs":"1e3f8020092469090f314f60685c077347e730a88222dfdaa38aaf2396507532","src/filters/json.rs":"dccd0a3f1017da9f6cd9650bd39eb1670f4a9833d2f0968614cd8cd65d18a9dd","src/filters/mod.rs":"903d09599e62f56657b00b2aa577c9d2f963348dd12a1029e90e68549f78b1db","src/filters/yaml.rs":"4e641bedbe3666b334836fb6603fe7f718f7e90d8e33419acca624f50a580c3f","src/helpers.rs":"76e0422acd4ccba7b1735d6ab7622a93f6ec5a2fa89531111d877266784d5334","src/lib.rs":"3a6e4d0b3aadc7c391cbe59416504a719406303726122779281a3af1a7ad76a4"},"package":"47cbc3cf73fa8d9833727bbee4835ba5c421a0d65b72daf9a7b5d0e0f9cfb57e"}


@@ -1,126 +0,0 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2021"
rust-version = "1.58"
name = "askama"
version = "0.12.0"
description = "Type-safe, compiled Jinja-like templates for Rust"
homepage = "https://github.com/djc/askama"
documentation = "https://docs.rs/askama"
readme = "README.md"
keywords = [
"markup",
"template",
"jinja2",
"html",
]
categories = ["template-engine"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/djc/askama"
resolver = "1"
[package.metadata.docs.rs]
features = [
"config",
"humansize",
"num-traits",
"serde-json",
"serde-yaml",
]
[dependencies.askama_derive]
version = "0.12.0"
[dependencies.askama_escape]
version = "0.10.3"
[dependencies.comrak]
version = "0.16"
optional = true
default-features = false
[dependencies.dep_humansize]
version = "2"
optional = true
package = "humansize"
[dependencies.dep_num_traits]
version = "0.2.6"
optional = true
package = "num-traits"
[dependencies.percent-encoding]
version = "2.1.0"
optional = true
[dependencies.serde]
version = "1.0"
features = ["derive"]
optional = true
[dependencies.serde_json]
version = "1.0"
optional = true
[dependencies.serde_yaml]
version = "0.9"
optional = true
[features]
config = ["askama_derive/config"]
default = [
"config",
"humansize",
"num-traits",
"urlencode",
]
humansize = [
"askama_derive/humansize",
"dep_humansize",
]
markdown = [
"askama_derive/markdown",
"comrak",
]
mime = []
mime_guess = []
num-traits = [
"askama_derive/num-traits",
"dep_num_traits",
]
serde-json = [
"askama_derive/serde-json",
"askama_escape/json",
"serde",
"serde_json",
]
serde-yaml = [
"askama_derive/serde-yaml",
"serde",
"serde_yaml",
]
urlencode = [
"askama_derive/urlencode",
"percent-encoding",
]
with-actix-web = ["askama_derive/with-actix-web"]
with-axum = ["askama_derive/with-axum"]
with-gotham = ["askama_derive/with-gotham"]
with-hyper = ["askama_derive/with-hyper"]
with-mendes = ["askama_derive/with-mendes"]
with-rocket = ["askama_derive/with-rocket"]
with-tide = ["askama_derive/with-tide"]
with-warp = ["askama_derive/with-warp"]
[badges.maintenance]
status = "actively-developed"


@@ -1,96 +0,0 @@
# Askama
[![Documentation](https://docs.rs/askama/badge.svg)](https://docs.rs/askama/)
[![Latest version](https://img.shields.io/crates/v/askama.svg)](https://crates.io/crates/askama)
[![Build Status](https://github.com/djc/askama/workflows/CI/badge.svg)](https://github.com/djc/askama/actions?query=workflow%3ACI)
[![Chat](https://badges.gitter.im/gitterHQ/gitter.svg)](https://gitter.im/djc/askama)
Askama implements a template rendering engine based on [Jinja](https://jinja.palletsprojects.com/).
It generates Rust code from your templates at compile time
based on a user-defined `struct` to hold the template's context.
See below for an example, or read [the book][docs].
**"Pretty exciting. I would love to use this already."** --
[Armin Ronacher][mitsuhiko], creator of Jinja
All feedback welcome. Feel free to file bugs, requests for documentation and
any other feedback to the [issue tracker][issues] or [tweet me][twitter].
Askama was created by and is maintained by Dirkjan Ochtman. If you are in a
position to support ongoing maintenance and further development or use it
in a for-profit context, please consider supporting my open source work on
[Patreon][patreon].
### Feature highlights
* Construct templates using a familiar, easy-to-use syntax
* Benefit from the safety provided by Rust's type system
* Template code is compiled into your crate for [optimal performance][benchmarks]
* Optional built-in support for Actix, Axum, Gotham, Mendes, Rocket, tide, and warp web frameworks
* Debugging features to assist you in template development
* Templates must be valid UTF-8 and produce UTF-8 when rendered
* IDE support available in [JetBrains products](https://plugins.jetbrains.com/plugin/16591-askama-template-support)
* Works on stable Rust
### Supported in templates
* Template inheritance
* Loops, if/else statements and include support
* Macro support
* Variables (no mutability allowed)
* Some built-in filters, and the ability to use your own
* Whitespace suppressing with '-' markers
* Opt-out HTML escaping
* Syntax customization
[docs]: https://djc.github.io/askama/
[fafhrd91]: https://github.com/fafhrd91
[mitsuhiko]: http://lucumr.pocoo.org/
[issues]: https://github.com/djc/askama/issues
[twitter]: https://twitter.com/djco/
[patreon]: https://www.patreon.com/dochtman
[benchmarks]: https://github.com/djc/template-benchmarks-rs
How to get started
------------------
First, add the following to your crate's `Cargo.toml`:
```toml
# in section [dependencies]
askama = "0.11.2"
```
Now create a directory called `templates` in your crate root.
In it, create a file called `hello.html`, containing the following:
```
Hello, {{ name }}!
```
In any Rust file inside your crate, add the following:
```rust
use askama::Template; // bring trait in scope
#[derive(Template)] // this will generate the code...
#[template(path = "hello.html")] // using the template in this path, relative
// to the `templates` dir in the crate root
struct HelloTemplate<'a> { // the name of the struct can be anything
name: &'a str, // the field name should match the variable name
// in your template
}
fn main() {
let hello = HelloTemplate { name: "world" }; // instantiate your struct
println!("{}", hello.render().unwrap()); // then render it.
}
```
You should now be able to compile and run this code.
Review the [test cases] for more examples.
[test cases]: https://github.com/djc/askama/tree/main/testing


@@ -1,44 +0,0 @@
use crate::error::{Error, Result};
use askama_escape::JsonEscapeBuffer;
use serde::Serialize;
use serde_json::to_writer_pretty;
/// Serialize to JSON (requires `json` feature)
///
/// The generated string does not contain ampersands `&`, chevrons `< >`, or apostrophes `'`.
/// To use it in a `<script>` you can combine it with the safe filter:
///
/// ``` html
/// <script>
/// var data = {{data|json|safe}};
/// </script>
/// ```
///
/// To use it in HTML attributes, you can either use it in quotation marks `"{{data|json}}"` as is,
/// or in apostrophes with the (optional) safe filter `'{{data|json|safe}}'`.
/// In HTML texts the output of e.g. `<pre>{{data|json|safe}}</pre>` is safe, too.
pub fn json<S: Serialize>(s: S) -> Result<String> {
let mut writer = JsonEscapeBuffer::new();
to_writer_pretty(&mut writer, &s).map_err(Error::from)?;
Ok(writer.finish())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_json() {
assert_eq!(json(true).unwrap(), "true");
assert_eq!(json("foo").unwrap(), r#""foo""#);
assert_eq!(json(true).unwrap(), "true");
assert_eq!(json("foo").unwrap(), r#""foo""#);
assert_eq!(
json(vec!["foo", "bar"]).unwrap(),
r#"[
"foo",
"bar"
]"#
);
}
}


@@ -1,640 +0,0 @@
//! Module for built-in filter functions
//!
//! Contains all the built-in filter functions for use in templates.
//! You can define your own filters, as well.
//! For more information, read the [book](https://djc.github.io/askama/filters.html).
#![allow(clippy::trivially_copy_pass_by_ref)]
use std::fmt::{self, Write};
#[cfg(feature = "serde-json")]
mod json;
#[cfg(feature = "serde-json")]
pub use self::json::json;
#[cfg(feature = "serde-yaml")]
mod yaml;
#[cfg(feature = "serde-yaml")]
pub use self::yaml::yaml;
#[allow(unused_imports)]
use crate::error::Error::Fmt;
use askama_escape::{Escaper, MarkupDisplay};
#[cfg(feature = "humansize")]
use dep_humansize::{format_size_i, ToF64, DECIMAL};
#[cfg(feature = "num-traits")]
use dep_num_traits::{cast::NumCast, Signed};
#[cfg(feature = "percent-encoding")]
use percent_encoding::{utf8_percent_encode, AsciiSet, NON_ALPHANUMERIC};
use super::Result;
#[cfg(feature = "percent-encoding")]
// Urlencode char encoding set. Only the characters in the unreserved set don't
// have any special purpose in any part of a URI and can be safely left
// unencoded as specified in https://tools.ietf.org/html/rfc3986.html#section-2.3
const URLENCODE_STRICT_SET: &AsciiSet = &NON_ALPHANUMERIC
.remove(b'_')
.remove(b'.')
.remove(b'-')
.remove(b'~');
#[cfg(feature = "percent-encoding")]
// Same as URLENCODE_STRICT_SET, but preserves forward slashes for encoding paths
const URLENCODE_SET: &AsciiSet = &URLENCODE_STRICT_SET.remove(b'/');
/// Marks a string (or other `Display` type) as safe
///
/// Use this if you want to allow markup in an expression, or if you know
/// that the expression's contents don't need to be escaped.
///
/// Askama will automatically insert the first (`Escaper`) argument,
/// so this filter only takes a single argument of any type that implements
/// `Display`.
pub fn safe<E, T>(e: E, v: T) -> Result<MarkupDisplay<E, T>>
where
E: Escaper,
T: fmt::Display,
{
Ok(MarkupDisplay::new_safe(v, e))
}
/// Escapes strings according to the escape mode.
///
/// Askama will automatically insert the first (`Escaper`) argument,
/// so this filter only takes a single argument of any type that implements
/// `Display`.
///
/// It is possible to optionally specify an escaper other than the default for
/// the template's extension, like `{{ val|escape("txt") }}`.
pub fn escape<E, T>(e: E, v: T) -> Result<MarkupDisplay<E, T>>
where
E: Escaper,
T: fmt::Display,
{
Ok(MarkupDisplay::new_unsafe(v, e))
}
#[cfg(feature = "humansize")]
/// Returns adequate string representation (in KB, ..) of number of bytes
pub fn filesizeformat(b: &(impl ToF64 + Copy)) -> Result<String> {
Ok(format_size_i(*b, DECIMAL))
}
#[cfg(feature = "percent-encoding")]
/// Percent-encodes the argument for safe use in URI; does not encode `/`.
///
/// This should be safe for all parts of URI (paths segments, query keys, query
/// values). In the rare case that the server can't deal with forward slashes in
/// the query string, use [`urlencode_strict`], which encodes them as well.
///
/// Encodes all characters except ASCII letters, digits, and `_.-~/`. In other
/// words, encodes all characters which are not in the unreserved set,
/// as specified by [RFC3986](https://tools.ietf.org/html/rfc3986#section-2.3),
/// with the exception of `/`.
///
/// ```none,ignore
/// <a href="/metro{{ "/stations/Château d'Eau"|urlencode }}">Station</a>
/// <a href="/page?text={{ "look, unicode/emojis ✨"|urlencode }}">Page</a>
/// ```
///
/// To encode `/` as well, see [`urlencode_strict`](./fn.urlencode_strict.html).
///
/// [`urlencode_strict`]: ./fn.urlencode_strict.html
pub fn urlencode<T: fmt::Display>(s: T) -> Result<String> {
let s = s.to_string();
Ok(utf8_percent_encode(&s, URLENCODE_SET).to_string())
}
#[cfg(feature = "percent-encoding")]
/// Percent-encodes the argument for safe use in URI; encodes `/`.
///
/// Use this filter for encoding query keys and values in the rare case that
/// the server can't process them unencoded.
///
/// Encodes all characters except ASCII letters, digits, and `_.-~`. In other
/// words, encodes all characters which are not in the unreserved set,
/// as specified by [RFC3986](https://tools.ietf.org/html/rfc3986#section-2.3).
///
/// ```none,ignore
/// <a href="/page?text={{ "look, unicode/emojis ✨"|urlencode_strict }}">Page</a>
/// ```
///
/// If you want to preserve `/`, see [`urlencode`](./fn.urlencode.html).
pub fn urlencode_strict<T: fmt::Display>(s: T) -> Result<String> {
let s = s.to_string();
Ok(utf8_percent_encode(&s, URLENCODE_STRICT_SET).to_string())
}
/// Formats arguments according to the specified format
///
/// The *second* argument to this filter must be a string literal (as in normal
/// Rust). The two arguments are passed through to the `format!()`
/// [macro](https://doc.rust-lang.org/stable/std/macro.format.html) by
/// the Askama code generator, but the order is swapped to support filter
/// composition.
///
/// ```ignore
/// {{ value | fmt("{:?}") }}
/// ```
///
/// Compare with [format](./fn.format.html).
pub fn fmt() {}
/// Formats arguments according to the specified format
///
/// The first argument to this filter must be a string literal (as in normal
/// Rust). All arguments are passed through to the `format!()`
/// [macro](https://doc.rust-lang.org/stable/std/macro.format.html) by
/// the Askama code generator.
///
/// ```ignore
/// {{ "{:?}{:?}" | format(value, other_value) }}
/// ```
///
/// Compare with [fmt](./fn.fmt.html).
pub fn format() {}
/// Replaces line breaks in plain text with appropriate HTML
///
/// A single newline becomes an HTML line break `<br>` and a new line
/// followed by a blank line becomes a paragraph break `<p>`.
pub fn linebreaks<T: fmt::Display>(s: T) -> Result<String> {
let s = s.to_string();
let linebroken = s.replace("\n\n", "</p><p>").replace('\n', "<br/>");
Ok(format!("<p>{linebroken}</p>"))
}
/// Converts all newlines in a piece of plain text to HTML line breaks
pub fn linebreaksbr<T: fmt::Display>(s: T) -> Result<String> {
let s = s.to_string();
Ok(s.replace('\n', "<br/>"))
}
/// Replaces only paragraph breaks in plain text with appropriate HTML
///
/// A new line followed by a blank line becomes a paragraph break `<p>`.
/// Paragraph tags only wrap content; empty paragraphs are removed.
/// No `<br/>` tags are added.
pub fn paragraphbreaks<T: fmt::Display>(s: T) -> Result<String> {
let s = s.to_string();
let linebroken = s.replace("\n\n", "</p><p>").replace("<p></p>", "");
Ok(format!("<p>{linebroken}</p>"))
}
/// Converts to lowercase
pub fn lower<T: fmt::Display>(s: T) -> Result<String> {
let s = s.to_string();
Ok(s.to_lowercase())
}
/// Alias for the `lower()` filter
pub fn lowercase<T: fmt::Display>(s: T) -> Result<String> {
lower(s)
}
/// Converts to uppercase
pub fn upper<T: fmt::Display>(s: T) -> Result<String> {
let s = s.to_string();
Ok(s.to_uppercase())
}
/// Alias for the `upper()` filter
pub fn uppercase<T: fmt::Display>(s: T) -> Result<String> {
upper(s)
}
/// Strip leading and trailing whitespace
pub fn trim<T: fmt::Display>(s: T) -> Result<String> {
let s = s.to_string();
Ok(s.trim().to_owned())
}
/// Limits the string to `len` bytes, rounded up to the next character boundary; appends '...' if truncated
pub fn truncate<T: fmt::Display>(s: T, len: usize) -> Result<String> {
let mut s = s.to_string();
if s.len() > len {
let mut real_len = len;
while !s.is_char_boundary(real_len) {
real_len += 1;
}
s.truncate(real_len);
s.push_str("...");
}
Ok(s)
}
/// Indent lines with `width` spaces
pub fn indent<T: fmt::Display>(s: T, width: usize) -> Result<String> {
let s = s.to_string();
let mut indented = String::new();
for (i, c) in s.char_indices() {
indented.push(c);
if c == '\n' && i < s.len() - 1 {
for _ in 0..width {
indented.push(' ');
}
}
}
Ok(indented)
}
#[cfg(feature = "num-traits")]
/// Casts number to f64
pub fn into_f64<T>(number: T) -> Result<f64>
where
T: NumCast,
{
number.to_f64().ok_or(Fmt(fmt::Error))
}
#[cfg(feature = "num-traits")]
/// Casts number to isize
pub fn into_isize<T>(number: T) -> Result<isize>
where
T: NumCast,
{
number.to_isize().ok_or(Fmt(fmt::Error))
}
/// Joins iterable into a string separated by provided argument
pub fn join<T, I, S>(input: I, separator: S) -> Result<String>
where
T: fmt::Display,
I: Iterator<Item = T>,
S: AsRef<str>,
{
let separator: &str = separator.as_ref();
let mut rv = String::new();
for (num, item) in input.enumerate() {
if num > 0 {
rv.push_str(separator);
}
write!(rv, "{item}")?;
}
Ok(rv)
}
#[cfg(feature = "num-traits")]
/// Absolute value
pub fn abs<T>(number: T) -> Result<T>
where
T: Signed,
{
Ok(number.abs())
}
/// Capitalize a value. The first character will be uppercase, all others lowercase.
pub fn capitalize<T: fmt::Display>(s: T) -> Result<String> {
let s = s.to_string();
match s.chars().next() {
Some(c) => {
let mut replacement: String = c.to_uppercase().collect();
replacement.push_str(&s[c.len_utf8()..].to_lowercase());
Ok(replacement)
}
_ => Ok(s),
}
}
/// Centers the value in a field of a given width
pub fn center(src: &dyn fmt::Display, dst_len: usize) -> Result<String> {
let src = src.to_string();
let len = src.len();
if dst_len <= len {
Ok(src)
} else {
let diff = dst_len - len;
let mid = diff / 2;
let r = diff % 2;
let mut buf = String::with_capacity(dst_len);
for _ in 0..mid {
buf.push(' ');
}
buf.push_str(&src);
for _ in 0..mid + r {
buf.push(' ');
}
Ok(buf)
}
}
/// Counts the words in the given string
pub fn wordcount<T: fmt::Display>(s: T) -> Result<usize> {
let s = s.to_string();
Ok(s.split_whitespace().count())
}
#[cfg(feature = "markdown")]
pub fn markdown<E, S>(
e: E,
s: S,
options: Option<&comrak::ComrakOptions>,
) -> Result<MarkupDisplay<E, String>>
where
E: Escaper,
S: AsRef<str>,
{
use comrak::{
markdown_to_html, ComrakExtensionOptions, ComrakOptions, ComrakParseOptions,
ComrakRenderOptions, ListStyleType,
};
const DEFAULT_OPTIONS: ComrakOptions = ComrakOptions {
extension: ComrakExtensionOptions {
strikethrough: true,
tagfilter: true,
table: true,
autolink: true,
// default:
tasklist: false,
superscript: false,
header_ids: None,
footnotes: false,
description_lists: false,
front_matter_delimiter: None,
},
parse: ComrakParseOptions {
// default:
smart: false,
default_info_string: None,
relaxed_tasklist_matching: false,
},
render: ComrakRenderOptions {
unsafe_: false,
escape: true,
// default:
hardbreaks: false,
github_pre_lang: false,
width: 0,
list_style: ListStyleType::Dash,
},
};
let s = markdown_to_html(s.as_ref(), options.unwrap_or(&DEFAULT_OPTIONS));
Ok(MarkupDisplay::new_safe(s, e))
}
#[cfg(test)]
mod tests {
use super::*;
#[cfg(feature = "num-traits")]
use std::f64::INFINITY;
#[cfg(feature = "humansize")]
#[test]
fn test_filesizeformat() {
assert_eq!(filesizeformat(&0).unwrap(), "0 B");
assert_eq!(filesizeformat(&999u64).unwrap(), "999 B");
assert_eq!(filesizeformat(&1000i32).unwrap(), "1 kB");
assert_eq!(filesizeformat(&1023).unwrap(), "1.02 kB");
assert_eq!(filesizeformat(&1024usize).unwrap(), "1.02 kB");
}
#[cfg(feature = "percent-encoding")]
#[test]
fn test_urlencoding() {
// Unreserved (https://tools.ietf.org/html/rfc3986.html#section-2.3)
// alpha / digit
assert_eq!(urlencode("AZaz09").unwrap(), "AZaz09");
assert_eq!(urlencode_strict("AZaz09").unwrap(), "AZaz09");
// other
assert_eq!(urlencode("_.-~").unwrap(), "_.-~");
assert_eq!(urlencode_strict("_.-~").unwrap(), "_.-~");
// Reserved (https://tools.ietf.org/html/rfc3986.html#section-2.2)
// gen-delims
assert_eq!(urlencode(":/?#[]@").unwrap(), "%3A/%3F%23%5B%5D%40");
assert_eq!(
urlencode_strict(":/?#[]@").unwrap(),
"%3A%2F%3F%23%5B%5D%40"
);
// sub-delims
assert_eq!(
urlencode("!$&'()*+,;=").unwrap(),
"%21%24%26%27%28%29%2A%2B%2C%3B%3D"
);
assert_eq!(
urlencode_strict("!$&'()*+,;=").unwrap(),
"%21%24%26%27%28%29%2A%2B%2C%3B%3D"
);
// Other
assert_eq!(
urlencode("žŠďŤňĚáÉóŮ").unwrap(),
"%C5%BE%C5%A0%C4%8F%C5%A4%C5%88%C4%9A%C3%A1%C3%89%C3%B3%C5%AE"
);
assert_eq!(
urlencode_strict("žŠďŤňĚáÉóŮ").unwrap(),
"%C5%BE%C5%A0%C4%8F%C5%A4%C5%88%C4%9A%C3%A1%C3%89%C3%B3%C5%AE"
);
// Ferris
assert_eq!(urlencode("🦀").unwrap(), "%F0%9F%A6%80");
assert_eq!(urlencode_strict("🦀").unwrap(), "%F0%9F%A6%80");
}
#[test]
fn test_linebreaks() {
assert_eq!(
linebreaks("Foo\nBar Baz").unwrap(),
"<p>Foo<br/>Bar Baz</p>"
);
assert_eq!(
linebreaks("Foo\nBar\n\nBaz").unwrap(),
"<p>Foo<br/>Bar</p><p>Baz</p>"
);
}
#[test]
fn test_linebreaksbr() {
assert_eq!(linebreaksbr("Foo\nBar").unwrap(), "Foo<br/>Bar");
assert_eq!(
linebreaksbr("Foo\nBar\n\nBaz").unwrap(),
"Foo<br/>Bar<br/><br/>Baz"
);
}
#[test]
fn test_paragraphbreaks() {
assert_eq!(
paragraphbreaks("Foo\nBar Baz").unwrap(),
"<p>Foo\nBar Baz</p>"
);
assert_eq!(
paragraphbreaks("Foo\nBar\n\nBaz").unwrap(),
"<p>Foo\nBar</p><p>Baz</p>"
);
assert_eq!(
paragraphbreaks("Foo\n\n\n\n\nBar\n\nBaz").unwrap(),
"<p>Foo</p><p>\nBar</p><p>Baz</p>"
);
}
#[test]
fn test_lower() {
assert_eq!(lower("Foo").unwrap(), "foo");
assert_eq!(lower("FOO").unwrap(), "foo");
assert_eq!(lower("FooBar").unwrap(), "foobar");
assert_eq!(lower("foo").unwrap(), "foo");
}
#[test]
fn test_upper() {
assert_eq!(upper("Foo").unwrap(), "FOO");
assert_eq!(upper("FOO").unwrap(), "FOO");
assert_eq!(upper("FooBar").unwrap(), "FOOBAR");
assert_eq!(upper("foo").unwrap(), "FOO");
}
#[test]
fn test_trim() {
assert_eq!(trim(" Hello\tworld\t").unwrap(), "Hello\tworld");
}
#[test]
fn test_truncate() {
assert_eq!(truncate("hello", 2).unwrap(), "he...");
let a = String::from("您好");
assert_eq!(a.len(), 6);
assert_eq!(String::from("您").len(), 3);
assert_eq!(truncate("您好", 1).unwrap(), "您...");
assert_eq!(truncate("您好", 2).unwrap(), "您...");
assert_eq!(truncate("您好", 3).unwrap(), "您...");
assert_eq!(truncate("您好", 4).unwrap(), "您好...");
assert_eq!(truncate("您好", 6).unwrap(), "您好");
assert_eq!(truncate("您好", 7).unwrap(), "您好");
let s = String::from("🤚a🤚");
assert_eq!(s.len(), 9);
assert_eq!(String::from("🤚").len(), 4);
assert_eq!(truncate("🤚a🤚", 1).unwrap(), "🤚...");
assert_eq!(truncate("🤚a🤚", 2).unwrap(), "🤚...");
assert_eq!(truncate("🤚a🤚", 3).unwrap(), "🤚...");
assert_eq!(truncate("🤚a🤚", 4).unwrap(), "🤚...");
assert_eq!(truncate("🤚a🤚", 5).unwrap(), "🤚a...");
assert_eq!(truncate("🤚a🤚", 6).unwrap(), "🤚a🤚...");
assert_eq!(truncate("🤚a🤚", 9).unwrap(), "🤚a🤚");
assert_eq!(truncate("🤚a🤚", 10).unwrap(), "🤚a🤚");
}
#[test]
fn test_indent() {
assert_eq!(indent("hello", 2).unwrap(), "hello");
assert_eq!(indent("hello\n", 2).unwrap(), "hello\n");
assert_eq!(indent("hello\nfoo", 2).unwrap(), "hello\n foo");
assert_eq!(
indent("hello\nfoo\n bar", 4).unwrap(),
"hello\n foo\n bar"
);
}
#[cfg(feature = "num-traits")]
#[test]
#[allow(clippy::float_cmp)]
fn test_into_f64() {
assert_eq!(into_f64(1).unwrap(), 1.0_f64);
assert_eq!(into_f64(1.9).unwrap(), 1.9_f64);
assert_eq!(into_f64(-1.9).unwrap(), -1.9_f64);
assert_eq!(into_f64(INFINITY as f32).unwrap(), INFINITY);
assert_eq!(into_f64(-INFINITY as f32).unwrap(), -INFINITY);
}
#[cfg(feature = "num-traits")]
#[test]
fn test_into_isize() {
assert_eq!(into_isize(1).unwrap(), 1_isize);
assert_eq!(into_isize(1.9).unwrap(), 1_isize);
assert_eq!(into_isize(-1.9).unwrap(), -1_isize);
assert_eq!(into_isize(1.5_f64).unwrap(), 1_isize);
assert_eq!(into_isize(-1.5_f64).unwrap(), -1_isize);
match into_isize(INFINITY) {
Err(Fmt(fmt::Error)) => {}
_ => panic!("Should return error of type Err(Fmt(fmt::Error))"),
};
}
#[allow(clippy::needless_borrow)]
#[test]
fn test_join() {
assert_eq!(
join((&["hello", "world"]).iter(), ", ").unwrap(),
"hello, world"
);
assert_eq!(join((&["hello"]).iter(), ", ").unwrap(), "hello");
let empty: &[&str] = &[];
assert_eq!(join(empty.iter(), ", ").unwrap(), "");
let input: Vec<String> = vec!["foo".into(), "bar".into(), "bazz".into()];
assert_eq!(join(input.iter(), ":").unwrap(), "foo:bar:bazz");
let input: &[String] = &["foo".into(), "bar".into()];
assert_eq!(join(input.iter(), ":").unwrap(), "foo:bar");
let real: String = "blah".into();
let input: Vec<&str> = vec![&real];
assert_eq!(join(input.iter(), ";").unwrap(), "blah");
assert_eq!(
join((&&&&&["foo", "bar"]).iter(), ", ").unwrap(),
"foo, bar"
);
}
#[cfg(feature = "num-traits")]
#[test]
#[allow(clippy::float_cmp)]
fn test_abs() {
assert_eq!(abs(1).unwrap(), 1);
assert_eq!(abs(-1).unwrap(), 1);
assert_eq!(abs(1.0).unwrap(), 1.0);
assert_eq!(abs(-1.0).unwrap(), 1.0);
assert_eq!(abs(1.0_f64).unwrap(), 1.0_f64);
assert_eq!(abs(-1.0_f64).unwrap(), 1.0_f64);
}
#[test]
fn test_capitalize() {
assert_eq!(capitalize("foo").unwrap(), "Foo".to_string());
assert_eq!(capitalize("f").unwrap(), "F".to_string());
assert_eq!(capitalize("fO").unwrap(), "Fo".to_string());
assert_eq!(capitalize("").unwrap(), "".to_string());
assert_eq!(capitalize("FoO").unwrap(), "Foo".to_string());
assert_eq!(capitalize("foO BAR").unwrap(), "Foo bar".to_string());
assert_eq!(capitalize("äØÄÅÖ").unwrap(), "Äøäåö".to_string());
assert_eq!(capitalize("ß").unwrap(), "SS".to_string());
assert_eq!(capitalize("ßß").unwrap(), "SSß".to_string());
}
#[test]
fn test_center() {
assert_eq!(center(&"f", 3).unwrap(), " f ".to_string());
assert_eq!(center(&"f", 4).unwrap(), " f ".to_string());
assert_eq!(center(&"foo", 1).unwrap(), "foo".to_string());
assert_eq!(center(&"foo bar", 8).unwrap(), "foo bar ".to_string());
}
#[test]
fn test_wordcount() {
assert_eq!(wordcount("").unwrap(), 0);
assert_eq!(wordcount(" \n\t").unwrap(), 0);
assert_eq!(wordcount("foo").unwrap(), 1);
assert_eq!(wordcount("foo bar").unwrap(), 2);
}
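#[test]
fn test_safe_and_escape() {
// Illustrative test, added for clarity: with the `Html` escaper,
// `escape` encodes markup while `safe` passes it through unchanged.
use askama_escape::Html;
assert_eq!(escape(Html, "<b>").unwrap().to_string(), "&lt;b&gt;");
assert_eq!(safe(Html, "<b>").unwrap().to_string(), "<b>");
}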
}

View File

@@ -1,34 +0,0 @@
use crate::error::{Error, Result};
use askama_escape::{Escaper, MarkupDisplay};
use serde::Serialize;
/// Serialize to YAML (requires `serde_yaml` feature)
///
/// ## Errors
///
/// This will return an error if `S`'s implementation of `Serialize`
/// decides to fail, or if `S` contains a map with non-string keys.
pub fn yaml<E: Escaper, S: Serialize>(e: E, s: S) -> Result<MarkupDisplay<E, String>> {
match serde_yaml::to_string(&s) {
Ok(s) => Ok(MarkupDisplay::new_safe(s, e)),
Err(e) => Err(Error::from(e)),
}
}
#[cfg(test)]
mod tests {
use super::*;
use askama_escape::Html;
#[test]
fn test_yaml() {
assert_eq!(yaml(Html, true).unwrap().to_string(), "true\n");
assert_eq!(yaml(Html, "foo").unwrap().to_string(), "foo\n");
assert_eq!(yaml(Html, &true).unwrap().to_string(), "true\n");
assert_eq!(yaml(Html, &"foo").unwrap().to_string(), "foo\n");
assert_eq!(
yaml(Html, &vec!["foo", "bar"]).unwrap().to_string(),
"- foo\n- bar\n"
);
}
}

View File

@@ -1,48 +0,0 @@
use std::iter::{Enumerate, Peekable};
pub struct TemplateLoop<I>
where
I: Iterator,
{
iter: Peekable<Enumerate<I>>,
}
impl<I> TemplateLoop<I>
where
I: Iterator,
{
#[inline]
pub fn new(iter: I) -> Self {
TemplateLoop {
iter: iter.enumerate().peekable(),
}
}
}
impl<I> Iterator for TemplateLoop<I>
where
I: Iterator,
{
type Item = (<I as Iterator>::Item, LoopItem);
#[inline]
fn next(&mut self) -> Option<(<I as Iterator>::Item, LoopItem)> {
self.iter.next().map(|(index, item)| {
(
item,
LoopItem {
index,
first: index == 0,
last: self.iter.peek().is_none(),
},
)
})
}
}
#[derive(Copy, Clone)]
pub struct LoopItem {
pub index: usize,
pub first: bool,
pub last: bool,
}
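#[cfg(test)]
mod tests {
// Illustrative test, added for clarity: `TemplateLoop` pairs every item
// with its index and first/last flags.
use super::*;
#[test]
fn loop_item_flags() {
let items: Vec<(&str, LoopItem)> = TemplateLoop::new(["a", "b", "c"].into_iter()).collect();
assert_eq!(items.len(), 3);
assert!(items[0].1.first && !items[0].1.last && items[0].1.index == 0);
assert!(!items[1].1.first && !items[1].1.last && items[1].1.index == 1);
assert!(!items[2].1.first && items[2].1.last && items[2].1.index == 2);
}
}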

View File

@@ -1,219 +0,0 @@
//! Askama implements a type-safe compiler for Jinja-like templates.
//! It lets you write templates in a Jinja-like syntax,
//! which are linked to a `struct` defining the template context.
//! This is done using a custom derive implementation (implemented
//! in [`askama_derive`](https://crates.io/crates/askama_derive)).
//!
//! For feature highlights and a quick start, please review the
//! [README](https://github.com/djc/askama/blob/main/README.md).
//!
//! The primary documentation for this crate now lives in
//! [the book](https://djc.github.io/askama/).
//!
//! # Creating Askama templates
//!
//! An Askama template is a `struct` definition which provides the template
//! context combined with a UTF-8 encoded text file (or inline source, see
//! below). Askama can be used to generate any kind of text-based format.
//! The template file's extension may be used to provide content type hints.
//!
//! A template consists of **text contents**, which are passed through as-is,
//! **expressions**, which get replaced with content while being rendered, and
//! **tags**, which control the template's logic.
//! The template syntax is very similar to [Jinja](http://jinja.pocoo.org/),
//! as well as Jinja-derivatives like [Twig](http://twig.sensiolabs.org/) or
//! [Tera](https://github.com/Keats/tera).
//!
//! ## The `template()` attribute
//!
//! Askama works by generating one or more trait implementations for any
//! `struct` type decorated with the `#[derive(Template)]` attribute. The
//! code generation process takes some options that can be specified through
//! the `template()` attribute. The following sub-attributes are currently
//! recognized:
//!
//! * `path` (as `path = "foo.html"`): sets the path to the template file. The
//! path is interpreted as relative to the configured template directories
//! (by default, this is a `templates` directory next to your `Cargo.toml`).
//! The file name extension is used to infer an escape mode (see below). In
//! web framework integrations, the path's extension may also be used to
//! infer the content type of the resulting response.
//! Cannot be used together with `source`.
//! * `source` (as `source = "{{ foo }}"`): directly sets the template source.
//! This can be useful for test cases or short templates. The generated path
//! is undefined, which generally makes it impossible to refer to this
//! template from other templates. If `source` is specified, `ext` must also
//! be specified (see below). Cannot be used together with `path`.
//! * `ext` (as `ext = "txt"`): lets you specify the content type as a file
//! extension. This is used to infer an escape mode (see below), and some
//! web framework integrations use it to determine the content type.
//! Cannot be used together with `path`.
//! * `print` (as `print = "code"`): enable debugging by printing nothing
//! (`none`), the parsed syntax tree (`ast`), the generated code (`code`)
//! or `all` for both. The requested data will be printed to stdout at
//! compile time.
//! * `escape` (as `escape = "none"`): override the template's extension used for
//! the purpose of determining the escaper for this template. See the section
//! on configuring custom escapers for more information.
//! * `syntax` (as `syntax = "foo"`): set the syntax name for a parser defined
//! in the configuration file. The default syntax, "default", is the one
//! provided by Askama.
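//!
//! A minimal example (illustrative, using an inline `source`):
//!
//! ```rust,ignore
//! #[derive(Template)]
//! #[template(source = "Hello, {{ name }}!", ext = "txt")]
//! struct Hello<'a> { name: &'a str }
//!
//! assert_eq!(Hello { name: "world" }.render().unwrap(), "Hello, world!");
//! ```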
#![forbid(unsafe_code)]
#![deny(elided_lifetimes_in_paths)]
#![deny(unreachable_pub)]
mod error;
pub mod filters;
pub mod helpers;
use std::fmt;
pub use askama_derive::Template;
pub use askama_escape::{Html, MarkupDisplay, Text};
#[doc(hidden)]
pub use crate as shared;
pub use crate::error::{Error, Result};
/// Main `Template` trait; implementations are generally derived
///
/// If you need an object-safe template, use [`DynTemplate`].
pub trait Template: fmt::Display {
/// Helper method which allocates a new `String` and renders into it
fn render(&self) -> Result<String> {
let mut buf = String::with_capacity(Self::SIZE_HINT);
self.render_into(&mut buf)?;
Ok(buf)
}
/// Renders the template to the given `writer` fmt buffer
fn render_into(&self, writer: &mut (impl std::fmt::Write + ?Sized)) -> Result<()>;
/// Renders the template to the given `writer` io buffer
#[inline]
fn write_into(&self, writer: &mut (impl std::io::Write + ?Sized)) -> std::io::Result<()> {
writer.write_fmt(format_args!("{self}"))
}
/// The template's extension, if provided
const EXTENSION: Option<&'static str>;
/// Provides a conservative estimate of the expanded length of the rendered template
const SIZE_HINT: usize;
/// The MIME type (Content-Type) of the data that gets rendered by this Template
const MIME_TYPE: &'static str;
}
/// Object-safe wrapper trait around [`Template`] implementers
///
/// This trades reduced performance (mostly due to writing into `dyn Write`) for object safety.
pub trait DynTemplate {
/// Helper method which allocates a new `String` and renders into it
fn dyn_render(&self) -> Result<String>;
/// Renders the template to the given `writer` fmt buffer
fn dyn_render_into(&self, writer: &mut dyn std::fmt::Write) -> Result<()>;
/// Renders the template to the given `writer` io buffer
fn dyn_write_into(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()>;
/// Helper function to inspect the template's extension
fn extension(&self) -> Option<&'static str>;
/// Provides a conservative estimate of the expanded length of the rendered template
fn size_hint(&self) -> usize;
/// The MIME type (Content-Type) of the data that gets rendered by this Template
fn mime_type(&self) -> &'static str;
}
impl<T: Template> DynTemplate for T {
fn dyn_render(&self) -> Result<String> {
<Self as Template>::render(self)
}
fn dyn_render_into(&self, writer: &mut dyn std::fmt::Write) -> Result<()> {
<Self as Template>::render_into(self, writer)
}
#[inline]
fn dyn_write_into(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> {
writer.write_fmt(format_args!("{self}"))
}
fn extension(&self) -> Option<&'static str> {
Self::EXTENSION
}
fn size_hint(&self) -> usize {
Self::SIZE_HINT
}
fn mime_type(&self) -> &'static str {
Self::MIME_TYPE
}
}
impl fmt::Display for dyn DynTemplate {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.dyn_render_into(f).map_err(|_| ::std::fmt::Error {})
}
}
#[cfg(test)]
mod tests {
use std::fmt;
use super::*;
use crate::{DynTemplate, Template};
#[test]
fn dyn_template() {
struct Test;
impl Template for Test {
fn render_into(&self, writer: &mut (impl std::fmt::Write + ?Sized)) -> Result<()> {
Ok(writer.write_str("test")?)
}
const EXTENSION: Option<&'static str> = Some("txt");
const SIZE_HINT: usize = 4;
const MIME_TYPE: &'static str = "text/plain; charset=utf-8";
}
impl fmt::Display for Test {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.render_into(f).map_err(|_| fmt::Error {})
}
}
fn render(t: &dyn DynTemplate) -> String {
t.dyn_render().unwrap()
}
let test = &Test as &dyn DynTemplate;
assert_eq!(render(test), "test");
assert_eq!(test.to_string(), "test");
assert_eq!(format!("{test}"), "test");
let mut vec = Vec::new();
test.dyn_write_into(&mut vec).unwrap();
assert_eq!(vec, vec![b't', b'e', b's', b't']);
}
}
/// Old build script helper to rebuild crates if contained templates have changed
///
/// This function is now deprecated and does nothing.
#[deprecated(
since = "0.8.1",
note = "file-level dependency tracking is handled automatically without build script"
)]
pub fn rerun_if_templates_changed() {}

View File

@@ -1 +0,0 @@
{"files":{"Cargo.toml":"f293fbc41371fb46f5b68775b158d8da37c09453dc9356ee8e97fce3d1021b2d","LICENSE-APACHE":"87cb0d734c723c083e51c825930ff42bce28596b52dee15567f6b28f19c195e3","LICENSE-MIT":"df20e0180764bf5bd76f74d47bc9e8c0069a666401629c390003a1d5eba99c92","README.md":"dd3e4e203eeca91219fd57c0ca1f92b413176f406df19568d0fe33d7905123e4","src/config.rs":"de4202804d32cc4da044ed41140ef987056f44116b1bbfac53001e07133e52b9","src/generator.rs":"4fec224dd261bc96a63b831f0692a62d9f8d19566377b39dd69bc0f3de4ab033","src/heritage.rs":"fceb0ac86034b8eb902212f9a78a6fb7d19688c3ccdb117099f15933073bf7bb","src/input.rs":"53afae3f73e2b52d83d73c1b38893677992a5ee04927e8b905198b742b1546ae","src/lib.rs":"003e91569575b72a9587796c82c9f9c0e5e9f3dc8db6b659735cf58f68504b76","src/parser/expr.rs":"3b8178398a293910df161ddd769d2efc7ae8dff03e7313f033149a38a6d81983","src/parser/mod.rs":"3afc065cdc69dc1498ddf9a04a77f56d807ed14653828918d36529a441fb6c48","src/parser/node.rs":"c5437e2525e245b6fcd358696f3607c50ef82cf649a66b6bef7816232c3220fa","src/parser/tests.rs":"81fb02f8cab87c93575fdb6b7d6e9cae6fa3b69173f5f5a76d214f5316ca66ca","templates/a.html":"b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c","templates/b.html":"7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730","templates/sub/b.html":"7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730","templates/sub/c.html":"bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c","templates/sub/sub1/d.html":"86b0c5a1e2b73b08fd54c727f4458649ed9fe3ad1b6e8ac9460c070113509a1e"},"package":"c22fbe0413545c098358e56966ff22cdd039e10215ae213cfbd65032b119fc94"}

View File

@@ -1,72 +0,0 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2021"
rust-version = "1.58"
name = "askama_derive"
version = "0.12.1"
description = "Procedural macro package for Askama"
homepage = "https://github.com/djc/askama"
readme = "README.md"
license = "MIT/Apache-2.0"
repository = "https://github.com/djc/askama"
resolver = "1"
[lib]
proc-macro = true
[dependencies.basic-toml]
version = "0.1.1"
optional = true
[dependencies.mime]
version = "0.3"
[dependencies.mime_guess]
version = "2"
[dependencies.nom]
version = "7"
[dependencies.proc-macro2]
version = "1"
[dependencies.quote]
version = "1"
[dependencies.serde]
version = "1.0"
features = ["derive"]
optional = true
[dependencies.syn]
version = "2"
[features]
config = [
"serde",
"basic-toml",
]
humansize = []
markdown = []
num-traits = []
serde-json = []
serde-yaml = []
urlencode = []
with-actix-web = []
with-axum = []
with-gotham = []
with-hyper = []
with-mendes = []
with-rocket = []
with-tide = []
with-warp = []

View File

@@ -1,9 +0,0 @@
# askama_derive: procedural macros for the Askama templating engine
[![Documentation](https://docs.rs/askama_derive/badge.svg)](https://docs.rs/askama_derive/)
[![Latest version](https://img.shields.io/crates/v/askama_derive.svg)](https://crates.io/crates/askama_derive)
[![Build Status](https://github.com/djc/askama/workflows/CI/badge.svg)](https://github.com/djc/askama/actions?query=workflow%3ACI)
[![Chat](https://badges.gitter.im/gitterHQ/gitter.svg)](https://gitter.im/djc/askama)
This crate contains the procedural macros used by the
[Askama](https://github.com/djc/askama) templating engine.

View File

@@ -1,582 +0,0 @@
use std::collections::{BTreeMap, HashSet};
use std::convert::TryFrom;
use std::path::{Path, PathBuf};
use std::{env, fs};
#[cfg(feature = "serde")]
use serde::Deserialize;
use crate::CompileError;
#[derive(Debug)]
pub(crate) struct Config<'a> {
pub(crate) dirs: Vec<PathBuf>,
pub(crate) syntaxes: BTreeMap<String, Syntax<'a>>,
pub(crate) default_syntax: &'a str,
pub(crate) escapers: Vec<(HashSet<String>, String)>,
pub(crate) whitespace: WhitespaceHandling,
}
impl<'a> Config<'a> {
pub(crate) fn new(
s: &'a str,
template_whitespace: Option<&String>,
) -> std::result::Result<Config<'a>, CompileError> {
let root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
let default_dirs = vec![root.join("templates")];
let mut syntaxes = BTreeMap::new();
syntaxes.insert(DEFAULT_SYNTAX_NAME.to_string(), Syntax::default());
let raw = if s.is_empty() {
RawConfig::default()
} else {
RawConfig::from_toml_str(s)?
};
let (dirs, default_syntax, mut whitespace) = match raw.general {
Some(General {
dirs,
default_syntax,
whitespace,
}) => (
dirs.map_or(default_dirs, |v| {
v.into_iter().map(|dir| root.join(dir)).collect()
}),
default_syntax.unwrap_or(DEFAULT_SYNTAX_NAME),
whitespace,
),
None => (
default_dirs,
DEFAULT_SYNTAX_NAME,
WhitespaceHandling::default(),
),
};
if let Some(template_whitespace) = template_whitespace {
whitespace = match template_whitespace.as_str() {
"suppress" => WhitespaceHandling::Suppress,
"minimize" => WhitespaceHandling::Minimize,
"preserve" => WhitespaceHandling::Preserve,
s => return Err(format!("invalid value for `whitespace`: \"{s}\"").into()),
};
}
if let Some(raw_syntaxes) = raw.syntax {
for raw_s in raw_syntaxes {
let name = raw_s.name;
if syntaxes
.insert(name.to_string(), Syntax::try_from(raw_s)?)
.is_some()
{
return Err(format!("syntax \"{name}\" is already defined").into());
}
}
}
if !syntaxes.contains_key(default_syntax) {
return Err(format!("default syntax \"{default_syntax}\" not found").into());
}
let mut escapers = Vec::new();
if let Some(configured) = raw.escaper {
for escaper in configured {
escapers.push((
escaper
.extensions
.iter()
.map(|ext| (*ext).to_string())
.collect(),
escaper.path.to_string(),
));
}
}
for (extensions, path) in DEFAULT_ESCAPERS {
escapers.push((str_set(extensions), (*path).to_string()));
}
Ok(Config {
dirs,
syntaxes,
default_syntax,
escapers,
whitespace,
})
}
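/// Resolves a template `path`: first relative to the referencing template
/// (`start_at`, as used for `extends` and `import`), then against each
/// configured template directory, returning the first match that exists.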
pub(crate) fn find_template(
&self,
path: &str,
start_at: Option<&Path>,
) -> std::result::Result<PathBuf, CompileError> {
if let Some(root) = start_at {
let relative = root.with_file_name(path);
if relative.exists() {
return Ok(relative);
}
}
for dir in &self.dirs {
let rooted = dir.join(path);
if rooted.exists() {
return Ok(rooted);
}
}
Err(format!(
"template {:?} not found in directories {:?}",
path, self.dirs
)
.into())
}
}
#[derive(Debug)]
pub(crate) struct Syntax<'a> {
pub(crate) block_start: &'a str,
pub(crate) block_end: &'a str,
pub(crate) expr_start: &'a str,
pub(crate) expr_end: &'a str,
pub(crate) comment_start: &'a str,
pub(crate) comment_end: &'a str,
}
impl Default for Syntax<'static> {
fn default() -> Self {
Self {
block_start: "{%",
block_end: "%}",
expr_start: "{{",
expr_end: "}}",
comment_start: "{#",
comment_end: "#}",
}
}
}
impl<'a> TryFrom<RawSyntax<'a>> for Syntax<'a> {
type Error = CompileError;
fn try_from(raw: RawSyntax<'a>) -> std::result::Result<Self, Self::Error> {
let default = Syntax::default();
let syntax = Self {
block_start: raw.block_start.unwrap_or(default.block_start),
block_end: raw.block_end.unwrap_or(default.block_end),
expr_start: raw.expr_start.unwrap_or(default.expr_start),
expr_end: raw.expr_end.unwrap_or(default.expr_end),
comment_start: raw.comment_start.unwrap_or(default.comment_start),
comment_end: raw.comment_end.unwrap_or(default.comment_end),
};
if syntax.block_start.len() != 2
|| syntax.block_end.len() != 2
|| syntax.expr_start.len() != 2
|| syntax.expr_end.len() != 2
|| syntax.comment_start.len() != 2
|| syntax.comment_end.len() != 2
{
return Err("length of delimiters must be two".into());
}
let bs = syntax.block_start.as_bytes()[0];
let be = syntax.block_start.as_bytes()[1];
let cs = syntax.comment_start.as_bytes()[0];
let ce = syntax.comment_start.as_bytes()[1];
let es = syntax.expr_start.as_bytes()[0];
let ee = syntax.expr_start.as_bytes()[1];
if !((bs == cs && bs == es) || (be == ce && be == ee)) {
return Err(format!("bad delimiters block_start: {}, comment_start: {}, expr_start: {}, needs one of the two characters in common", syntax.block_start, syntax.comment_start, syntax.expr_start).into());
}
Ok(syntax)
}
}
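// Example of the delimiter rule enforced above: the default delimiters
// `{%`, `{#` and `{{` are accepted because they share the leading `{`.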
#[cfg_attr(feature = "serde", derive(Deserialize))]
#[derive(Default)]
struct RawConfig<'a> {
#[cfg_attr(feature = "serde", serde(borrow))]
general: Option<General<'a>>,
syntax: Option<Vec<RawSyntax<'a>>>,
escaper: Option<Vec<RawEscaper<'a>>>,
}
impl RawConfig<'_> {
#[cfg(feature = "config")]
fn from_toml_str(s: &str) -> std::result::Result<RawConfig<'_>, CompileError> {
basic_toml::from_str(s)
.map_err(|e| format!("invalid TOML in {CONFIG_FILE_NAME}: {e}").into())
}
#[cfg(not(feature = "config"))]
fn from_toml_str(_: &str) -> std::result::Result<RawConfig<'_>, CompileError> {
Err("TOML support not available".into())
}
}
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "serde", derive(Deserialize))]
#[cfg_attr(feature = "serde", serde(field_identifier, rename_all = "lowercase"))]
pub(crate) enum WhitespaceHandling {
/// The default behaviour. It will leave the whitespace characters "as is".
Preserve,
/// It'll remove all the whitespace characters before and after the Jinja block.
Suppress,
/// It'll remove all the whitespace characters except one before and after the
/// Jinja block. If a newline character is among the trimmed characters, the
/// newline is the one that gets preserved.
Minimize,
}
impl Default for WhitespaceHandling {
fn default() -> Self {
WhitespaceHandling::Preserve
}
}
#[cfg_attr(feature = "serde", derive(Deserialize))]
struct General<'a> {
#[cfg_attr(feature = "serde", serde(borrow))]
dirs: Option<Vec<&'a str>>,
default_syntax: Option<&'a str>,
#[cfg_attr(feature = "serde", serde(default))]
whitespace: WhitespaceHandling,
}
#[cfg_attr(feature = "serde", derive(Deserialize))]
struct RawSyntax<'a> {
name: &'a str,
block_start: Option<&'a str>,
block_end: Option<&'a str>,
expr_start: Option<&'a str>,
expr_end: Option<&'a str>,
comment_start: Option<&'a str>,
comment_end: Option<&'a str>,
}
#[cfg_attr(feature = "serde", derive(Deserialize))]
struct RawEscaper<'a> {
path: &'a str,
extensions: Vec<&'a str>,
}
pub(crate) fn read_config_file(
config_path: Option<&str>,
) -> std::result::Result<String, CompileError> {
let root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
let filename = match config_path {
Some(config_path) => root.join(config_path),
None => root.join(CONFIG_FILE_NAME),
};
if filename.exists() {
fs::read_to_string(&filename)
.map_err(|_| format!("unable to read {:?}", filename.to_str().unwrap()).into())
} else if config_path.is_some() {
Err(format!("`{}` does not exist", filename.display()).into())
} else {
Ok("".to_string())
}
}
fn str_set<T>(vals: &[T]) -> HashSet<String>
where
T: ToString,
{
vals.iter().map(|s| s.to_string()).collect()
}
#[allow(clippy::match_wild_err_arm)]
pub(crate) fn get_template_source(tpl_path: &Path) -> std::result::Result<String, CompileError> {
match fs::read_to_string(tpl_path) {
Err(_) => Err(format!(
"unable to open template file '{}'",
tpl_path.to_str().unwrap()
)
.into()),
Ok(mut source) => {
if source.ends_with('\n') {
let _ = source.pop();
}
Ok(source)
}
}
}
static CONFIG_FILE_NAME: &str = "askama.toml";
static DEFAULT_SYNTAX_NAME: &str = "default";
static DEFAULT_ESCAPERS: &[(&[&str], &str)] = &[
(&["html", "htm", "xml"], "::askama::Html"),
(&["md", "none", "txt", "yml", ""], "::askama::Text"),
(&["j2", "jinja", "jinja2"], "::askama::Html"),
];
#[cfg(test)]
mod tests {
use std::env;
use std::path::{Path, PathBuf};
use super::*;
#[test]
fn get_source() {
let path = Config::new("", None)
.and_then(|config| config.find_template("b.html", None))
.unwrap();
assert_eq!(get_template_source(&path).unwrap(), "bar");
}
#[test]
fn test_default_config() {
let mut root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
root.push("templates");
let config = Config::new("", None).unwrap();
assert_eq!(config.dirs, vec![root]);
}
#[cfg(feature = "config")]
#[test]
fn test_config_dirs() {
let mut root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
root.push("tpl");
let config = Config::new("[general]\ndirs = [\"tpl\"]", None).unwrap();
assert_eq!(config.dirs, vec![root]);
}
fn assert_eq_rooted(actual: &Path, expected: &str) {
let mut root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
root.push("templates");
let mut inner = PathBuf::new();
inner.push(expected);
assert_eq!(actual.strip_prefix(root).unwrap(), inner);
}
#[test]
fn find_absolute() {
let config = Config::new("", None).unwrap();
let root = config.find_template("a.html", None).unwrap();
let path = config.find_template("sub/b.html", Some(&root)).unwrap();
assert_eq_rooted(&path, "sub/b.html");
}
#[test]
#[should_panic]
fn find_relative_nonexistent() {
let config = Config::new("", None).unwrap();
let root = config.find_template("a.html", None).unwrap();
config.find_template("c.html", Some(&root)).unwrap();
}
#[test]
fn find_relative() {
let config = Config::new("", None).unwrap();
let root = config.find_template("sub/b.html", None).unwrap();
let path = config.find_template("c.html", Some(&root)).unwrap();
assert_eq_rooted(&path, "sub/c.html");
}
#[test]
fn find_relative_sub() {
let config = Config::new("", None).unwrap();
let root = config.find_template("sub/b.html", None).unwrap();
let path = config.find_template("sub1/d.html", Some(&root)).unwrap();
assert_eq_rooted(&path, "sub/sub1/d.html");
}
#[cfg(feature = "config")]
#[test]
fn add_syntax() {
let raw_config = r#"
[general]
default_syntax = "foo"
[[syntax]]
name = "foo"
block_start = "{<"
[[syntax]]
name = "bar"
expr_start = "{!"
"#;
let default_syntax = Syntax::default();
let config = Config::new(raw_config, None).unwrap();
assert_eq!(config.default_syntax, "foo");
let foo = config.syntaxes.get("foo").unwrap();
assert_eq!(foo.block_start, "{<");
assert_eq!(foo.block_end, default_syntax.block_end);
assert_eq!(foo.expr_start, default_syntax.expr_start);
assert_eq!(foo.expr_end, default_syntax.expr_end);
assert_eq!(foo.comment_start, default_syntax.comment_start);
assert_eq!(foo.comment_end, default_syntax.comment_end);
let bar = config.syntaxes.get("bar").unwrap();
assert_eq!(bar.block_start, default_syntax.block_start);
assert_eq!(bar.block_end, default_syntax.block_end);
assert_eq!(bar.expr_start, "{!");
assert_eq!(bar.expr_end, default_syntax.expr_end);
assert_eq!(bar.comment_start, default_syntax.comment_start);
assert_eq!(bar.comment_end, default_syntax.comment_end);
}
#[cfg(feature = "config")]
#[test]
fn add_syntax_two() {
let raw_config = r#"
syntax = [{ name = "foo", block_start = "{<" },
{ name = "bar", expr_start = "{!" } ]
[general]
default_syntax = "foo"
"#;
let default_syntax = Syntax::default();
let config = Config::new(raw_config, None).unwrap();
assert_eq!(config.default_syntax, "foo");
let foo = config.syntaxes.get("foo").unwrap();
assert_eq!(foo.block_start, "{<");
assert_eq!(foo.block_end, default_syntax.block_end);
assert_eq!(foo.expr_start, default_syntax.expr_start);
assert_eq!(foo.expr_end, default_syntax.expr_end);
assert_eq!(foo.comment_start, default_syntax.comment_start);
assert_eq!(foo.comment_end, default_syntax.comment_end);
let bar = config.syntaxes.get("bar").unwrap();
assert_eq!(bar.block_start, default_syntax.block_start);
assert_eq!(bar.block_end, default_syntax.block_end);
assert_eq!(bar.expr_start, "{!");
assert_eq!(bar.expr_end, default_syntax.expr_end);
assert_eq!(bar.comment_start, default_syntax.comment_start);
assert_eq!(bar.comment_end, default_syntax.comment_end);
}
#[cfg(feature = "toml")]
#[should_panic]
#[test]
fn use_default_at_syntax_name() {
let raw_config = r#"
syntax = [{ name = "default" }]
"#;
let _config = Config::new(raw_config, None).unwrap();
}
#[cfg(feature = "toml")]
#[should_panic]
#[test]
fn duplicated_syntax_name_on_list() {
let raw_config = r#"
syntax = [{ name = "foo", block_start = "~<" },
{ name = "foo", block_start = "%%" } ]
"#;
let _config = Config::new(raw_config, None).unwrap();
}
#[cfg(feature = "toml")]
#[should_panic]
#[test]
fn is_not_exist_default_syntax() {
let raw_config = r#"
[general]
default_syntax = "foo"
"#;
let _config = Config::new(raw_config, None).unwrap();
}
#[cfg(feature = "config")]
#[test]
fn escape_modes() {
let config = Config::new(
r#"
[[escaper]]
path = "::askama::Js"
extensions = ["js"]
"#,
None,
)
.unwrap();
assert_eq!(
config.escapers,
vec![
(str_set(&["js"]), "::askama::Js".into()),
(str_set(&["html", "htm", "xml"]), "::askama::Html".into()),
(
str_set(&["md", "none", "txt", "yml", ""]),
"::askama::Text".into()
),
(str_set(&["j2", "jinja", "jinja2"]), "::askama::Html".into()),
]
);
}
#[cfg(feature = "config")]
#[test]
fn test_whitespace_parsing() {
let config = Config::new(
r#"
[general]
whitespace = "suppress"
"#,
None,
)
.unwrap();
assert_eq!(config.whitespace, WhitespaceHandling::Suppress);
let config = Config::new(r#""#, None).unwrap();
assert_eq!(config.whitespace, WhitespaceHandling::Preserve);
let config = Config::new(
r#"
[general]
whitespace = "preserve"
"#,
None,
)
.unwrap();
assert_eq!(config.whitespace, WhitespaceHandling::Preserve);
let config = Config::new(
r#"
[general]
whitespace = "minimize"
"#,
None,
)
.unwrap();
assert_eq!(config.whitespace, WhitespaceHandling::Minimize);
}
#[cfg(feature = "toml")]
#[test]
fn test_whitespace_in_template() {
// Checking that template arguments have precedence over general configuration.
// So in here, in the template arguments, there is `whitespace = "minimize"` so
// the `WhitespaceHandling` should be `Minimize` as well.
let config = Config::new(
r#"
[general]
whitespace = "suppress"
"#,
Some(&"minimize".to_owned()),
)
.unwrap();
assert_eq!(config.whitespace, WhitespaceHandling::Minimize);
let config = Config::new(r#""#, Some(&"minimize".to_owned())).unwrap();
assert_eq!(config.whitespace, WhitespaceHandling::Minimize);
}
#[test]
fn test_config_whitespace_error() {
let config = Config::new(r#""#, Some(&"trim".to_owned()));
if let Err(err) = config {
assert_eq!(err.msg, "invalid value for `whitespace`: \"trim\"");
} else {
panic!("Config::new should have returned an error");
}
}
}

File diff suppressed because it is too large

View File

@@ -1,126 +0,0 @@
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use crate::config::Config;
use crate::parser::{Loop, Macro, Node};
use crate::CompileError;
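/// Resolved inheritance chain for a template: `root` is the top-most ancestor
/// in the `extends` chain, and `blocks` maps each block name to its
/// definitions, ordered from the most-derived template up to its ancestors.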
pub(crate) struct Heritage<'a> {
pub(crate) root: &'a Context<'a>,
pub(crate) blocks: BlockAncestry<'a>,
}
impl Heritage<'_> {
pub(crate) fn new<'n>(
mut ctx: &'n Context<'n>,
contexts: &'n HashMap<&'n Path, Context<'n>>,
) -> Heritage<'n> {
let mut blocks: BlockAncestry<'n> = ctx
.blocks
.iter()
.map(|(name, def)| (*name, vec![(ctx, *def)]))
.collect();
while let Some(ref path) = ctx.extends {
ctx = &contexts[path.as_path()];
for (name, def) in &ctx.blocks {
blocks.entry(name).or_insert_with(Vec::new).push((ctx, def));
}
}
Heritage { root: ctx, blocks }
}
}
type BlockAncestry<'a> = HashMap<&'a str, Vec<(&'a Context<'a>, &'a Node<'a>)>>;
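/// Per-template parse state used for inheritance resolution: the parsed
/// nodes, the resolved `extends` target (if any), and the blocks, macros and
/// imports collected from the template source.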
pub(crate) struct Context<'a> {
pub(crate) nodes: &'a [Node<'a>],
pub(crate) extends: Option<PathBuf>,
pub(crate) blocks: HashMap<&'a str, &'a Node<'a>>,
pub(crate) macros: HashMap<&'a str, &'a Macro<'a>>,
pub(crate) imports: HashMap<&'a str, PathBuf>,
}
impl Context<'_> {
pub(crate) fn new<'n>(
config: &Config<'_>,
path: &Path,
nodes: &'n [Node<'n>],
) -> Result<Context<'n>, CompileError> {
let mut extends = None;
let mut blocks = Vec::new();
let mut macros = HashMap::new();
let mut imports = HashMap::new();
let mut nested = vec![nodes];
let mut top = true;
while let Some(nodes) = nested.pop() {
for n in nodes {
match n {
Node::Extends(extends_path) if top => match extends {
Some(_) => return Err("multiple extend blocks found".into()),
None => {
extends = Some(config.find_template(extends_path, Some(path))?);
}
},
Node::Macro(name, m) if top => {
macros.insert(*name, m);
}
Node::Import(_, import_path, scope) if top => {
let path = config.find_template(import_path, Some(path))?;
imports.insert(*scope, path);
}
Node::Extends(_) | Node::Macro(_, _) | Node::Import(_, _, _) if !top => {
return Err(
"extends, macro or import blocks not allowed below top level".into(),
);
}
def @ Node::BlockDef(_, _, _, _) => {
blocks.push(def);
if let Node::BlockDef(_, _, nodes, _) = def {
nested.push(nodes);
}
}
Node::Cond(branches, _) => {
for (_, _, nodes) in branches {
nested.push(nodes);
}
}
Node::Loop(Loop {
body, else_block, ..
}) => {
nested.push(body);
nested.push(else_block);
}
Node::Match(_, _, arms, _) => {
for (_, _, arm) in arms {
nested.push(arm);
}
}
_ => {}
}
}
top = false;
}
let blocks: HashMap<_, _> = blocks
.iter()
.map(|def| {
if let Node::BlockDef(_, name, _, _) = def {
(*name, *def)
} else {
unreachable!()
}
})
.collect();
Ok(Context {
nodes,
extends,
blocks,
macros,
imports,
})
}
}

View File

@@ -1,231 +0,0 @@
use crate::config::{Config, Syntax};
use crate::generator::TemplateArgs;
use crate::CompileError;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use mime::Mime;
pub(crate) struct TemplateInput<'a> {
pub(crate) ast: &'a syn::DeriveInput,
pub(crate) config: &'a Config<'a>,
pub(crate) syntax: &'a Syntax<'a>,
pub(crate) source: Source,
pub(crate) print: Print,
pub(crate) escaper: &'a str,
pub(crate) ext: Option<String>,
pub(crate) mime_type: String,
pub(crate) path: PathBuf,
}
impl TemplateInput<'_> {
/// Extract the template metadata from the `DeriveInput` structure. This
/// mostly recovers the data for the `TemplateInput` fields from the
/// `template()` attribute list fields.
pub(crate) fn new<'n>(
ast: &'n syn::DeriveInput,
config: &'n Config<'_>,
args: TemplateArgs,
) -> Result<TemplateInput<'n>, CompileError> {
let TemplateArgs {
source,
print,
escaping,
ext,
syntax,
..
} = args;
// Validate the `source` and `ext` value together, since they are
// related. In case `source` was used instead of `path`, the value
// of `ext` is merged into a synthetic `path` value here.
let source = source.expect("template path or source not found in attributes");
let path = match (&source, &ext) {
(Source::Path(path), _) => config.find_template(path, None)?,
(&Source::Source(_), Some(ext)) => PathBuf::from(format!("{}.{}", ast.ident, ext)),
(&Source::Source(_), None) => {
return Err("must include 'ext' attribute when using 'source' attribute".into())
}
};
// Validate syntax
let syntax = syntax.map_or_else(
|| Ok(config.syntaxes.get(config.default_syntax).unwrap()),
|s| {
config
.syntaxes
.get(&s)
.ok_or_else(|| CompileError::from(format!("attribute syntax {s} does not exist")))
},
)?;
// Match extension against defined output formats
let escaping = escaping.unwrap_or_else(|| {
path.extension()
.map(|s| s.to_str().unwrap())
.unwrap_or("")
.to_string()
});
let mut escaper = None;
for (extensions, path) in &config.escapers {
if extensions.contains(&escaping) {
escaper = Some(path);
break;
}
}
let escaper = escaper.ok_or_else(|| {
CompileError::from(format!("no escaper defined for extension '{escaping}'"))
})?;
let mime_type =
extension_to_mime_type(ext_default_to_path(ext.as_deref(), &path).unwrap_or("txt"))
.to_string();
Ok(TemplateInput {
ast,
config,
syntax,
source,
print,
escaper,
ext,
mime_type,
path,
})
}
#[inline]
pub(crate) fn extension(&self) -> Option<&str> {
ext_default_to_path(self.ext.as_deref(), &self.path)
}
}
#[inline]
fn ext_default_to_path<'a>(ext: Option<&'a str>, path: &'a Path) -> Option<&'a str> {
ext.or_else(|| extension(path))
}
fn extension(path: &Path) -> Option<&str> {
let ext = path.extension().map(|s| s.to_str().unwrap())?;
const JINJA_EXTENSIONS: [&str; 3] = ["j2", "jinja", "jinja2"];
if JINJA_EXTENSIONS.contains(&ext) {
Path::new(path.file_stem().unwrap())
.extension()
.map(|s| s.to_str().unwrap())
.or(Some(ext))
} else {
Some(ext)
}
}
pub(crate) enum Source {
Path(String),
Source(String),
}
#[derive(PartialEq)]
pub(crate) enum Print {
All,
Ast,
Code,
None,
}
impl FromStr for Print {
type Err = CompileError;
fn from_str(s: &str) -> Result<Print, Self::Err> {
use self::Print::*;
Ok(match s {
"all" => All,
"ast" => Ast,
"code" => Code,
"none" => None,
v => return Err(format!("invalid value for print option: {v}",).into()),
})
}
}
impl Default for Print {
fn default() -> Self {
Self::None
}
}
pub(crate) fn extension_to_mime_type(ext: &str) -> Mime {
let basic_type = mime_guess::from_ext(ext).first_or_octet_stream();
for (simple, utf_8) in &TEXT_TYPES {
if &basic_type == simple {
return utf_8.clone();
}
}
basic_type
}
const TEXT_TYPES: [(Mime, Mime); 6] = [
(mime::TEXT_PLAIN, mime::TEXT_PLAIN_UTF_8),
(mime::TEXT_HTML, mime::TEXT_HTML_UTF_8),
(mime::TEXT_CSS, mime::TEXT_CSS_UTF_8),
(mime::TEXT_CSV, mime::TEXT_CSV_UTF_8),
(
mime::TEXT_TAB_SEPARATED_VALUES,
mime::TEXT_TAB_SEPARATED_VALUES_UTF_8,
),
(
mime::APPLICATION_JAVASCRIPT,
mime::APPLICATION_JAVASCRIPT_UTF_8,
),
];
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_ext() {
assert_eq!(extension(Path::new("foo-bar.txt")), Some("txt"));
assert_eq!(extension(Path::new("foo-bar.html")), Some("html"));
assert_eq!(extension(Path::new("foo-bar.unknown")), Some("unknown"));
assert_eq!(extension(Path::new("foo/bar/baz.txt")), Some("txt"));
assert_eq!(extension(Path::new("foo/bar/baz.html")), Some("html"));
assert_eq!(extension(Path::new("foo/bar/baz.unknown")), Some("unknown"));
}
#[test]
fn test_double_ext() {
assert_eq!(extension(Path::new("foo-bar.html.txt")), Some("txt"));
assert_eq!(extension(Path::new("foo-bar.txt.html")), Some("html"));
assert_eq!(extension(Path::new("foo-bar.txt.unknown")), Some("unknown"));
assert_eq!(extension(Path::new("foo/bar/baz.html.txt")), Some("txt"));
assert_eq!(extension(Path::new("foo/bar/baz.txt.html")), Some("html"));
assert_eq!(
extension(Path::new("foo/bar/baz.txt.unknown")),
Some("unknown")
);
}
#[test]
fn test_skip_jinja_ext() {
assert_eq!(extension(Path::new("foo-bar.html.j2")), Some("html"));
assert_eq!(extension(Path::new("foo-bar.html.jinja")), Some("html"));
assert_eq!(extension(Path::new("foo-bar.html.jinja2")), Some("html"));
assert_eq!(extension(Path::new("foo/bar/baz.txt.j2")), Some("txt"));
assert_eq!(extension(Path::new("foo/bar/baz.txt.jinja")), Some("txt"));
assert_eq!(extension(Path::new("foo/bar/baz.txt.jinja2")), Some("txt"));
}
#[test]
fn test_only_jinja_ext() {
assert_eq!(extension(Path::new("foo-bar.j2")), Some("j2"));
assert_eq!(extension(Path::new("foo-bar.jinja")), Some("jinja"));
assert_eq!(extension(Path::new("foo-bar.jinja2")), Some("jinja2"));
}
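#[test]
fn test_mime_types() {
// Illustrative test, added for clarity: known text types are upgraded to
// their `charset=utf-8` variants; unknown extensions fall back to
// `application/octet-stream`.
assert_eq!(
extension_to_mime_type("html").to_string(),
"text/html; charset=utf-8"
);
assert_eq!(
extension_to_mime_type("unknown-ext").to_string(),
"application/octet-stream"
);
}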
}

View File

@@ -1,100 +0,0 @@
#![forbid(unsafe_code)]
#![deny(elided_lifetimes_in_paths)]
#![deny(unreachable_pub)]
use std::borrow::Cow;
use std::fmt;
use proc_macro::TokenStream;
use proc_macro2::Span;
mod config;
mod generator;
mod heritage;
mod input;
mod parser;
#[proc_macro_derive(Template, attributes(template))]
pub fn derive_template(input: TokenStream) -> TokenStream {
generator::derive_template(input)
}
#[derive(Debug, Clone)]
struct CompileError {
msg: Cow<'static, str>,
span: Span,
}
impl CompileError {
fn new<S: Into<Cow<'static, str>>>(s: S, span: Span) -> Self {
Self {
msg: s.into(),
span,
}
}
fn into_compile_error(self) -> TokenStream {
syn::Error::new(self.span, self.msg)
.to_compile_error()
.into()
}
}
impl std::error::Error for CompileError {}
impl fmt::Display for CompileError {
#[inline]
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt.write_str(&self.msg)
}
}
impl From<&'static str> for CompileError {
#[inline]
fn from(s: &'static str) -> Self {
Self::new(s, Span::call_site())
}
}
impl From<String> for CompileError {
#[inline]
fn from(s: String) -> Self {
Self::new(s, Span::call_site())
}
}
// This is used by the code generator to decide whether a named filter is part of
// Askama or should refer to a local `filters` module. It should contain all the
// filters shipped with Askama, even the optional ones (since optional inclusion
// in the const vector based on features seems impossible right now).
const BUILT_IN_FILTERS: &[&str] = &[
"abs",
"capitalize",
"center",
"e",
"escape",
"filesizeformat",
"fmt",
"format",
"indent",
"into_f64",
"into_isize",
"join",
"linebreaks",
"linebreaksbr",
"paragraphbreaks",
"lower",
"lowercase",
"safe",
"trim",
"truncate",
"upper",
"uppercase",
"urlencode",
"urlencode_strict",
"wordcount",
// optional features, reserve the names anyway:
"json",
"markdown",
"yaml",
];

View File

@@ -1,346 +0,0 @@
use std::str;
use nom::branch::alt;
use nom::bytes::complete::{tag, take_till};
use nom::character::complete::char;
use nom::combinator::{cut, map, not, opt, peek, recognize};
use nom::multi::{fold_many0, many0, separated_list0, separated_list1};
use nom::sequence::{delimited, pair, preceded, terminated, tuple};
use nom::IResult;
use super::{
bool_lit, char_lit, identifier, nested_parenthesis, not_ws, num_lit, path, str_lit, ws,
};
#[derive(Debug, PartialEq)]
pub(crate) enum Expr<'a> {
BoolLit(&'a str),
NumLit(&'a str),
StrLit(&'a str),
CharLit(&'a str),
Var(&'a str),
Path(Vec<&'a str>),
Array(Vec<Expr<'a>>),
Attr(Box<Expr<'a>>, &'a str),
Index(Box<Expr<'a>>, Box<Expr<'a>>),
Filter(&'a str, Vec<Expr<'a>>),
Unary(&'a str, Box<Expr<'a>>),
BinOp(&'a str, Box<Expr<'a>>, Box<Expr<'a>>),
Range(&'a str, Option<Box<Expr<'a>>>, Option<Box<Expr<'a>>>),
Group(Box<Expr<'a>>),
Tuple(Vec<Expr<'a>>),
Call(Box<Expr<'a>>, Vec<Expr<'a>>),
RustMacro(&'a str, &'a str),
Try(Box<Expr<'a>>),
}
impl Expr<'_> {
pub(super) fn parse(i: &str) -> IResult<&str, Expr<'_>> {
expr_any(i)
}
pub(super) fn parse_arguments(i: &str) -> IResult<&str, Vec<Expr<'_>>> {
arguments(i)
}
/// Returns `true` if enough assumptions can be made
/// to determine that `self` is copyable.
pub(crate) fn is_copyable(&self) -> bool {
self.is_copyable_within_op(false)
}
fn is_copyable_within_op(&self, within_op: bool) -> bool {
use Expr::*;
match self {
BoolLit(_) | NumLit(_) | StrLit(_) | CharLit(_) => true,
Unary(.., expr) => expr.is_copyable_within_op(true),
BinOp(_, lhs, rhs) => {
lhs.is_copyable_within_op(true) && rhs.is_copyable_within_op(true)
}
Range(..) => true,
// The result of a call likely doesn't need to be borrowed,
// because in that case the call would most likely return a
// reference in the first place.
Call(..) | Path(..) => true,
// If the `expr` is within a `Unary` or `BinOp` then
// an assumption can be made that the operand is copy.
// If not, then the value is moved and adding `.clone()`
// will solve that issue. However, if the operand is
// implicitly borrowed, then it's likely not even possible
// to get the template to compile.
_ => within_op && self.is_attr_self(),
}
}
/// Returns `true` if this is an `Attr` where the `obj` is `"self"`.
pub(crate) fn is_attr_self(&self) -> bool {
match self {
Expr::Attr(obj, _) if matches!(obj.as_ref(), Expr::Var("self")) => true,
Expr::Attr(obj, _) if matches!(obj.as_ref(), Expr::Attr(..)) => obj.is_attr_self(),
_ => false,
}
}
/// Returns `true` if the outcome of this expression may be used multiple times in the same
/// `write!()` call, without evaluating the expression again, i.e. the expression should be
/// side-effect free.
pub(crate) fn is_cacheable(&self) -> bool {
match self {
// Literals are the definition of pure:
Expr::BoolLit(_) => true,
Expr::NumLit(_) => true,
Expr::StrLit(_) => true,
Expr::CharLit(_) => true,
// fmt::Display should have no effects:
Expr::Var(_) => true,
Expr::Path(_) => true,
// Check recursively:
Expr::Array(args) => args.iter().all(|arg| arg.is_cacheable()),
Expr::Attr(lhs, _) => lhs.is_cacheable(),
Expr::Index(lhs, rhs) => lhs.is_cacheable() && rhs.is_cacheable(),
Expr::Filter(_, args) => args.iter().all(|arg| arg.is_cacheable()),
Expr::Unary(_, arg) => arg.is_cacheable(),
Expr::BinOp(_, lhs, rhs) => lhs.is_cacheable() && rhs.is_cacheable(),
Expr::Range(_, lhs, rhs) => {
lhs.as_ref().map_or(true, |v| v.is_cacheable())
&& rhs.as_ref().map_or(true, |v| v.is_cacheable())
}
Expr::Group(arg) => arg.is_cacheable(),
Expr::Tuple(args) => args.iter().all(|arg| arg.is_cacheable()),
// We have too little information to tell if the expression is pure:
Expr::Call(_, _) => false,
Expr::RustMacro(_, _) => false,
Expr::Try(_) => false,
}
}
}
fn expr_bool_lit(i: &str) -> IResult<&str, Expr<'_>> {
map(bool_lit, Expr::BoolLit)(i)
}
fn expr_num_lit(i: &str) -> IResult<&str, Expr<'_>> {
map(num_lit, Expr::NumLit)(i)
}
fn expr_array_lit(i: &str) -> IResult<&str, Expr<'_>> {
delimited(
ws(char('[')),
map(separated_list1(ws(char(',')), expr_any), Expr::Array),
ws(char(']')),
)(i)
}
fn expr_str_lit(i: &str) -> IResult<&str, Expr<'_>> {
map(str_lit, Expr::StrLit)(i)
}
fn expr_char_lit(i: &str) -> IResult<&str, Expr<'_>> {
map(char_lit, Expr::CharLit)(i)
}
fn expr_var(i: &str) -> IResult<&str, Expr<'_>> {
map(identifier, Expr::Var)(i)
}
fn expr_path(i: &str) -> IResult<&str, Expr<'_>> {
let (i, path) = path(i)?;
Ok((i, Expr::Path(path)))
}
fn expr_group(i: &str) -> IResult<&str, Expr<'_>> {
let (i, expr) = preceded(ws(char('(')), opt(expr_any))(i)?;
let expr = match expr {
Some(expr) => expr,
None => {
let (i, _) = char(')')(i)?;
return Ok((i, Expr::Tuple(vec![])));
}
};
let (i, comma) = ws(opt(peek(char(','))))(i)?;
if comma.is_none() {
let (i, _) = char(')')(i)?;
return Ok((i, Expr::Group(Box::new(expr))));
}
let mut exprs = vec![expr];
let (i, _) = fold_many0(
preceded(char(','), ws(expr_any)),
|| (),
|_, expr| {
exprs.push(expr);
},
)(i)?;
let (i, _) = pair(ws(opt(char(','))), char(')'))(i)?;
Ok((i, Expr::Tuple(exprs)))
}
fn expr_single(i: &str) -> IResult<&str, Expr<'_>> {
alt((
expr_bool_lit,
expr_num_lit,
expr_str_lit,
expr_char_lit,
expr_path,
expr_rust_macro,
expr_array_lit,
expr_var,
expr_group,
))(i)
}
enum Suffix<'a> {
Attr(&'a str),
Index(Expr<'a>),
Call(Vec<Expr<'a>>),
Try,
}
fn expr_attr(i: &str) -> IResult<&str, Suffix<'_>> {
map(
preceded(
ws(pair(char('.'), not(char('.')))),
cut(alt((num_lit, identifier))),
),
Suffix::Attr,
)(i)
}
fn expr_index(i: &str) -> IResult<&str, Suffix<'_>> {
map(
preceded(ws(char('[')), cut(terminated(expr_any, ws(char(']'))))),
Suffix::Index,
)(i)
}
fn expr_call(i: &str) -> IResult<&str, Suffix<'_>> {
map(arguments, Suffix::Call)(i)
}
fn expr_try(i: &str) -> IResult<&str, Suffix<'_>> {
map(preceded(take_till(not_ws), char('?')), |_| Suffix::Try)(i)
}
fn filter(i: &str) -> IResult<&str, (&str, Option<Vec<Expr<'_>>>)> {
let (i, (_, fname, args)) = tuple((char('|'), ws(identifier), opt(arguments)))(i)?;
Ok((i, (fname, args)))
}
fn expr_filtered(i: &str) -> IResult<&str, Expr<'_>> {
let (i, (obj, filters)) = tuple((expr_prefix, many0(filter)))(i)?;
let mut res = obj;
for (fname, args) in filters {
res = Expr::Filter(fname, {
let mut args = match args {
Some(inner) => inner,
None => Vec::new(),
};
args.insert(0, res);
args
});
}
Ok((i, res))
}
fn expr_prefix(i: &str) -> IResult<&str, Expr<'_>> {
let (i, (ops, mut expr)) = pair(many0(ws(alt((tag("!"), tag("-"))))), expr_suffix)(i)?;
for op in ops.iter().rev() {
expr = Expr::Unary(op, Box::new(expr));
}
Ok((i, expr))
}
fn expr_suffix(i: &str) -> IResult<&str, Expr<'_>> {
let (mut i, mut expr) = expr_single(i)?;
loop {
let (j, suffix) = opt(alt((expr_attr, expr_index, expr_call, expr_try)))(i)?;
i = j;
match suffix {
Some(Suffix::Attr(attr)) => expr = Expr::Attr(expr.into(), attr),
Some(Suffix::Index(index)) => expr = Expr::Index(expr.into(), index.into()),
Some(Suffix::Call(args)) => expr = Expr::Call(expr.into(), args),
Some(Suffix::Try) => expr = Expr::Try(expr.into()),
None => break,
}
}
Ok((i, expr))
}
fn macro_arguments(i: &str) -> IResult<&str, &str> {
delimited(char('('), recognize(nested_parenthesis), char(')'))(i)
}
fn expr_rust_macro(i: &str) -> IResult<&str, Expr<'_>> {
let (i, (mname, _, args)) = tuple((identifier, char('!'), macro_arguments))(i)?;
Ok((i, Expr::RustMacro(mname, args)))
}
macro_rules! expr_prec_layer {
( $name:ident, $inner:ident, $op:expr ) => {
fn $name(i: &str) -> IResult<&str, Expr<'_>> {
let (i, left) = $inner(i)?;
let (i, right) = many0(pair(
ws(tag($op)),
$inner,
))(i)?;
Ok((
i,
right.into_iter().fold(left, |left, (op, right)| {
Expr::BinOp(op, Box::new(left), Box::new(right))
}),
))
}
};
( $name:ident, $inner:ident, $( $op:expr ),+ ) => {
fn $name(i: &str) -> IResult<&str, Expr<'_>> {
let (i, left) = $inner(i)?;
let (i, right) = many0(pair(
ws(alt(($( tag($op) ),+,))),
$inner,
))(i)?;
Ok((
i,
right.into_iter().fold(left, |left, (op, right)| {
Expr::BinOp(op, Box::new(left), Box::new(right))
}),
))
}
}
}
expr_prec_layer!(expr_muldivmod, expr_filtered, "*", "/", "%");
expr_prec_layer!(expr_addsub, expr_muldivmod, "+", "-");
expr_prec_layer!(expr_shifts, expr_addsub, ">>", "<<");
expr_prec_layer!(expr_band, expr_shifts, "&");
expr_prec_layer!(expr_bxor, expr_band, "^");
expr_prec_layer!(expr_bor, expr_bxor, "|");
expr_prec_layer!(expr_compare, expr_bor, "==", "!=", ">=", ">", "<=", "<");
expr_prec_layer!(expr_and, expr_compare, "&&");
expr_prec_layer!(expr_or, expr_and, "||");
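// The layers above nest from tightest to loosest binding, so `a + b * c`
// parses as `BinOp("+", a, BinOp("*", b, c))`, and the fold in each layer
// makes every operator left-associative: `a - b - c` is `(a - b) - c`.
// On top of them, `expr_any` adds ranges, where either side may be omitted:
// `..n`, `a..`, `a..=b` and a bare `..` all parse to `Expr::Range` with the
// absent side as `None`.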
fn expr_any(i: &str) -> IResult<&str, Expr<'_>> {
let range_right = |i| pair(ws(alt((tag("..="), tag("..")))), opt(expr_or))(i);
alt((
map(range_right, |(op, right)| {
Expr::Range(op, None, right.map(Box::new))
}),
map(
pair(expr_or, opt(range_right)),
|(left, right)| match right {
Some((op, right)) => Expr::Range(op, Some(Box::new(left)), right.map(Box::new)),
None => left,
},
),
))(i)
}
fn arguments(i: &str) -> IResult<&str, Vec<Expr<'_>>> {
delimited(
ws(char('(')),
separated_list0(char(','), ws(expr_any)),
ws(char(')')),
)(i)
}

View File

@@ -1,317 +0,0 @@
use std::cell::Cell;
use std::str;
use nom::branch::alt;
use nom::bytes::complete::{escaped, is_not, tag, take_till};
use nom::character::complete::char;
use nom::character::complete::{anychar, digit1};
use nom::combinator::{eof, map, not, opt, recognize, value};
use nom::error::ErrorKind;
use nom::multi::separated_list1;
use nom::sequence::{delimited, pair, tuple};
use nom::{error_position, AsChar, IResult, InputTakeAtPosition};
pub(crate) use self::expr::Expr;
pub(crate) use self::node::{Cond, CondTest, Loop, Macro, Node, Target, When, Whitespace, Ws};
use crate::config::Syntax;
use crate::CompileError;
mod expr;
mod node;
#[cfg(test)]
mod tests;
struct State<'a> {
syntax: &'a Syntax<'a>,
loop_depth: Cell<usize>,
}
impl<'a> State<'a> {
fn new(syntax: &'a Syntax<'a>) -> State<'a> {
State {
syntax,
loop_depth: Cell::new(0),
}
}
fn enter_loop(&self) {
self.loop_depth.set(self.loop_depth.get() + 1);
}
fn leave_loop(&self) {
self.loop_depth.set(self.loop_depth.get() - 1);
}
fn is_in_loop(&self) -> bool {
self.loop_depth.get() > 0
}
}
impl From<char> for Whitespace {
fn from(c: char) -> Self {
match c {
'+' => Self::Preserve,
'-' => Self::Suppress,
'~' => Self::Minimize,
_ => panic!("unsupported `Whitespace` conversion"),
}
}
}
pub(crate) fn parse<'a>(
src: &'a str,
syntax: &'a Syntax<'_>,
) -> Result<Vec<Node<'a>>, CompileError> {
match Node::parse(src, &State::new(syntax)) {
Ok((left, res)) => {
if !left.is_empty() {
Err(format!("unable to parse template:\n\n{left:?}").into())
} else {
Ok(res)
}
}
Err(nom::Err::Error(err)) | Err(nom::Err::Failure(err)) => {
let nom::error::Error { input, .. } = err;
let offset = src.len() - input.len();
let (source_before, source_after) = src.split_at(offset);
let source_after = match source_after.char_indices().enumerate().take(41).last() {
Some((40, (i, _))) => format!("{:?}...", &source_after[..i]),
_ => format!("{source_after:?}"),
};
let (row, last_line) = source_before.lines().enumerate().last().unwrap();
let column = last_line.chars().count();
let msg = format!(
"problems parsing template source at row {}, column {} near:\n{}",
row + 1,
column,
source_after,
);
Err(msg.into())
}
Err(nom::Err::Incomplete(_)) => Err("parsing incomplete".into()),
}
}
fn is_ws(c: char) -> bool {
matches!(c, ' ' | '\t' | '\r' | '\n')
}
fn not_ws(c: char) -> bool {
!is_ws(c)
}
fn ws<'a, O>(
inner: impl FnMut(&'a str) -> IResult<&'a str, O>,
) -> impl FnMut(&'a str) -> IResult<&'a str, O> {
delimited(take_till(not_ws), inner, take_till(not_ws))
}
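// `split_ws_parts` splits a literal into leading whitespace, trimmed content
// and trailing whitespace, e.g. `"\ta "` becomes `Node::Lit("\t", "a", " ")`.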
fn split_ws_parts(s: &str) -> Node<'_> {
let trimmed_start = s.trim_start_matches(is_ws);
let len_start = s.len() - trimmed_start.len();
let trimmed = trimmed_start.trim_end_matches(is_ws);
Node::Lit(&s[..len_start], trimmed, &trimmed_start[trimmed.len()..])
}
/// Skips input until `end` is found, but does not consume it.
/// Returns the tuple that would be returned when parsing `end`.
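///
/// e.g. `skip_till(tag("{%"))("abc{% if")` returns `Ok(("{% if", (" if", "{%")))`:
/// the returned input still starts at `end`, while the inner pair holds the
/// rest after `end` and the parsed `end` itself.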
fn skip_till<'a, O>(
end: impl FnMut(&'a str) -> IResult<&'a str, O>,
) -> impl FnMut(&'a str) -> IResult<&'a str, (&'a str, O)> {
enum Next<O> {
IsEnd(O),
NotEnd(char),
}
let mut next = alt((map(end, Next::IsEnd), map(anychar, Next::NotEnd)));
move |start: &'a str| {
let mut i = start;
loop {
let (j, is_end) = next(i)?;
match is_end {
Next::IsEnd(lookahead) => return Ok((i, (j, lookahead))),
Next::NotEnd(_) => i = j,
}
}
}
}
fn keyword<'a>(k: &'a str) -> impl FnMut(&'a str) -> IResult<&'a str, &'a str> {
move |i: &'a str| -> IResult<&'a str, &'a str> {
let (j, v) = identifier(i)?;
if k == v {
Ok((j, v))
} else {
Err(nom::Err::Error(error_position!(i, ErrorKind::Tag)))
}
}
}
fn identifier(input: &str) -> IResult<&str, &str> {
recognize(pair(identifier_start, opt(identifier_tail)))(input)
}
fn identifier_start(s: &str) -> IResult<&str, &str> {
s.split_at_position1_complete(
|c| !(c.is_alpha() || c == '_' || c >= '\u{0080}'),
nom::error::ErrorKind::Alpha,
)
}
fn identifier_tail(s: &str) -> IResult<&str, &str> {
s.split_at_position1_complete(
|c| !(c.is_alphanum() || c == '_' || c >= '\u{0080}'),
nom::error::ErrorKind::Alpha,
)
}
fn bool_lit(i: &str) -> IResult<&str, &str> {
alt((keyword("false"), keyword("true")))(i)
}
fn num_lit(i: &str) -> IResult<&str, &str> {
recognize(pair(digit1, opt(pair(char('.'), digit1))))(i)
}
fn str_lit(i: &str) -> IResult<&str, &str> {
let (i, s) = delimited(
char('"'),
opt(escaped(is_not("\\\""), '\\', anychar)),
char('"'),
)(i)?;
Ok((i, s.unwrap_or_default()))
}
fn char_lit(i: &str) -> IResult<&str, &str> {
let (i, s) = delimited(
char('\''),
opt(escaped(is_not("\\\'"), '\\', anychar)),
char('\''),
)(i)?;
Ok((i, s.unwrap_or_default()))
}
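// `nested_parenthesis` scans ahead to the `)` matching the already consumed
// `(`: for input `a(b)) tail` it stops at the second `)` and leaves `") tail"`
// unconsumed for the surrounding `delimited` to finish. Parentheses inside
// string literals don't affect the nesting count.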
fn nested_parenthesis(i: &str) -> IResult<&str, ()> {
let mut nested = 0;
let mut last = 0;
let mut in_str = false;
let mut escaped = false;
for (i, b) in i.chars().enumerate() {
if !(b == '(' || b == ')') || !in_str {
match b {
'(' => nested += 1,
')' => {
if nested == 0 {
last = i;
break;
}
nested -= 1;
}
'"' => {
if in_str {
if !escaped {
in_str = false;
}
} else {
in_str = true;
}
}
'\\' => {
escaped = !escaped;
}
_ => (),
}
}
if escaped && b != '\\' {
escaped = false;
}
}
if nested == 0 {
Ok((&i[last..], ()))
} else {
Err(nom::Err::Error(error_position!(
i,
ErrorKind::SeparatedNonEmptyList
)))
}
}
fn path(i: &str) -> IResult<&str, Vec<&str>> {
let root = opt(value("", ws(tag("::"))));
let tail = separated_list1(ws(tag("::")), identifier);
match tuple((root, identifier, ws(tag("::")), tail))(i) {
Ok((i, (root, start, _, rest))) => {
let mut path = Vec::new();
path.extend(root);
path.push(start);
path.extend(rest);
Ok((i, path))
}
Err(err) => {
if let Ok((i, name)) = identifier(i) {
// The returned identifier can be assumed to be a path if it:
// - contains both lowercase and uppercase characters, i.e. a type name like `None`
// - doesn't contain any lowercase characters, i.e. it's a constant
// In short, if it contains any uppercase characters it's a path.
if name.contains(char::is_uppercase) {
return Ok((i, vec![name]));
}
}
// If `identifier()` fails then just return the original error
Err(err)
}
}
}
fn take_content<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let p_start = alt((
tag(s.syntax.block_start),
tag(s.syntax.comment_start),
tag(s.syntax.expr_start),
));
let (i, _) = not(eof)(i)?;
let (i, content) = opt(recognize(skip_till(p_start)))(i)?;
let (i, content) = match content {
Some("") => {
// {block,comment,expr}_start follows immediately.
return Err(nom::Err::Error(error_position!(i, ErrorKind::TakeUntil)));
}
Some(content) => (i, content),
None => ("", i), // there is no {block,comment,expr}_start: take everything
};
Ok((i, split_ws_parts(content)))
}
fn tag_block_start<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
tag(s.syntax.block_start)(i)
}
fn tag_block_end<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
tag(s.syntax.block_end)(i)
}
fn tag_comment_start<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
tag(s.syntax.comment_start)(i)
}
fn tag_comment_end<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
tag(s.syntax.comment_end)(i)
}
fn tag_expr_start<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
tag(s.syntax.expr_start)(i)
}
fn tag_expr_end<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
tag(s.syntax.expr_end)(i)
}

View File

@@ -1,682 +0,0 @@
use std::str;
use nom::branch::alt;
use nom::bytes::complete::{tag, take_until};
use nom::character::complete::char;
use nom::combinator::{complete, consumed, cut, map, opt, peek, value};
use nom::error::{Error, ErrorKind};
use nom::multi::{fold_many0, many0, many1, separated_list0, separated_list1};
use nom::sequence::{delimited, pair, preceded, terminated, tuple};
use nom::{error_position, IResult};
use super::{
bool_lit, char_lit, identifier, keyword, num_lit, path, skip_till, split_ws_parts, str_lit,
tag_block_end, tag_block_start, tag_comment_end, tag_comment_start, tag_expr_end,
tag_expr_start, take_content, ws, Expr, State,
};
use crate::config::WhitespaceHandling;
#[derive(Debug, PartialEq)]
pub(crate) enum Node<'a> {
Lit(&'a str, &'a str, &'a str),
Comment(Ws),
Expr(Ws, Expr<'a>),
Call(Ws, Option<&'a str>, &'a str, Vec<Expr<'a>>),
LetDecl(Ws, Target<'a>),
Let(Ws, Target<'a>, Expr<'a>),
Cond(Vec<Cond<'a>>, Ws),
Match(Ws, Expr<'a>, Vec<When<'a>>, Ws),
Loop(Loop<'a>),
Extends(&'a str),
BlockDef(Ws, &'a str, Vec<Node<'a>>, Ws),
Include(Ws, &'a str),
Import(Ws, &'a str, &'a str),
Macro(&'a str, Macro<'a>),
Raw(Ws, &'a str, &'a str, &'a str, Ws),
Break(Ws),
Continue(Ws),
}
#[derive(Debug, PartialEq)]
pub(crate) enum Target<'a> {
Name(&'a str),
Tuple(Vec<&'a str>, Vec<Target<'a>>),
Struct(Vec<&'a str>, Vec<(&'a str, Target<'a>)>),
NumLit(&'a str),
StrLit(&'a str),
CharLit(&'a str),
BoolLit(&'a str),
Path(Vec<&'a str>),
}
#[derive(Clone, Copy, Debug, PartialEq)]
pub(crate) enum Whitespace {
Preserve,
Suppress,
Minimize,
}
impl From<WhitespaceHandling> for Whitespace {
fn from(ws: WhitespaceHandling) -> Self {
match ws {
WhitespaceHandling::Suppress => Whitespace::Suppress,
WhitespaceHandling::Preserve => Whitespace::Preserve,
WhitespaceHandling::Minimize => Whitespace::Minimize,
}
}
}
#[derive(Debug, PartialEq)]
pub(crate) struct Loop<'a> {
pub(crate) ws1: Ws,
pub(crate) var: Target<'a>,
pub(crate) iter: Expr<'a>,
pub(crate) cond: Option<Expr<'a>>,
pub(crate) body: Vec<Node<'a>>,
pub(crate) ws2: Ws,
pub(crate) else_block: Vec<Node<'a>>,
pub(crate) ws3: Ws,
}
pub(crate) type When<'a> = (Ws, Target<'a>, Vec<Node<'a>>);
#[derive(Debug, PartialEq)]
pub(crate) struct Macro<'a> {
pub(crate) ws1: Ws,
pub(crate) args: Vec<&'a str>,
pub(crate) nodes: Vec<Node<'a>>,
pub(crate) ws2: Ws,
}
/// First field is "a whitespace handling modifier (`-`, `+` or `~`) was used
/// on the left side of the item".
///
/// Second field is "a whitespace handling modifier was used on the right side
/// of the item".
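///
/// e.g. `{%- if cond +%}` is parsed with
/// `Ws(Some(Whitespace::Suppress), Some(Whitespace::Preserve))`.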
#[derive(Clone, Copy, Debug, PartialEq)]
pub(crate) struct Ws(pub(crate) Option<Whitespace>, pub(crate) Option<Whitespace>);
pub(crate) type Cond<'a> = (Ws, Option<CondTest<'a>>, Vec<Node<'a>>);
#[derive(Debug, PartialEq)]
pub(crate) struct CondTest<'a> {
pub(crate) target: Option<Target<'a>>,
pub(crate) expr: Expr<'a>,
}
impl Node<'_> {
pub(super) fn parse<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Vec<Node<'a>>> {
parse_template(i, s)
}
}
impl Target<'_> {
pub(super) fn parse(i: &str) -> IResult<&str, Target<'_>> {
target(i)
}
}
fn expr_handle_ws(i: &str) -> IResult<&str, Whitespace> {
alt((char('-'), char('+'), char('~')))(i).map(|(s, r)| (s, Whitespace::from(r)))
}
fn parameters(i: &str) -> IResult<&str, Vec<&str>> {
delimited(
ws(char('(')),
separated_list0(char(','), ws(identifier)),
ws(char(')')),
)(i)
}
fn block_call(i: &str) -> IResult<&str, Node<'_>> {
let mut p = tuple((
opt(expr_handle_ws),
ws(keyword("call")),
cut(tuple((
opt(tuple((ws(identifier), ws(tag("::"))))),
ws(identifier),
ws(Expr::parse_arguments),
opt(expr_handle_ws),
))),
));
let (i, (pws, _, (scope, name, args, nws))) = p(i)?;
let scope = scope.map(|(scope, _)| scope);
Ok((i, Node::Call(Ws(pws, nws), scope, name, args)))
}
fn cond_if(i: &str) -> IResult<&str, CondTest<'_>> {
let mut p = preceded(
ws(keyword("if")),
cut(tuple((
opt(delimited(
ws(alt((keyword("let"), keyword("set")))),
ws(Target::parse),
ws(char('=')),
)),
ws(Expr::parse),
))),
);
let (i, (target, expr)) = p(i)?;
Ok((i, CondTest { target, expr }))
}
fn cond_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Cond<'a>> {
let mut p = tuple((
|i| tag_block_start(i, s),
opt(expr_handle_ws),
ws(keyword("else")),
cut(tuple((
opt(cond_if),
opt(expr_handle_ws),
|i| tag_block_end(i, s),
cut(|i| parse_template(i, s)),
))),
));
let (i, (_, pws, _, (cond, nws, _, block))) = p(i)?;
Ok((i, (Ws(pws, nws), cond, block)))
}
fn block_if<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let mut p = tuple((
opt(expr_handle_ws),
cond_if,
cut(tuple((
opt(expr_handle_ws),
|i| tag_block_end(i, s),
cut(tuple((
|i| parse_template(i, s),
many0(|i| cond_block(i, s)),
cut(tuple((
|i| tag_block_start(i, s),
opt(expr_handle_ws),
ws(keyword("endif")),
opt(expr_handle_ws),
))),
))),
))),
));
let (i, (pws1, cond, (nws1, _, (block, elifs, (_, pws2, _, nws2))))) = p(i)?;
let mut res = vec![(Ws(pws1, nws1), Some(cond), block)];
res.extend(elifs);
Ok((i, Node::Cond(res, Ws(pws2, nws2))))
}
fn match_else_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, When<'a>> {
let mut p = tuple((
|i| tag_block_start(i, s),
opt(expr_handle_ws),
ws(keyword("else")),
cut(tuple((
opt(expr_handle_ws),
|i| tag_block_end(i, s),
cut(|i| parse_template(i, s)),
))),
));
let (i, (_, pws, _, (nws, _, block))) = p(i)?;
Ok((i, (Ws(pws, nws), Target::Name("_"), block)))
}
fn when_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, When<'a>> {
let mut p = tuple((
|i| tag_block_start(i, s),
opt(expr_handle_ws),
ws(keyword("when")),
cut(tuple((
ws(Target::parse),
opt(expr_handle_ws),
|i| tag_block_end(i, s),
cut(|i| parse_template(i, s)),
))),
));
let (i, (_, pws, _, (target, nws, _, block))) = p(i)?;
Ok((i, (Ws(pws, nws), target, block)))
}
fn block_match<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let mut p = tuple((
opt(expr_handle_ws),
ws(keyword("match")),
cut(tuple((
ws(Expr::parse),
opt(expr_handle_ws),
|i| tag_block_end(i, s),
cut(tuple((
ws(many0(ws(value((), |i| block_comment(i, s))))),
many1(|i| when_block(i, s)),
cut(tuple((
opt(|i| match_else_block(i, s)),
cut(tuple((
ws(|i| tag_block_start(i, s)),
opt(expr_handle_ws),
ws(keyword("endmatch")),
opt(expr_handle_ws),
))),
))),
))),
))),
));
let (i, (pws1, _, (expr, nws1, _, (_, arms, (else_arm, (_, pws2, _, nws2)))))) = p(i)?;
let mut arms = arms;
if let Some(arm) = else_arm {
arms.push(arm);
}
Ok((i, Node::Match(Ws(pws1, nws1), expr, arms, Ws(pws2, nws2))))
}
fn block_let(i: &str) -> IResult<&str, Node<'_>> {
let mut p = tuple((
opt(expr_handle_ws),
ws(alt((keyword("let"), keyword("set")))),
cut(tuple((
ws(Target::parse),
opt(tuple((ws(char('=')), ws(Expr::parse)))),
opt(expr_handle_ws),
))),
));
let (i, (pws, _, (var, val, nws))) = p(i)?;
Ok((
i,
if let Some((_, val)) = val {
Node::Let(Ws(pws, nws), var, val)
} else {
Node::LetDecl(Ws(pws, nws), var)
},
))
}
fn parse_loop_content<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Vec<Node<'a>>> {
s.enter_loop();
let result = parse_template(i, s);
s.leave_loop();
result
}
fn block_for<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let if_cond = preceded(ws(keyword("if")), cut(ws(Expr::parse)));
let else_block = |i| {
let mut p = preceded(
ws(keyword("else")),
cut(tuple((
opt(expr_handle_ws),
delimited(
|i| tag_block_end(i, s),
|i| parse_template(i, s),
|i| tag_block_start(i, s),
),
opt(expr_handle_ws),
))),
);
let (i, (pws, nodes, nws)) = p(i)?;
Ok((i, (pws, nodes, nws)))
};
let mut p = tuple((
opt(expr_handle_ws),
ws(keyword("for")),
cut(tuple((
ws(Target::parse),
ws(keyword("in")),
cut(tuple((
ws(Expr::parse),
opt(if_cond),
opt(expr_handle_ws),
|i| tag_block_end(i, s),
cut(tuple((
|i| parse_loop_content(i, s),
cut(tuple((
|i| tag_block_start(i, s),
opt(expr_handle_ws),
opt(else_block),
ws(keyword("endfor")),
opt(expr_handle_ws),
))),
))),
))),
))),
));
let (i, (pws1, _, (var, _, (iter, cond, nws1, _, (body, (_, pws2, else_block, _, nws2)))))) =
p(i)?;
let (nws3, else_block, pws3) = else_block.unwrap_or_default();
Ok((
i,
Node::Loop(Loop {
ws1: Ws(pws1, nws1),
var,
iter,
cond,
body,
ws2: Ws(pws2, nws3),
else_block,
ws3: Ws(pws3, nws2),
}),
))
}
fn block_extends(i: &str) -> IResult<&str, Node<'_>> {
let (i, (_, name)) = tuple((ws(keyword("extends")), ws(str_lit)))(i)?;
Ok((i, Node::Extends(name)))
}
fn block_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let mut start = tuple((
opt(expr_handle_ws),
ws(keyword("block")),
cut(tuple((ws(identifier), opt(expr_handle_ws), |i| {
tag_block_end(i, s)
}))),
));
let (i, (pws1, _, (name, nws1, _))) = start(i)?;
let mut end = cut(tuple((
|i| parse_template(i, s),
cut(tuple((
|i| tag_block_start(i, s),
opt(expr_handle_ws),
ws(keyword("endblock")),
cut(tuple((opt(ws(keyword(name))), opt(expr_handle_ws)))),
))),
)));
let (i, (contents, (_, pws2, _, (_, nws2)))) = end(i)?;
Ok((
i,
Node::BlockDef(Ws(pws1, nws1), name, contents, Ws(pws2, nws2)),
))
}
fn block_include(i: &str) -> IResult<&str, Node<'_>> {
let mut p = tuple((
opt(expr_handle_ws),
ws(keyword("include")),
cut(pair(ws(str_lit), opt(expr_handle_ws))),
));
let (i, (pws, _, (name, nws))) = p(i)?;
Ok((i, Node::Include(Ws(pws, nws), name)))
}
fn block_import(i: &str) -> IResult<&str, Node<'_>> {
let mut p = tuple((
opt(expr_handle_ws),
ws(keyword("import")),
cut(tuple((
ws(str_lit),
ws(keyword("as")),
cut(pair(ws(identifier), opt(expr_handle_ws))),
))),
));
let (i, (pws, _, (name, _, (scope, nws)))) = p(i)?;
Ok((i, Node::Import(Ws(pws, nws), name, scope)))
}
fn block_macro<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let mut start = tuple((
opt(expr_handle_ws),
ws(keyword("macro")),
cut(tuple((
ws(identifier),
ws(parameters),
opt(expr_handle_ws),
|i| tag_block_end(i, s),
))),
));
let (i, (pws1, _, (name, params, nws1, _))) = start(i)?;
let mut end = cut(tuple((
|i| parse_template(i, s),
cut(tuple((
|i| tag_block_start(i, s),
opt(expr_handle_ws),
ws(keyword("endmacro")),
cut(tuple((opt(ws(keyword(name))), opt(expr_handle_ws)))),
))),
)));
let (i, (contents, (_, pws2, _, (_, nws2)))) = end(i)?;
assert_ne!(name, "super", "invalid macro name 'super'");
Ok((
i,
Node::Macro(
name,
Macro {
ws1: Ws(pws1, nws1),
args: params,
nodes: contents,
ws2: Ws(pws2, nws2),
},
),
))
}
fn block_raw<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let endraw = tuple((
|i| tag_block_start(i, s),
opt(expr_handle_ws),
ws(keyword("endraw")),
opt(expr_handle_ws),
peek(|i| tag_block_end(i, s)),
));
let mut p = tuple((
opt(expr_handle_ws),
ws(keyword("raw")),
cut(tuple((
opt(expr_handle_ws),
|i| tag_block_end(i, s),
consumed(skip_till(endraw)),
))),
));
let (_, (pws1, _, (nws1, _, (contents, (i, (_, pws2, _, nws2, _)))))) = p(i)?;
let (lws, val, rws) = match split_ws_parts(contents) {
Node::Lit(lws, val, rws) => (lws, val, rws),
_ => unreachable!(),
};
let ws1 = Ws(pws1, nws1);
let ws2 = Ws(pws2, nws2);
Ok((i, Node::Raw(ws1, lws, val, rws, ws2)))
}
fn break_statement<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let mut p = tuple((
opt(expr_handle_ws),
ws(keyword("break")),
opt(expr_handle_ws),
));
let (j, (pws, _, nws)) = p(i)?;
if !s.is_in_loop() {
return Err(nom::Err::Failure(error_position!(i, ErrorKind::Tag)));
}
Ok((j, Node::Break(Ws(pws, nws))))
}
fn continue_statement<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let mut p = tuple((
opt(expr_handle_ws),
ws(keyword("continue")),
opt(expr_handle_ws),
));
let (j, (pws, _, nws)) = p(i)?;
if !s.is_in_loop() {
return Err(nom::Err::Failure(error_position!(i, ErrorKind::Tag)));
}
Ok((j, Node::Continue(Ws(pws, nws))))
}
fn block_node<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let mut p = tuple((
|i| tag_block_start(i, s),
alt((
block_call,
block_let,
|i| block_if(i, s),
|i| block_for(i, s),
|i| block_match(i, s),
block_extends,
block_include,
block_import,
|i| block_block(i, s),
|i| block_macro(i, s),
|i| block_raw(i, s),
|i| break_statement(i, s),
|i| continue_statement(i, s),
)),
cut(|i| tag_block_end(i, s)),
));
let (i, (_, contents, _)) = p(i)?;
Ok((i, contents))
}
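// `block_comment_body` consumes a comment body while balancing nested comment
// delimiters, so `{# a {# b #} c #}` is treated as a single comment and only
// its final `#}` terminates it.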
fn block_comment_body<'a>(mut i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
let mut level = 0;
loop {
let (end, tail) = take_until(s.syntax.comment_end)(i)?;
match take_until::<_, _, Error<_>>(s.syntax.comment_start)(i) {
Ok((start, _)) if start.as_ptr() < end.as_ptr() => {
level += 1;
i = &start[2..];
}
_ if level > 0 => {
level -= 1;
i = &end[2..];
}
_ => return Ok((end, tail)),
}
}
}
fn block_comment<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let mut p = tuple((
|i| tag_comment_start(i, s),
cut(tuple((
opt(expr_handle_ws),
|i| block_comment_body(i, s),
|i| tag_comment_end(i, s),
))),
));
let (i, (_, (pws, tail, _))) = p(i)?;
let nws = if tail.ends_with('-') {
Some(Whitespace::Suppress)
} else if tail.ends_with('+') {
Some(Whitespace::Preserve)
} else if tail.ends_with('~') {
Some(Whitespace::Minimize)
} else {
None
};
Ok((i, Node::Comment(Ws(pws, nws))))
}
fn expr_node<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let mut p = tuple((
|i| tag_expr_start(i, s),
cut(tuple((
opt(expr_handle_ws),
ws(Expr::parse),
opt(expr_handle_ws),
|i| tag_expr_end(i, s),
))),
));
let (i, (_, (pws, expr, nws, _))) = p(i)?;
Ok((i, Node::Expr(Ws(pws, nws), expr)))
}
fn parse_template<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Vec<Node<'a>>> {
many0(alt((
complete(|i| take_content(i, s)),
complete(|i| block_comment(i, s)),
complete(|i| expr_node(i, s)),
complete(|i| block_node(i, s)),
)))(i)
}
fn variant_lit(i: &str) -> IResult<&str, Target<'_>> {
alt((
map(str_lit, Target::StrLit),
map(char_lit, Target::CharLit),
map(num_lit, Target::NumLit),
map(bool_lit, Target::BoolLit),
))(i)
}
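// `target` parses the binding side of `let`/`when` much like a Rust pattern:
// literals (`1`, `"a"`), tuples (`(a, b)`), tuple structs (`Some(x)`), named
// structs (`Point { x: a, y }`), bare paths (`Option::None`) and plain names.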
fn target(i: &str) -> IResult<&str, Target<'_>> {
let mut opt_opening_paren = map(opt(ws(char('('))), |o| o.is_some());
let mut opt_closing_paren = map(opt(ws(char(')'))), |o| o.is_some());
let mut opt_opening_brace = map(opt(ws(char('{'))), |o| o.is_some());
let (i, lit) = opt(variant_lit)(i)?;
if let Some(lit) = lit {
return Ok((i, lit));
}
// match tuples and unused parentheses
let (i, target_is_tuple) = opt_opening_paren(i)?;
if target_is_tuple {
let (i, is_empty_tuple) = opt_closing_paren(i)?;
if is_empty_tuple {
return Ok((i, Target::Tuple(Vec::new(), Vec::new())));
}
let (i, first_target) = target(i)?;
let (i, is_unused_paren) = opt_closing_paren(i)?;
if is_unused_paren {
return Ok((i, first_target));
}
let mut targets = vec![first_target];
let (i, _) = cut(tuple((
fold_many0(
preceded(ws(char(',')), target),
|| (),
|_, target| {
targets.push(target);
},
),
opt(ws(char(','))),
ws(cut(char(')'))),
)))(i)?;
return Ok((i, Target::Tuple(Vec::new(), targets)));
}
// match structs
let (i, path) = opt(path)(i)?;
if let Some(path) = path {
let i_before_matching_with = i;
let (i, _) = opt(ws(keyword("with")))(i)?;
let (i, is_unnamed_struct) = opt_opening_paren(i)?;
if is_unnamed_struct {
let (i, targets) = alt((
map(char(')'), |_| Vec::new()),
terminated(
cut(separated_list1(ws(char(',')), target)),
pair(opt(ws(char(','))), ws(cut(char(')')))),
),
))(i)?;
return Ok((i, Target::Tuple(path, targets)));
}
let (i, is_named_struct) = opt_opening_brace(i)?;
if is_named_struct {
let (i, targets) = alt((
map(char('}'), |_| Vec::new()),
terminated(
cut(separated_list1(ws(char(',')), named_target)),
pair(opt(ws(char(','))), ws(cut(char('}')))),
),
))(i)?;
return Ok((i, Target::Struct(path, targets)));
}
return Ok((i_before_matching_with, Target::Path(path)));
}
// neither literal nor struct nor path
map(identifier, Target::Name)(i)
}
fn named_target(i: &str) -> IResult<&str, (&str, Target<'_>)> {
let (i, (src, target)) = pair(identifier, opt(preceded(ws(char(':')), target)))(i)?;
Ok((i, (src, target.unwrap_or(Target::Name(src)))))
}

View File

@@ -1,668 +0,0 @@
use crate::config::Syntax;
use crate::parser::{Expr, Node, Whitespace, Ws};
fn check_ws_split(s: &str, res: &(&str, &str, &str)) {
match super::split_ws_parts(s) {
Node::Lit(lws, s, rws) => {
assert_eq!(lws, res.0);
assert_eq!(s, res.1);
assert_eq!(rws, res.2);
}
_ => {
panic!("fail");
}
}
}
#[test]
fn test_ws_splitter() {
check_ws_split("", &("", "", ""));
check_ws_split("a", &("", "a", ""));
check_ws_split("\ta", &("\t", "a", ""));
check_ws_split("b\n", &("", "b", "\n"));
check_ws_split(" \t\r\n", &(" \t\r\n", "", ""));
}
#[test]
#[should_panic]
fn test_invalid_block() {
super::parse("{% extend \"blah\" %}", &Syntax::default()).unwrap();
}
#[test]
fn test_parse_filter() {
use Expr::*;
let syntax = Syntax::default();
assert_eq!(
super::parse("{{ strvar|e }}", &syntax).unwrap(),
vec![Node::Expr(Ws(None, None), Filter("e", vec![Var("strvar")]),)],
);
assert_eq!(
super::parse("{{ 2|abs }}", &syntax).unwrap(),
vec![Node::Expr(Ws(None, None), Filter("abs", vec![NumLit("2")]),)],
);
assert_eq!(
super::parse("{{ -2|abs }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Filter("abs", vec![Unary("-", NumLit("2").into())]),
)],
);
assert_eq!(
super::parse("{{ (1 - 2)|abs }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Filter(
"abs",
vec![Group(
BinOp("-", NumLit("1").into(), NumLit("2").into()).into()
)]
),
)],
);
}
#[test]
fn test_parse_numbers() {
let syntax = Syntax::default();
assert_eq!(
super::parse("{{ 2 }}", &syntax).unwrap(),
vec![Node::Expr(Ws(None, None), Expr::NumLit("2"),)],
);
assert_eq!(
super::parse("{{ 2.5 }}", &syntax).unwrap(),
vec![Node::Expr(Ws(None, None), Expr::NumLit("2.5"),)],
);
}
#[test]
fn test_parse_var() {
let s = Syntax::default();
assert_eq!(
super::parse("{{ foo }}", &s).unwrap(),
vec![Node::Expr(Ws(None, None), Expr::Var("foo"))],
);
assert_eq!(
super::parse("{{ foo_bar }}", &s).unwrap(),
vec![Node::Expr(Ws(None, None), Expr::Var("foo_bar"))],
);
assert_eq!(
super::parse("{{ none }}", &s).unwrap(),
vec![Node::Expr(Ws(None, None), Expr::Var("none"))],
);
}
#[test]
fn test_parse_const() {
let s = Syntax::default();
assert_eq!(
super::parse("{{ FOO }}", &s).unwrap(),
vec![Node::Expr(Ws(None, None), Expr::Path(vec!["FOO"]))],
);
assert_eq!(
super::parse("{{ FOO_BAR }}", &s).unwrap(),
vec![Node::Expr(Ws(None, None), Expr::Path(vec!["FOO_BAR"]))],
);
assert_eq!(
super::parse("{{ NONE }}", &s).unwrap(),
vec![Node::Expr(Ws(None, None), Expr::Path(vec!["NONE"]))],
);
}
#[test]
fn test_parse_path() {
let s = Syntax::default();
assert_eq!(
super::parse("{{ None }}", &s).unwrap(),
vec![Node::Expr(Ws(None, None), Expr::Path(vec!["None"]))],
);
assert_eq!(
super::parse("{{ Some(123) }}", &s).unwrap(),
vec![Node::Expr(
Ws(None, None),
Expr::Call(
Box::new(Expr::Path(vec!["Some"])),
vec![Expr::NumLit("123")]
),
)],
);
assert_eq!(
super::parse("{{ Ok(123) }}", &s).unwrap(),
vec![Node::Expr(
Ws(None, None),
Expr::Call(Box::new(Expr::Path(vec!["Ok"])), vec![Expr::NumLit("123")]),
)],
);
assert_eq!(
super::parse("{{ Err(123) }}", &s).unwrap(),
vec![Node::Expr(
Ws(None, None),
Expr::Call(Box::new(Expr::Path(vec!["Err"])), vec![Expr::NumLit("123")]),
)],
);
}
#[test]
fn test_parse_var_call() {
assert_eq!(
super::parse("{{ function(\"123\", 3) }}", &Syntax::default()).unwrap(),
vec![Node::Expr(
Ws(None, None),
Expr::Call(
Box::new(Expr::Var("function")),
vec![Expr::StrLit("123"), Expr::NumLit("3")]
),
)],
);
}
#[test]
fn test_parse_path_call() {
let s = Syntax::default();
assert_eq!(
super::parse("{{ Option::None }}", &s).unwrap(),
vec![Node::Expr(
Ws(None, None),
Expr::Path(vec!["Option", "None"])
)],
);
assert_eq!(
super::parse("{{ Option::Some(123) }}", &s).unwrap(),
vec![Node::Expr(
Ws(None, None),
Expr::Call(
Box::new(Expr::Path(vec!["Option", "Some"])),
vec![Expr::NumLit("123")],
),
)],
);
assert_eq!(
super::parse("{{ self::function(\"123\", 3) }}", &s).unwrap(),
vec![Node::Expr(
Ws(None, None),
Expr::Call(
Box::new(Expr::Path(vec!["self", "function"])),
vec![Expr::StrLit("123"), Expr::NumLit("3")],
),
)],
);
}
#[test]
fn test_parse_root_path() {
let syntax = Syntax::default();
assert_eq!(
super::parse("{{ std::string::String::new() }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Expr::Call(
Box::new(Expr::Path(vec!["std", "string", "String", "new"])),
vec![]
),
)],
);
assert_eq!(
super::parse("{{ ::std::string::String::new() }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Expr::Call(
Box::new(Expr::Path(vec!["", "std", "string", "String", "new"])),
vec![]
),
)],
);
}
#[test]
fn change_delimiters_parse_filter() {
let syntax = Syntax {
expr_start: "{=",
expr_end: "=}",
..Syntax::default()
};
super::parse("{= strvar|e =}", &syntax).unwrap();
}
#[test]
fn test_precedence() {
use Expr::*;
let syntax = Syntax::default();
assert_eq!(
super::parse("{{ a + b == c }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"==",
BinOp("+", Var("a").into(), Var("b").into()).into(),
Var("c").into(),
)
)],
);
assert_eq!(
super::parse("{{ a + b * c - d / e }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"-",
BinOp(
"+",
Var("a").into(),
BinOp("*", Var("b").into(), Var("c").into()).into(),
)
.into(),
BinOp("/", Var("d").into(), Var("e").into()).into(),
)
)],
);
assert_eq!(
super::parse("{{ a * (b + c) / -d }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"/",
BinOp(
"*",
Var("a").into(),
Group(BinOp("+", Var("b").into(), Var("c").into()).into()).into()
)
.into(),
Unary("-", Var("d").into()).into()
)
)],
);
assert_eq!(
super::parse("{{ a || b && c || d && e }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"||",
BinOp(
"||",
Var("a").into(),
BinOp("&&", Var("b").into(), Var("c").into()).into(),
)
.into(),
BinOp("&&", Var("d").into(), Var("e").into()).into(),
)
)],
);
}
#[test]
fn test_associativity() {
use Expr::*;
let syntax = Syntax::default();
assert_eq!(
super::parse("{{ a + b + c }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"+",
BinOp("+", Var("a").into(), Var("b").into()).into(),
Var("c").into()
)
)],
);
assert_eq!(
super::parse("{{ a * b * c }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"*",
BinOp("*", Var("a").into(), Var("b").into()).into(),
Var("c").into()
)
)],
);
assert_eq!(
super::parse("{{ a && b && c }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"&&",
BinOp("&&", Var("a").into(), Var("b").into()).into(),
Var("c").into()
)
)],
);
assert_eq!(
super::parse("{{ a + b - c + d }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"+",
BinOp(
"-",
BinOp("+", Var("a").into(), Var("b").into()).into(),
Var("c").into()
)
.into(),
Var("d").into()
)
)],
);
assert_eq!(
super::parse("{{ a == b != c > d > e == f }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"==",
BinOp(
">",
BinOp(
">",
BinOp(
"!=",
BinOp("==", Var("a").into(), Var("b").into()).into(),
Var("c").into()
)
.into(),
Var("d").into()
)
.into(),
Var("e").into()
)
.into(),
Var("f").into()
)
)],
);
}
#[test]
fn test_odd_calls() {
use Expr::*;
let syntax = Syntax::default();
assert_eq!(
super::parse("{{ a[b](c) }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Call(
Box::new(Index(Box::new(Var("a")), Box::new(Var("b")))),
vec![Var("c")],
),
)],
);
assert_eq!(
super::parse("{{ (a + b)(c) }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Call(
Box::new(Group(Box::new(BinOp(
"+",
Box::new(Var("a")),
Box::new(Var("b"))
)))),
vec![Var("c")],
),
)],
);
assert_eq!(
super::parse("{{ a + b(c) }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"+",
Box::new(Var("a")),
Box::new(Call(Box::new(Var("b")), vec![Var("c")])),
),
)],
);
assert_eq!(
super::parse("{{ (-a)(b) }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Call(
Box::new(Group(Box::new(Unary("-", Box::new(Var("a")))))),
vec![Var("b")],
),
)],
);
assert_eq!(
super::parse("{{ -a(b) }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Unary("-", Box::new(Call(Box::new(Var("a")), vec![Var("b")])),),
)],
);
}
#[test]
fn test_parse_comments() {
let s = &Syntax::default();
assert_eq!(
super::parse("{##}", s).unwrap(),
vec![Node::Comment(Ws(None, None))],
);
assert_eq!(
super::parse("{#- #}", s).unwrap(),
vec![Node::Comment(Ws(Some(Whitespace::Suppress), None))],
);
assert_eq!(
super::parse("{# -#}", s).unwrap(),
vec![Node::Comment(Ws(None, Some(Whitespace::Suppress)))],
);
assert_eq!(
super::parse("{#--#}", s).unwrap(),
vec![Node::Comment(Ws(
Some(Whitespace::Suppress),
Some(Whitespace::Suppress)
))],
);
assert_eq!(
super::parse("{#- foo\n bar -#}", s).unwrap(),
vec![Node::Comment(Ws(
Some(Whitespace::Suppress),
Some(Whitespace::Suppress)
))],
);
assert_eq!(
super::parse("{#- foo\n {#- bar\n -#} baz -#}", s).unwrap(),
vec![Node::Comment(Ws(
Some(Whitespace::Suppress),
Some(Whitespace::Suppress)
))],
);
assert_eq!(
super::parse("{#+ #}", s).unwrap(),
vec![Node::Comment(Ws(Some(Whitespace::Preserve), None))],
);
assert_eq!(
super::parse("{# +#}", s).unwrap(),
vec![Node::Comment(Ws(None, Some(Whitespace::Preserve)))],
);
assert_eq!(
super::parse("{#++#}", s).unwrap(),
vec![Node::Comment(Ws(
Some(Whitespace::Preserve),
Some(Whitespace::Preserve)
))],
);
assert_eq!(
super::parse("{#+ foo\n bar +#}", s).unwrap(),
vec![Node::Comment(Ws(
Some(Whitespace::Preserve),
Some(Whitespace::Preserve)
))],
);
assert_eq!(
super::parse("{#+ foo\n {#+ bar\n +#} baz -+#}", s).unwrap(),
vec![Node::Comment(Ws(
Some(Whitespace::Preserve),
Some(Whitespace::Preserve)
))],
);
assert_eq!(
super::parse("{#~ #}", s).unwrap(),
vec![Node::Comment(Ws(Some(Whitespace::Minimize), None))],
);
assert_eq!(
super::parse("{# ~#}", s).unwrap(),
vec![Node::Comment(Ws(None, Some(Whitespace::Minimize)))],
);
assert_eq!(
super::parse("{#~~#}", s).unwrap(),
vec![Node::Comment(Ws(
Some(Whitespace::Minimize),
Some(Whitespace::Minimize)
))],
);
assert_eq!(
super::parse("{#~ foo\n bar ~#}", s).unwrap(),
vec![Node::Comment(Ws(
Some(Whitespace::Minimize),
Some(Whitespace::Minimize)
))],
);
assert_eq!(
super::parse("{#~ foo\n {#~ bar\n ~#} baz -~#}", s).unwrap(),
vec![Node::Comment(Ws(
Some(Whitespace::Minimize),
Some(Whitespace::Minimize)
))],
);
assert_eq!(
super::parse("{# foo {# bar #} {# {# baz #} qux #} #}", s).unwrap(),
vec![Node::Comment(Ws(None, None))],
);
}
#[test]
fn test_parse_tuple() {
use super::Expr::*;
let syntax = Syntax::default();
assert_eq!(
super::parse("{{ () }}", &syntax).unwrap(),
vec![Node::Expr(Ws(None, None), Tuple(vec![]),)],
);
assert_eq!(
super::parse("{{ (1) }}", &syntax).unwrap(),
vec![Node::Expr(Ws(None, None), Group(Box::new(NumLit("1"))),)],
);
assert_eq!(
super::parse("{{ (1,) }}", &syntax).unwrap(),
vec![Node::Expr(Ws(None, None), Tuple(vec![NumLit("1")]),)],
);
assert_eq!(
super::parse("{{ (1, ) }}", &syntax).unwrap(),
vec![Node::Expr(Ws(None, None), Tuple(vec![NumLit("1")]),)],
);
assert_eq!(
super::parse("{{ (1 ,) }}", &syntax).unwrap(),
vec![Node::Expr(Ws(None, None), Tuple(vec![NumLit("1")]),)],
);
assert_eq!(
super::parse("{{ (1 , ) }}", &syntax).unwrap(),
vec![Node::Expr(Ws(None, None), Tuple(vec![NumLit("1")]),)],
);
assert_eq!(
super::parse("{{ (1, 2) }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Tuple(vec![NumLit("1"), NumLit("2")]),
)],
);
assert_eq!(
super::parse("{{ (1, 2,) }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Tuple(vec![NumLit("1"), NumLit("2")]),
)],
);
assert_eq!(
super::parse("{{ (1, 2, 3) }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Tuple(vec![NumLit("1"), NumLit("2"), NumLit("3")]),
)],
);
assert_eq!(
super::parse("{{ ()|abs }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Filter("abs", vec![Tuple(vec![])]),
)],
);
assert_eq!(
super::parse("{{ () | abs }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp("|", Box::new(Tuple(vec![])), Box::new(Var("abs"))),
)],
);
assert_eq!(
super::parse("{{ (1)|abs }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Filter("abs", vec![Group(Box::new(NumLit("1")))]),
)],
);
assert_eq!(
super::parse("{{ (1) | abs }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"|",
Box::new(Group(Box::new(NumLit("1")))),
Box::new(Var("abs"))
),
)],
);
assert_eq!(
super::parse("{{ (1,)|abs }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Filter("abs", vec![Tuple(vec![NumLit("1")])]),
)],
);
assert_eq!(
super::parse("{{ (1,) | abs }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"|",
Box::new(Tuple(vec![NumLit("1")])),
Box::new(Var("abs"))
),
)],
);
assert_eq!(
super::parse("{{ (1, 2)|abs }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Filter("abs", vec![Tuple(vec![NumLit("1"), NumLit("2")])]),
)],
);
assert_eq!(
super::parse("{{ (1, 2) | abs }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"|",
Box::new(Tuple(vec![NumLit("1"), NumLit("2")])),
Box::new(Var("abs"))
),
)],
);
}
#[test]
fn test_missing_space_after_kw() {
let syntax = Syntax::default();
let err = super::parse("{%leta=b%}", &syntax).unwrap_err();
assert!(matches!(
&*err.msg,
"unable to parse template:\n\n\"{%leta=b%}\""
));
}

View File

@@ -1 +0,0 @@
{"files":{"Cargo.toml":"a140f9df40d83c3f3c39864df0e272bde3e210ad9d37cf90342c45f137c5b1aa","LICENSE-APACHE":"87cb0d734c723c083e51c825930ff42bce28596b52dee15567f6b28f19c195e3","LICENSE-MIT":"df20e0180764bf5bd76f74d47bc9e8c0069a666401629c390003a1d5eba99c92","README.md":"f1c057bd94aff0d98bcd7267655bb8af4c9c81a643423c5948f711e199945905","benches/all.rs":"0e0458780fa24e55402b11fdbc6ef2191b399459461a9f909a516363e824c838","src/lib.rs":"5f96ad55ac916b63ef051373994c08a0bfaa3b85a5bf031a579dc23163c47267"},"package":"619743e34b5ba4e9703bba34deac3427c72507c7159f5fd030aea8cac0cfe341"}

View File

@@ -1,33 +0,0 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2018"
name = "askama_escape"
version = "0.10.3"
description = "Optimized HTML escaping code, extracted from Askama"
homepage = "https://github.com/djc/askama"
documentation = "https://docs.rs/askama_escape"
readme = "README.md"
keywords = ["html", "escaping"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/djc/askama"
[[bench]]
name = "all"
harness = false
[dev-dependencies.criterion]
version = "0.3"
[features]
json = []
[badges.maintenance]
status = "actively-developed"

View File

@@ -1,9 +0,0 @@
# askama_escape: escaping utilities for the Askama templating engine
[![Documentation](https://docs.rs/askama_escape/badge.svg)](https://docs.rs/askama_escape/)
[![Latest version](https://img.shields.io/crates/v/askama_escape.svg)](https://crates.io/crates/askama_escape)
[![Build Status](https://github.com/djc/askama/workflows/CI/badge.svg)](https://github.com/djc/askama/actions?query=workflow%3ACI)
[![Chat](https://badges.gitter.im/gitterHQ/gitter.svg)](https://gitter.im/djc/askama)
This crate contains helper code for HTML escaping used by the
[Askama](https://github.com/djc/askama) templating engine.

View File

@@ -1,239 +0,0 @@
#![cfg_attr(not(any(feature = "json", test)), no_std)]
#![deny(elided_lifetimes_in_paths)]
#![deny(unreachable_pub)]
use core::fmt::{self, Display, Formatter, Write};
use core::str;
#[derive(Debug)]
pub struct MarkupDisplay<E, T>
where
E: Escaper,
T: Display,
{
value: DisplayValue<T>,
escaper: E,
}
impl<E, T> MarkupDisplay<E, T>
where
E: Escaper,
T: Display,
{
pub fn new_unsafe(value: T, escaper: E) -> Self {
Self {
value: DisplayValue::Unsafe(value),
escaper,
}
}
pub fn new_safe(value: T, escaper: E) -> Self {
Self {
value: DisplayValue::Safe(value),
escaper,
}
}
#[must_use]
pub fn mark_safe(mut self) -> MarkupDisplay<E, T> {
self.value = match self.value {
DisplayValue::Unsafe(t) => DisplayValue::Safe(t),
_ => self.value,
};
self
}
}
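// Usage sketch (hedged): wrapping untrusted text escapes it on `Display`,
// while `mark_safe` lets pre-escaped content pass through verbatim, e.g.:
//
//     let v = MarkupDisplay::new_unsafe("<b>hi</b>", Html);
//     assert_eq!(v.to_string(), "&lt;b&gt;hi&lt;/b&gt;");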
impl<E, T> Display for MarkupDisplay<E, T>
where
E: Escaper,
T: Display,
{
fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
match self.value {
DisplayValue::Unsafe(ref t) => write!(
EscapeWriter {
fmt,
escaper: &self.escaper
},
"{}",
t
),
DisplayValue::Safe(ref t) => t.fmt(fmt),
}
}
}
#[derive(Debug)]
pub struct EscapeWriter<'a, E, W> {
fmt: W,
escaper: &'a E,
}
impl<E, W> Write for EscapeWriter<'_, E, W>
where
W: Write,
E: Escaper,
{
fn write_str(&mut self, s: &str) -> fmt::Result {
self.escaper.write_escaped(&mut self.fmt, s)
}
}
pub fn escape<E>(string: &str, escaper: E) -> Escaped<'_, E>
where
E: Escaper,
{
Escaped { string, escaper }
}
#[derive(Debug)]
pub struct Escaped<'a, E>
where
E: Escaper,
{
string: &'a str,
escaper: E,
}
impl<E> Display for Escaped<'_, E>
where
E: Escaper,
{
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
self.escaper.write_escaped(fmt, self.string)
}
}
pub struct Html;
macro_rules! escaping_body {
($start:ident, $i:ident, $fmt:ident, $bytes:ident, $quote:expr) => {{
if $start < $i {
$fmt.write_str(unsafe { str::from_utf8_unchecked(&$bytes[$start..$i]) })?;
}
$fmt.write_str($quote)?;
$start = $i + 1;
}};
}
impl Escaper for Html {
fn write_escaped<W>(&self, mut fmt: W, string: &str) -> fmt::Result
where
W: Write,
{
let bytes = string.as_bytes();
let mut start = 0;
for (i, b) in bytes.iter().enumerate() {
if b.wrapping_sub(b'"') <= FLAG {
match *b {
b'<' => escaping_body!(start, i, fmt, bytes, "&lt;"),
b'>' => escaping_body!(start, i, fmt, bytes, "&gt;"),
b'&' => escaping_body!(start, i, fmt, bytes, "&amp;"),
b'"' => escaping_body!(start, i, fmt, bytes, "&quot;"),
b'\'' => escaping_body!(start, i, fmt, bytes, "&#x27;"),
_ => (),
}
}
}
if start < bytes.len() {
fmt.write_str(unsafe { str::from_utf8_unchecked(&bytes[start..]) })
} else {
Ok(())
}
}
}
pub struct Text;
impl Escaper for Text {
fn write_escaped<W>(&self, mut fmt: W, string: &str) -> fmt::Result
where
W: Write,
{
fmt.write_str(string)
}
}
#[derive(Debug, PartialEq)]
enum DisplayValue<T>
where
T: Display,
{
Safe(T),
Unsafe(T),
}
pub trait Escaper {
fn write_escaped<W>(&self, fmt: W, string: &str) -> fmt::Result
where
W: Write;
}
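// `FLAG` enables a one-comparison prefilter in `Html::write_escaped`: all five
// escaped bytes (`"`, `&`, `'`, `<`, `>`) lie within b'"'..=b'>', so
// `b.wrapping_sub(b'"') <= FLAG` cheaply rejects most other bytes before the
// `match` (bytes below `"` wrap around to large values).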
const FLAG: u8 = b'>' - b'"';
/// Escape chevrons, ampersands and apostrophes for use in JSON
#[cfg(feature = "json")]
#[derive(Debug, Clone, Default)]
pub struct JsonEscapeBuffer(Vec<u8>);
#[cfg(feature = "json")]
impl JsonEscapeBuffer {
pub fn new() -> Self {
Self(Vec::new())
}
pub fn finish(self) -> String {
unsafe { String::from_utf8_unchecked(self.0) }
}
}
#[cfg(feature = "json")]
impl std::io::Write for JsonEscapeBuffer {
fn write(&mut self, bytes: &[u8]) -> std::io::Result<usize> {
macro_rules! push_esc_sequence {
($start:ident, $i:ident, $self:ident, $bytes:ident, $quote:expr) => {{
if $start < $i {
$self.0.extend_from_slice(&$bytes[$start..$i]);
}
$self.0.extend_from_slice($quote);
$start = $i + 1;
}};
}
self.0.reserve(bytes.len());
let mut start = 0;
for (i, b) in bytes.iter().enumerate() {
match *b {
b'&' => push_esc_sequence!(start, i, self, bytes, br#"\u0026"#),
b'\'' => push_esc_sequence!(start, i, self, bytes, br#"\u0027"#),
b'<' => push_esc_sequence!(start, i, self, bytes, br#"\u003c"#),
b'>' => push_esc_sequence!(start, i, self, bytes, br#"\u003e"#),
_ => (),
}
}
if start < bytes.len() {
self.0.extend_from_slice(&bytes[start..]);
}
Ok(bytes.len())
}
fn flush(&mut self) -> std::io::Result<()> {
Ok(())
}
}
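// Usage sketch (hedged): the buffer implements `std::io::Write`, so JSON can
// be serialized straight into it (e.g. via `serde_json::to_writer`), and
// `finish()` then yields a `String` with `&`, `'`, `<` and `>` escaped as
// `\uXXXX`, safe to embed inside HTML.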
#[cfg(test)]
mod tests {
use super::*;
use std::string::ToString;
#[test]
fn test_escape() {
assert_eq!(escape("", Html).to_string(), "");
assert_eq!(escape("<&>", Html).to_string(), "&lt;&amp;&gt;");
assert_eq!(escape("bla&", Html).to_string(), "bla&amp;");
assert_eq!(escape("<foo", Html).to_string(), "&lt;foo");
assert_eq!(escape("bla&h", Html).to_string(), "bla&amp;h");
}
}

View File

@@ -1 +1 @@
{"files":{"Cargo.toml":"8eb17651996280a83f998c7c5199e0692e859389e86b33729c67801aff9f03bb","README.md":"69ccc6e378995b9d490d64e23b42ea1d7a9e3232e3dae6fabf1f955786a49931","build.rs":"c8d3c38c1208eea36224662b284d8daf3e7ad1b07d22d750524f3da1cc66ccca","src/errorsupport.udl":"8f8e5711913ffd1b515ec60028529768990df51001e6125d4b83c948b41c4466","src/handling.rs":"6e0568b18d426531cb2ae9967c8dd0d51ece5a065f68b15eeb308b995edaa167","src/lib.rs":"1e41747d06a0d032c9601df85dd6e95001e432ae95a75dcca859355cbadef3b0","src/macros.rs":"0d03f82fab20c96a182f941baf3fcf2a286b00fea871ee7fd8e339abc14f9522","src/redact.rs":"c9a4df1a87be68b15d583587bda941d4c60a1d0449e2d43ff99f3611a290a863","src/reporting.rs":"f4af35d5fb5bf0ebef6dc6595edac6351e1dae2bff989c18810480fae2202168","uniffi.toml":"af91bcd8e7b1fa3f475a5e556979ff23c57b338395e0b65abc1cb1a0ee823e23"},"package":null}
{"files":{"Cargo.toml":"c64762e3ad81f92bf69f4fd17efc4b01b21e75b9bdde8ca94078500b6651c867","README.md":"69ccc6e378995b9d490d64e23b42ea1d7a9e3232e3dae6fabf1f955786a49931","build.rs":"c8d3c38c1208eea36224662b284d8daf3e7ad1b07d22d750524f3da1cc66ccca","src/errorsupport.udl":"8f8e5711913ffd1b515ec60028529768990df51001e6125d4b83c948b41c4466","src/handling.rs":"6e0568b18d426531cb2ae9967c8dd0d51ece5a065f68b15eeb308b995edaa167","src/lib.rs":"1e41747d06a0d032c9601df85dd6e95001e432ae95a75dcca859355cbadef3b0","src/macros.rs":"0d03f82fab20c96a182f941baf3fcf2a286b00fea871ee7fd8e339abc14f9522","src/redact.rs":"c9a4df1a87be68b15d583587bda941d4c60a1d0449e2d43ff99f3611a290a863","src/reporting.rs":"f4af35d5fb5bf0ebef6dc6595edac6351e1dae2bff989c18810480fae2202168","uniffi.toml":"af91bcd8e7b1fa3f475a5e556979ff23c57b338395e0b65abc1cb1a0ee823e23"},"package":null}

View File

@@ -48,11 +48,8 @@ version = "1.4"
version = ">=0.11,<=0.12"
[dependencies.uniffi]
version = "0.28.2"
version = "0.29.0"
[build-dependencies.uniffi]
version = "0.28.2"
version = "0.29.0"
features = ["build"]
[lints.clippy]
empty-line-after-doc-comments = "allow"

File diff suppressed because one or more lines are too long

View File

@@ -11,9 +11,9 @@
[package]
edition = "2021"
rust-version = "1.76"
rust-version = "1.82"
name = "glean-core"
version = "63.1.0"
version = "64.0.1"
authors = [
"Jan-Erik Rediger <jrediger@mozilla.com>",
"The Glean Team <glean-team@mozilla.com>",
@@ -40,7 +40,7 @@ license = "MPL-2.0"
repository = "https://github.com/mozilla/glean"
[package.metadata.glean]
glean-parser = "16.1.0"
glean-parser = "17.0.1"
[lib]
name = "glean_core"
@@ -168,7 +168,7 @@ version = "1.0.4"
version = "0.1.40"
[dependencies.uniffi]
version = "0.28.0"
version = "0.29.0"
default-features = false
[dependencies.uuid]
@@ -193,7 +193,7 @@ version = "0.4"
version = "3.8.0"
[build-dependencies.uniffi]
version = "0.28.0"
version = "0.29.0"
features = ["build"]
default-features = false

View File

@@ -127,7 +127,7 @@ where
/// ping_lifetime_max_time: 2000,
/// };
/// let mut glean = Glean::new(cfg).unwrap();
/// let ping = PingType::new("sample", true, false, true, true, true, vec![], vec![], true);
/// let ping = PingType::new("sample", true, false, true, true, true, vec![], vec![], true, vec![]);
/// glean.register_ping_type(&ping);
///
/// let call_counter: CounterMetric = CounterMetric::new(CommonMetricData {
@@ -277,13 +277,11 @@ impl Glean {
// instantiate the core metrics.
glean.on_upload_enabled();
} else {
// If upload is disabled, and we've never run before, only set the
// client_id to KNOWN_CLIENT_ID, but do not send a deletion request
// ping.
// If we have run before, and if the client_id is not equal to
// the KNOWN_CLIENT_ID, do the full upload disabled operations to
// clear metrics, set the client_id to KNOWN_CLIENT_ID, and send a
// deletion request ping.
// If upload is disabled, then clear the metrics
// but do not send a deletion request ping.
// If we have run before, and we have an old client_id,
// do the full upload disabled operations to clear metrics
// and send a deletion request ping.
match glean
.core_metrics
.client_id
@@ -291,7 +289,17 @@ impl Glean {
{
None => glean.clear_metrics(),
Some(uuid) => {
if uuid != *KNOWN_CLIENT_ID {
if uuid == *KNOWN_CLIENT_ID {
// Previously Glean kept the KNOWN_CLIENT_ID stored.
// Let's ensure we erase it now.
if let Some(data) = glean.data_store.as_ref() {
_ = data.remove_single_metric(
Lifetime::User,
"glean_client_info",
"client_id",
);
}
} else {
// Temporarily enable uploading so we can submit a
// deletion request ping.
glean.upload_enabled = true;
@@ -580,14 +588,6 @@ impl Glean {
// so that it can't be accessed until this function is done.
let _lock = self.upload_manager.clear_ping_queue();
// There is only one metric that we want to survive after clearing all
// metrics: first_run_date. Here, we store its value so we can restore
// it after clearing the metrics.
let existing_first_run_date = self
.core_metrics
.first_run_date
.get_value(self, "glean_client_info");
// Clear any pending pings that follow `collection_enabled`.
let ping_maker = PingMaker::new();
let disabled_pings = self
@@ -605,8 +605,7 @@ impl Glean {
// the effect of resetting those to their initial values.
if let Some(data) = self.data_store.as_ref() {
_ = data.clear_lifetime_storage(Lifetime::User, "glean_internal_info");
_ = data.clear_lifetime_storage(Lifetime::User, "glean_client_info");
_ = data.clear_lifetime_storage(Lifetime::Application, "glean_client_info");
_ = data.remove_single_metric(Lifetime::User, "glean_client_info", "client_id");
for (ping_name, ping) in &self.ping_registry {
if ping.follows_collection_enabled() {
_ = data.clear_ping_lifetime_storage(ping_name);
@@ -623,32 +622,6 @@ impl Glean {
// StorageEngineManager), since doing so would mean we would have to have the
// application tell us again which experiments are active if telemetry is
// re-enabled.
{
// We need to briefly set upload_enabled to true here so that `set`
// is not a no-op. This is safe, since nothing on the Rust side can
// run concurrently to this since we hold a mutable reference to the
// Glean object. Additionally, the pending pings have been cleared
// from disk, so the PingUploader can't wake up and start sending
// pings.
self.upload_enabled = true;
// Store a "dummy" KNOWN_CLIENT_ID in the client_id metric. This will
// make it easier to detect if pings were unintentionally sent after
// uploading is disabled.
self.core_metrics
.client_id
.set_from_uuid_sync(self, *KNOWN_CLIENT_ID);
// Restore the first_run_date.
if let Some(existing_first_run_date) = existing_first_run_date {
self.core_metrics
.first_run_date
.set_sync_chrono(self, existing_first_run_date);
}
self.upload_enabled = false;
}
}
/// Gets the application ID as specified on instantiation.

View File

@@ -826,7 +826,6 @@ impl Database {
data: &BTreeMap<String, Metric>,
) -> Result<()> {
if self.ping_lifetime_threshold == 0 && self.ping_lifetime_max_time.is_zero() {
log::trace!("Auto-flush disabled.");
return Ok(());
}

View File

@@ -108,6 +108,7 @@ dictionary PingRateLimit {
};
// An enum representing the different logging levels for the `log` crate.
[Remote]
enum LevelFilter {
"Off",
"Error",
@@ -214,6 +215,8 @@ dictionary PingRequest {
boolean body_has_info_sections;
// The ping's name. Likely also somewhere in `path`.
string ping_name;
// The capabilities required during this ping's upload.
sequence<string> uploader_capabilities;
};
// An enum representing the possible upload tasks to be performed by an uploader.
@@ -223,6 +226,7 @@ interface PingUploadTask {
//
// * request: the ping request for upload
Upload(PingRequest request);
// A flag signaling that the pending pings directories are not done being processed,
// thus the requester should wait and come back later.
//
@@ -254,6 +258,15 @@ interface UploadResult {
// * unused: _ignored_.
UnrecoverableFailure(i8 unused);
// The uploader is not capable of uploading this request due to lack of or
// mismatched capabilities.
//
// e.g. The ping requires upload over OHTTP,
// but the uploader doesn't support OHTTP.
//
// * unused: _ignored_.
Incapable(i8 unused);
// A HTTP response code.
//
// This can still indicate an error, depending on the status code.
@@ -309,7 +322,8 @@ interface PingType {
boolean enabled,
sequence<string> schedules_pings,
sequence<string> reason_codes,
boolean follows_collection_enabled
boolean follows_collection_enabled,
sequence<string> uploader_capabilities
);
void submit(optional string? reason = null);

View File

@@ -172,6 +172,7 @@ impl UploadMetrics {
Cow::from("status_code_unknown"),
Cow::from("unrecoverable"),
Cow::from("recoverable"),
Cow::from("incapable"),
]),
),

View File

@@ -35,6 +35,7 @@ impl InternalPings {
"inactive".to_string(),
],
true,
vec![],
),
metrics: PingType::new(
"metrics",
@@ -52,6 +53,7 @@ impl InternalPings {
"upgrade".to_string(),
],
true,
vec![],
),
events: PingType::new(
"events",
@@ -67,6 +69,7 @@ impl InternalPings {
"max_capacity".to_string(),
],
true,
vec![],
),
deletion_request: PingType::new(
"deletion-request",
@@ -78,6 +81,7 @@ impl InternalPings {
vec![],
vec!["at_init".to_string(), "set_upload_enabled".to_string()],
true,
vec![],
),
}
}

View File

@@ -2,6 +2,7 @@
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at https://mozilla.org/MPL/2.0/.
#![allow(clippy::doc_overindented_list_items)]
#![allow(clippy::significant_drop_in_scrutinee)]
#![allow(clippy::uninlined_format_args)]
#![deny(rustdoc::broken_intra_doc_links)]
@@ -411,7 +412,7 @@ fn initialize_inner(
// The debug view tag might have been set before initialize,
// get the cached value and set it.
let debug_tag = PRE_INIT_DEBUG_VIEW_TAG.lock().unwrap();
if debug_tag.len() > 0 {
if !debug_tag.is_empty() {
glean.set_debug_view_tag(&debug_tag);
}
@@ -425,7 +426,7 @@ fn initialize_inner(
// The source tags might have been set before initialize,
// get the cached value and set them.
let source_tags = PRE_INIT_SOURCE_TAGS.lock().unwrap();
if source_tags.len() > 0 {
if !source_tags.is_empty() {
glean.set_source_tags(source_tags.to_vec());
}
@@ -1306,31 +1307,19 @@ mod ffi {
type CowString = Cow<'static, str>;
impl UniffiCustomTypeConverter for CowString {
type Builtin = String;
fn into_custom(val: Self::Builtin) -> uniffi::Result<Self> {
Ok(Cow::from(val))
}
fn from_custom(obj: Self) -> Self::Builtin {
obj.into_owned()
}
}
uniffi::custom_type!(CowString, String, {
remote,
lower: |s| s.into_owned(),
try_lift: |s| Ok(Cow::from(s))
});
type JsonValue = serde_json::Value;
impl UniffiCustomTypeConverter for JsonValue {
type Builtin = String;
fn into_custom(val: Self::Builtin) -> uniffi::Result<Self> {
Ok(serde_json::from_str(&val)?)
}
fn from_custom(obj: Self) -> Self::Builtin {
serde_json::to_string(&obj).unwrap()
}
}
uniffi::custom_type!(JsonValue, String, {
remote,
lower: |s| serde_json::to_string(&s).unwrap(),
try_lift: |s| Ok(serde_json::from_str(&s)?)
});
}
pub use ffi::*;
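The old UniffiCustomTypeConverter impls are replaced by uniffi 0.29's custom_type! macro. The same pattern, sketched for a hypothetical wrapper type (`Timestamp` is not part of this commit) to show the general shape:

// Expose a newtype over i64 across the FFI as a plain integer.
pub struct Timestamp(pub i64);

uniffi::custom_type!(Timestamp, i64, {
    remote,
    lower: |t| t.0,
    try_lift: |raw| Ok(Timestamp(raw))
});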

View File

@@ -35,6 +35,7 @@ pub fn new_glean(tempdir: Option<tempfile::TempDir>) -> (Glean, tempfile::TempDi
vec![],
vec![],
true,
vec![],
);
glean.register_ping_type(&ping);
let ping = PingType::new_internal(
@@ -47,6 +48,7 @@ pub fn new_glean(tempdir: Option<tempfile::TempDir>) -> (Glean, tempfile::TempDi
vec![],
vec![],
true,
vec![],
);
glean.register_ping_type(&ping);
(glean, dir)
@@ -348,12 +350,11 @@ fn client_id_is_managed_correctly_when_toggling_uploading() {
glean.set_upload_enabled(false);
assert_eq!(
*KNOWN_CLIENT_ID,
None,
glean
.core_metrics
.client_id
.get_value(&glean, "glean_client_info")
.unwrap()
);
glean.set_upload_enabled(true);
@@ -367,18 +368,17 @@ fn client_id_is_managed_correctly_when_toggling_uploading() {
}
#[test]
fn client_id_is_set_to_known_value_when_uploading_disabled_at_start() {
fn client_id_is_not_set_when_uploading_disabled_at_start() {
let dir = tempfile::tempdir().unwrap();
let tmpname = dir.path().display().to_string();
let glean = Glean::with_options(&tmpname, GLOBAL_APPLICATION_ID, false, true);
assert_eq!(
*KNOWN_CLIENT_ID,
None,
glean
.core_metrics
.client_id
.get_value(&glean, "glean_client_info")
.unwrap()
);
}
@@ -1218,6 +1218,7 @@ fn disabled_pings_are_not_submitted() {
vec![],
vec![],
true,
vec![],
);
glean.register_ping_type(&ping);
@@ -1271,6 +1272,7 @@ fn pings_are_controllable_from_remote_settings_config() {
vec![],
vec![],
true,
vec![],
);
glean.register_ping_type(&disabled_ping);
let enabled_ping = PingType::new(
@@ -1283,6 +1285,7 @@ fn pings_are_controllable_from_remote_settings_config() {
vec![],
vec![],
true,
vec![],
);
glean.register_ping_type(&enabled_ping);

View File

@@ -40,6 +40,9 @@ struct InnerPing {
/// True when it follows the `collection_enabled` (aka `upload_enabled`) flag.
/// Otherwise it needs to be enabled through `enabled_pings`.
follows_collection_enabled: AtomicBool,
/// Ordered list of uploader capabilities required to upload this ping.
uploader_capabilities: Vec<String>,
}
impl fmt::Debug for PingType {
@@ -57,6 +60,7 @@ impl fmt::Debug for PingType {
"follows_collection_enabled",
&self.0.follows_collection_enabled.load(Ordering::Relaxed),
)
.field("uploader_capabilities", &self.0.uploader_capabilities)
.finish()
}
}
@@ -79,6 +83,7 @@ impl PingType {
/// * `enabled` - Whether or not this ping is enabled. Note: Data that would be sent on a disabled
/// ping will still be collected but is discarded rather than being submitted.
/// * `reason_codes` - The valid reason codes for this ping.
/// * `uploader_capabilities` - The ordered list of capabilities this ping requires to be uploaded with.
#[allow(clippy::too_many_arguments)]
pub fn new<A: Into<String>>(
name: A,
@@ -90,6 +95,7 @@ impl PingType {
schedules_pings: Vec<String>,
reason_codes: Vec<String>,
follows_collection_enabled: bool,
uploader_capabilities: Vec<String>,
) -> Self {
Self::new_internal(
name,
@@ -101,6 +107,7 @@ impl PingType {
schedules_pings,
reason_codes,
follows_collection_enabled,
uploader_capabilities,
)
}
@@ -115,6 +122,7 @@ impl PingType {
schedules_pings: Vec<String>,
reason_codes: Vec<String>,
follows_collection_enabled: bool,
uploader_capabilities: Vec<String>,
) -> Self {
let this = Self(Arc::new(InnerPing {
name: name.into(),
@@ -126,6 +134,7 @@ impl PingType {
schedules_pings,
reason_codes,
follows_collection_enabled: AtomicBool::new(follows_collection_enabled),
uploader_capabilities,
}));
// Register this ping.
@@ -222,6 +231,11 @@ impl PingType {
&self.0.reason_codes
}
/// The capabilities this ping requires to be uploaded under.
pub fn uploader_capabilities(&self) -> &[String] {
&self.0.uploader_capabilities
}
/// Submits the ping for eventual uploading.
///
/// The ping content is assembled as soon as possible, but upload is not
@@ -343,6 +357,7 @@ impl PingType {
headers: Some(ping.headers),
body_has_info_sections: self.0.include_info_sections,
ping_name: self.0.name.to_string(),
uploader_capabilities: self.0.uploader_capabilities.clone(),
};
glean.upload_manager.enqueue_ping(glean, ping);
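A hedged usage sketch of the accessor added above; capabilities set at construction are cloned onto every outgoing PingRequest ("ohttp" remains a hypothetical capability name):

let caps: &[String] = ping.uploader_capabilities();
assert_eq!(caps, ["ohttp"]);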

View File

@@ -34,6 +34,8 @@ pub struct Ping<'a> {
pub includes_info_sections: bool,
/// Other pings that should be scheduled when this ping is sent.
pub schedules_pings: Vec<String>,
/// Capabilities the uploader must have in order to upload this ping.
pub uploader_capabilities: Vec<String>,
}
/// Collect a ping's data, assemble it into its full payload and store it on disk.
@@ -334,6 +336,7 @@ impl PingMaker {
headers: self.get_headers(glean),
includes_info_sections: ping.include_info_sections(),
schedules_pings: ping.schedules_pings().to_vec(),
uploader_capabilities: ping.uploader_capabilities().to_vec(),
})
}
@@ -392,6 +395,7 @@ impl PingMaker {
headers: Some(ping.headers.clone()),
body_has_info_sections: Some(ping.includes_info_sections),
ping_name: Some(ping.name.to_string()),
uploader_capabilities: Some(ping.uploader_capabilities.clone()),
};
file.write_all(::serde_json::to_string(&metadata)?.as_bytes())?;
}

View File

@@ -30,6 +30,8 @@ pub struct PingPayload {
pub body_has_info_sections: bool,
/// The ping's name. (Also likely in the upload_path.)
pub ping_name: String,
/// The capabilities this ping must be uploaded under.
pub uploader_capabilities: Vec<String>,
}
/// A struct to hold the result of scanning all pings directories.
@@ -86,6 +88,8 @@ pub struct PingMetadata {
pub body_has_info_sections: Option<bool>,
/// The name of the ping.
pub ping_name: Option<String>,
/// The capabilities this ping must be uploaded under.
pub uploader_capabilities: Option<Vec<String>>,
}
/// Processes a ping's metadata.
@@ -196,6 +200,7 @@ impl PingDirectoryManager {
headers,
body_has_info_sections,
ping_name,
uploader_capabilities,
} = metadata
.and_then(|m| process_metadata(&path, &m))
.unwrap_or_default();
@@ -208,6 +213,7 @@ impl PingDirectoryManager {
headers,
body_has_info_sections: body_has_info_sections.unwrap_or(true),
ping_name,
uploader_capabilities: uploader_capabilities.unwrap_or_default(),
});
} else {
log::warn!(
@@ -337,7 +343,18 @@ mod test {
let (mut glean, dir) = new_glean(None);
// Register a ping for testing
let ping_type = PingType::new("test", true, true, true, true, true, vec![], vec![], true);
let ping_type = PingType::new(
"test",
true,
true,
true,
true,
true,
vec![],
vec![],
true,
vec![],
);
glean.register_ping_type(&ping_type);
// Submit the ping to populate the pending_pings directory
@@ -364,7 +381,18 @@ mod test {
let (mut glean, dir) = new_glean(None);
// Register a ping for testing
let ping_type = PingType::new("test", true, true, true, true, true, vec![], vec![], true);
let ping_type = PingType::new(
"test",
true,
true,
true,
true,
true,
vec![],
vec![],
true,
vec![],
);
glean.register_ping_type(&ping_type);
// Submit the ping to populate the pending_pings directory
@@ -400,7 +428,18 @@ mod test {
let (mut glean, dir) = new_glean(None);
// Register a ping for testing
let ping_type = PingType::new("test", true, true, true, true, true, vec![], vec![], true);
let ping_type = PingType::new(
"test",
true,
true,
true,
true,
true,
vec![],
vec![],
true,
vec![],
);
glean.register_ping_type(&ping_type);
// Submit the ping to populate the pending_pings directory

View File

@@ -331,6 +331,7 @@ impl PingUploadManager {
headers,
body_has_info_sections,
ping_name,
uploader_capabilities,
} = ping;
let mut request = PingRequest::builder(
&self.language_binding_name,
@@ -340,7 +341,8 @@ impl PingUploadManager {
.path(path)
.body(body)
.body_has_info_sections(body_has_info_sections)
.ping_name(ping_name);
.ping_name(ping_name)
.uploader_capabilities(uploader_capabilities);
if let Some(headers) = headers {
request = request.headers(headers);
@@ -742,7 +744,7 @@ impl PingUploadManager {
self.directory_manager.delete_file(document_id);
}
UnrecoverableFailure { .. } | HttpStatus { code: 400..=499 } => {
UnrecoverableFailure { .. } | HttpStatus { code: 400..=499 } | Incapable { .. } => {
log::warn!(
"Unrecoverable upload failure while attempting to send ping {}. Error was {:?}",
document_id,
@@ -889,6 +891,7 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "ping-name".into(),
uploader_capabilities: vec![],
},
);
@@ -916,6 +919,7 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "ping-name".into(),
uploader_capabilities: vec![],
},
);
}
@@ -954,6 +958,7 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "ping-name".into(),
uploader_capabilities: vec![],
},
);
}
@@ -974,6 +979,7 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "ping-name".into(),
uploader_capabilities: vec![],
},
);
@@ -1007,6 +1013,7 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "ping-name".into(),
uploader_capabilities: vec![],
},
);
}
@@ -1036,6 +1043,7 @@ mod test {
vec![],
vec![],
true,
vec![],
);
glean.register_ping_type(&ping_type);
@@ -1078,6 +1086,7 @@ mod test {
vec![],
vec![],
true,
vec![],
);
glean.register_ping_type(&ping_type);
@@ -1118,6 +1127,7 @@ mod test {
vec![],
vec![],
true,
vec![],
);
glean.register_ping_type(&ping_type);
@@ -1158,6 +1168,7 @@ mod test {
vec![],
vec![],
true,
vec![],
);
glean.register_ping_type(&ping_type);
@@ -1198,6 +1209,7 @@ mod test {
vec![],
vec![],
true,
vec![],
);
glean.register_ping_type(&ping_type);
@@ -1240,6 +1252,7 @@ mod test {
vec![],
vec![],
true,
vec![],
);
glean.register_ping_type(&ping_type);
@@ -1290,6 +1303,7 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "test-ping".into(),
uploader_capabilities: vec![],
},
);
@@ -1310,6 +1324,7 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "test-ping".into(),
uploader_capabilities: vec![],
},
);
@@ -1358,6 +1373,7 @@ mod test {
vec![],
vec![],
true,
vec![],
);
glean.register_ping_type(&ping_type);
@@ -1394,6 +1410,7 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "test-ping".into(),
uploader_capabilities: vec![],
},
);
upload_manager.enqueue_ping(
@@ -1405,6 +1422,7 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "test-ping".into(),
uploader_capabilities: vec![],
},
);
@@ -1434,6 +1452,7 @@ mod test {
vec![],
vec![],
true,
vec![],
);
glean.register_ping_type(&ping_type);
@@ -1494,6 +1513,7 @@ mod test {
vec![],
vec![],
true,
vec![],
);
glean.register_ping_type(&ping_type);
@@ -1575,6 +1595,7 @@ mod test {
vec![],
vec![],
true,
vec![],
);
glean.register_ping_type(&ping_type);
@@ -1657,6 +1678,7 @@ mod test {
vec![],
vec![],
true,
vec![],
);
glean.register_ping_type(&ping_type);
@@ -1741,6 +1763,7 @@ mod test {
vec![],
vec![],
true,
vec![],
);
glean.register_ping_type(&ping_type);
@@ -1841,6 +1864,7 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "ping-name".into(),
uploader_capabilities: vec![],
},
);
upload_manager.enqueue_ping(
@@ -1852,6 +1876,7 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "ping-name".into(),
uploader_capabilities: vec![],
},
);
@@ -1917,6 +1942,7 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "ping-name".into(),
uploader_capabilities: vec![],
};
upload_manager.enqueue_ping(&glean, ping);
assert!(upload_manager.get_upload_task(&glean, false).is_upload());
@@ -1929,6 +1955,7 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "ping-name".into(),
uploader_capabilities: vec![],
};
upload_manager.enqueue_ping(&glean, ping);

View File

@@ -64,6 +64,7 @@ pub struct Builder {
body_max_size: usize,
body_has_info_sections: Option<bool>,
ping_name: Option<String>,
uploader_capabilities: Option<Vec<String>>,
}
impl Builder {
@@ -91,6 +92,7 @@ impl Builder {
body_max_size,
body_has_info_sections: None,
ping_name: None,
uploader_capabilities: None,
}
}
@@ -166,6 +168,12 @@ impl Builder {
self
}
/// Sets the required uploader capabilities.
pub fn uploader_capabilities(mut self, uploader_capabilities: Vec<String>) -> Self {
self.uploader_capabilities = Some(uploader_capabilities);
self
}
/// Consumes the builder and create a PingRequest.
///
/// # Panics
@@ -196,6 +204,9 @@ impl Builder {
ping_name: self
.ping_name
.expect("ping_name must be set before attempting to build PingRequest"),
uploader_capabilities: self
.uploader_capabilities
.expect("uploader_capabilities must be set before attempting to build PingRequest"),
})
}
}
@@ -218,6 +229,8 @@ pub struct PingRequest {
pub body_has_info_sections: bool,
/// The ping's name. Likely also somewhere in `path`.
pub ping_name: String,
/// The capabilities required during this ping's upload.
pub uploader_capabilities: Vec<String>,
}
impl PingRequest {
@@ -280,6 +293,7 @@ mod test {
.body("{}")
.body_has_info_sections(false)
.ping_name("whatevs")
.uploader_capabilities(vec![])
.build()
.unwrap();

View File

@@ -25,6 +25,16 @@ pub enum UploadResult {
unused: i8,
},
/// The uploader is not capable of uploading this request due to lack of or
/// mismatched capabilities.
///
/// e.g. The ping requires upload over OHTTP, but the uploader doesn't support OHTTP.
Incapable {
#[doc(hidden)]
/// Unused field. Required because UniFFI can't handle variants without fields.
unused: i8,
},
/// An HTTP response code.
///
/// This can still indicate an error, depending on the status code.
@@ -55,6 +65,7 @@ impl UploadResult {
UploadResult::HttpStatus { .. } => Some("status_code_unknown"),
UploadResult::UnrecoverableFailure { .. } => Some("unrecoverable"),
UploadResult::RecoverableFailure { .. } => Some("recoverable"),
UploadResult::Incapable { .. } => Some("incapable"),
UploadResult::Done { .. } => None,
}
}
@@ -75,6 +86,14 @@ impl UploadResult {
Self::UnrecoverableFailure { unused: 0 }
}
/// The uploader is not capable of uploading this request due to lack of or
/// mismatched capabilities.
///
/// e.g. The ping requires upload over OHTTP, but the uploader doesn't support OHTTP.
pub fn incapable() -> Self {
Self::Incapable { unused: 0 }
}
/// An HTTP response code.
///
/// This can still indicate an error, depending on the status code.
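A hedged sketch of an uploader honoring these constructors (constructor names as used elsewhere in this diff; the `glean_core::upload` path is assumed, and "ohttp" is a hypothetical capability name):

use glean_core::upload::UploadResult;

fn upload_with(capabilities: &[String]) -> UploadResult {
    // This uploader speaks plain HTTPS only.
    if capabilities.iter().any(|c| c == "ohttp") {
        return UploadResult::incapable();
    }
    // ... perform the actual upload here ...
    UploadResult::http_status(200)
}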

View File

@@ -12,33 +12,18 @@ use glean_core::Lifetime;
fn nofollows_ping(glean: &mut Glean) -> PingType {
// When `follows_collection_enabled=false` then by default `enabled=false`
let ping = PingType::new(
"nofollows",
/* include_client_id */ false,
/* send_if_empty */ true,
/* precise_timestamps */ true,
/* include_info_sections */ false,
/* enabled */ false,
vec![],
vec![],
/* follows_collection_enabled */ false,
);
let ping = PingBuilder::new("nofollows")
.with_send_if_empty(true)
.with_include_info_sections(false)
.with_enabled(false)
.with_follows_collection_enabled(false)
.build();
glean.register_ping_type(&ping);
ping
}
fn manual_ping(glean: &mut Glean) -> PingType {
let ping = PingType::new(
"manual",
/* include_client_id */ true,
/* send_if_empty */ false,
/* precise_timestamps */ true,
/* include_info_sections */ true,
/* enabled */ true,
vec![],
vec![],
/* collection_enabled */ true,
);
let ping = PingBuilder::new("manual").build();
glean.register_ping_type(&ping);
ping
}
@@ -104,17 +89,9 @@ fn nofollows_ping_can_ride_along() {
let nofollows_ping = nofollows_ping(&mut glean);
// Basically `manual_ping` but with a ride-along
let manual_ping = PingType::new(
"manual",
/* include_client_id */ true,
/* send_if_empty */ false,
/* precise_timestamps */ true,
/* include_info_sections */ true,
/* enabled */ true,
vec!["nofollows".to_string()],
vec![],
/* collection_enabled */ true,
);
let manual_ping = PingBuilder::new("manual")
.with_schedules_pings(vec!["nofollows".to_string()])
.build();
glean.register_ping_type(&manual_ping);
// We need to store a metric as an empty ping is not stored.

View File

@@ -78,11 +78,86 @@ pub fn new_glean(tempdir: Option<tempfile::TempDir>) -> (Glean, tempfile::TempDi
}
pub fn new_test_ping(glean: &mut Glean, name: &str) -> PingType {
let ping = PingType::new(name, true, false, true, true, true, vec![], vec![], true);
let ping = PingBuilder::new(name).build();
glean.register_ping_type(&ping);
ping
}
pub struct PingBuilder {
name: String,
include_client_id: bool,
send_if_empty: bool,
precise_timestamps: bool,
include_info_sections: bool,
enabled: bool,
schedules_pings: Vec<String>,
reason_codes: Vec<String>,
follows_collection_enabled: bool,
uploader_capabilities: Vec<String>,
}
impl PingBuilder {
pub fn new(name: &str) -> Self {
Self {
name: name.to_string(),
include_client_id: true,
send_if_empty: false,
precise_timestamps: true,
include_info_sections: true,
enabled: true,
schedules_pings: vec![],
reason_codes: vec![],
follows_collection_enabled: true,
uploader_capabilities: vec![],
}
}
pub fn build(self) -> PingType {
PingType::new(
self.name,
self.include_client_id,
self.send_if_empty,
self.precise_timestamps,
self.include_info_sections,
self.enabled,
self.schedules_pings,
self.reason_codes,
self.follows_collection_enabled,
self.uploader_capabilities,
)
}
pub fn with_send_if_empty(mut self, value: bool) -> Self {
self.send_if_empty = value;
self
}
pub fn with_include_info_sections(mut self, value: bool) -> Self {
self.include_info_sections = value;
self
}
pub fn with_enabled(mut self, value: bool) -> Self {
self.enabled = value;
self
}
pub fn with_follows_collection_enabled(mut self, value: bool) -> Self {
self.follows_collection_enabled = value;
self
}
pub fn with_schedules_pings(mut self, value: Vec<String>) -> Self {
self.schedules_pings = value;
self
}
pub fn with_reasons(mut self, value: Vec<String>) -> Self {
self.reason_codes = value;
self
}
}
/// Converts an iso8601::DateTime to a chrono::DateTime<FixedOffset>
pub fn iso8601_to_chrono(datetime: &iso8601::DateTime) -> chrono::DateTime<chrono::FixedOffset> {
if let YMD { year, month, day } = datetime.date {

View File

@@ -163,17 +163,11 @@ fn test_sending_of_event_ping_when_it_fills_up() {
let store_names: Vec<String> = vec!["events".into()];
for store_name in &store_names {
glean.register_ping_type(&PingType::new(
store_name.clone(),
true,
false,
true,
true,
true,
vec![],
vec!["max_capacity".to_string()],
true,
));
glean.register_ping_type(
&PingBuilder::new(store_name)
.with_reasons(vec!["max_capacity".to_string()])
.build(),
);
}
let click = EventMetric::new(
@@ -231,17 +225,11 @@ fn test_server_knobs_config_changing_max_events() {
let store_names: Vec<String> = vec!["events".into()];
for store_name in &store_names {
glean.register_ping_type(&PingType::new(
store_name.clone(),
true,
false,
true,
true,
true,
vec![],
vec!["max_capacity".to_string()],
true,
));
glean.register_ping_type(
&PingBuilder::new(store_name)
.with_reasons(vec!["max_capacity".to_string()])
.build(),
);
}
// 1. Set up an event to record
@@ -513,17 +501,7 @@ fn event_storage_trimming() {
let new_ping = |glean: &mut Glean, ping: &str| {
// In Rust, pings are registered via construction.
// But that's done asynchronously, so we do it synchronously here:
glean.register_ping_type(&PingType::new(
ping.to_string(),
true,
false,
true,
true,
true,
vec![],
vec![],
true,
));
glean.register_ping_type(&PingBuilder::new(ping).build());
};
// First, register both pings, so that we can record the event in the two pings.
@@ -578,17 +556,7 @@ fn with_event_timestamps() {
ping_lifetime_max_time: 0,
};
let mut glean = Glean::new(cfg).unwrap();
let ping = PingType::new(
"store1",
true,
false,
true,
true,
true,
vec![],
vec![],
true,
);
let ping = PingBuilder::new("store1").build();
glean.register_ping_type(&ping);
let store_name = "store1";

View File

@@ -104,29 +104,11 @@ fn deletion_request_only_when_toggled_from_on_to_off() {
fn empty_pings_with_flag_are_sent() {
let (mut glean, _t) = new_glean(None);
let ping1 = PingType::new(
"custom-ping1",
true,
true,
true,
true,
true,
vec![],
vec![],
true,
);
let ping1 = PingBuilder::new("custom-ping1")
.with_send_if_empty(true)
.build();
glean.register_ping_type(&ping1);
let ping2 = PingType::new(
"custom-ping2",
true,
false,
true,
true,
true,
vec![],
vec![],
true,
);
let ping2 = PingBuilder::new("custom-ping2").build();
glean.register_ping_type(&ping2);
// No data is stored in either of the custom pings
@@ -163,7 +145,7 @@ fn test_pings_submitted_metric() {
let metrics_ping = new_test_ping(&mut glean, "metrics");
let baseline_ping = new_test_ping(&mut glean, "baseline");
let custom_ping = PingType::new("custom", true, true, true, true, true, vec![], vec![], true);
let custom_ping = PingBuilder::new("custom").with_send_if_empty(true).build();
glean.register_ping_type(&custom_ping);
// We need to store a metric as an empty ping is not stored.
@@ -296,30 +278,15 @@ fn events_ping_with_metric_but_no_events_is_not_sent() {
fn test_scheduled_pings_are_sent() {
let (mut glean, _t) = new_glean(None);
let piggyback_ping = PingType::new(
"piggyback",
true,
true,
true,
true,
true,
vec![],
vec![],
true,
);
let piggyback_ping = PingBuilder::new("piggyback")
.with_send_if_empty(true)
.build();
glean.register_ping_type(&piggyback_ping);
let trigger_ping = PingType::new(
"trigger",
true,
true,
true,
true,
true,
vec!["piggyback".into()],
vec![],
true,
);
let trigger_ping = PingBuilder::new("trigger")
.with_send_if_empty(true)
.with_schedules_pings(vec!["piggyback".into()])
.build();
glean.register_ping_type(&trigger_ping);
assert!(trigger_ping.submit_sync(&glean, None));

View File

@@ -97,17 +97,7 @@ fn test_metrics_must_report_experimentation_id() {
})
.unwrap();
let ping_maker = PingMaker::new();
let ping_type = PingType::new(
"store1",
true,
false,
true,
true,
true,
vec![],
vec![],
true,
);
let ping_type = PingBuilder::new("store1").build();
glean.register_ping_type(&ping_type);
// Record something, so the ping will have data
@@ -164,17 +154,7 @@ fn experimentation_id_is_removed_if_send_if_empty_is_false() {
.unwrap();
let ping_maker = PingMaker::new();
let unknown_ping_type = PingType::new(
"unknown",
true,
false,
true,
true,
true,
vec![],
vec![],
true,
);
let unknown_ping_type = PingBuilder::new("unknown").build();
glean.register_ping_type(&unknown_ping_type);
assert!(ping_maker
@@ -190,17 +170,7 @@ fn collect_must_report_none_when_no_data_is_stored() {
let (mut glean, ping_maker, ping_type, _t) = set_up_basic_ping();
let unknown_ping_type = PingType::new(
"unknown",
true,
false,
true,
true,
true,
vec![],
vec![],
true,
);
let unknown_ping_type = PingBuilder::new("unknown").build();
glean.register_ping_type(&ping_type);
assert!(ping_maker
@@ -224,17 +194,7 @@ fn seq_number_must_be_sequential() {
for i in 0..=1 {
for ping_name in ["store1", "store2"].iter() {
let ping_type = PingType::new(
*ping_name,
true,
false,
true,
true,
true,
vec![],
vec![],
true,
);
let ping_type = PingBuilder::new(ping_name).build();
let ping = ping_maker
.collect(&glean, &ping_type, None, "", "")
.unwrap();
@@ -319,7 +279,7 @@ fn no_pings_submitted_if_upload_disabled() {
// Regression test, bug 1603571
let (mut glean, _t) = new_glean(None);
let ping_type = PingType::new("store1", true, true, true, true, true, vec![], vec![], true);
let ping_type = PingBuilder::new("store1").with_send_if_empty(true).build();
glean.register_ping_type(&ping_type);
assert!(ping_type.submit_sync(&glean, None));
@@ -337,7 +297,7 @@ fn no_pings_submitted_if_upload_disabled() {
fn metadata_is_correctly_added_when_necessary() {
let (mut glean, _t) = new_glean(None);
glean.set_debug_view_tag("valid-tag");
let ping_type = PingType::new("store1", true, true, true, true, true, vec![], vec![], true);
let ping_type = PingBuilder::new("store1").with_send_if_empty(true).build();
glean.register_ping_type(&ping_type);
assert!(ping_type.submit_sync(&glean, None));

View File

@@ -1 +1 @@
{"files":{"Cargo.lock":"d5243e925c951fc394126291886afc070712380f2faa93a68c3f571c7fccc105","Cargo.toml":"f86642423b6bb7faed3108bdf55e4add8e33ccb98dda8a15ede988db2b8fa974","LICENSE":"1f256ecad192880510e84ad60474eab7589218784b9a50bc7ceee34c2b91f1d5","README.md":"5627cc81e6187ab6c2b4dff061af16d559edcab64ba786bac39daa69c703c595","src/common_test.rs":"997f5331b719f82d86bb5e2f8e711da9cfb9433403e233c04a0ff39c3de5f7d0","src/configuration.rs":"de65ab99a26b4547be20803bc195cb50a6ab40b1a3f49a2e6230fed5a9d7a8d8","src/core_metrics.rs":"fef8fb4e5fa57c179836c6eb2cf59278fe3b8b036dbe57b0ff02971b4acd822f","src/lib.rs":"97d7d8001e091bd009e579ccb296a2355f523e5608e81e0485b3492347b40989","src/net/http_uploader.rs":"01ad5bd91384411a12c74434cd1c5cd585078cb34faba4615c70bdb669a9bccb","src/net/mod.rs":"5dff006240a6522e1db988514f22fb9361b3dece0c22fcf9eb8ff1b3308dd8f0","src/private/event.rs":"f299c79e4e2acb657f06004f3038bd8909e287719458566bc7f96262d8665e62","src/private/mod.rs":"66e90c41de74d1e80c5d3f49b8f1a86b8396be0b8c4a80f1a28903fe6d105ecf","src/private/object.rs":"7f17a7a658e8f7aa19a6bedf70f60f3f42713316d5d60298d682bb045caaafb7","src/private/ping.rs":"3b126183d4a5fdc200a9ded45c9a656d7d1e4c44e0d7e1c22f1b0e6968b07630","src/system.rs":"6eae5b41c15eba9cad6dbd116abe3519ee3e1fe034e79bdd692b029829a8c384","src/test.rs":"7c5f67bdce46bdb14b77cde0b716c2c2e0ab831e6c01b2e417c348c562289cac","tests/common/mod.rs":"68b0fca253f5c773cdb54d10a02d324d7c74ed5e16d4ba96387e4b643af2c0f3","tests/custom_distribution_buffered.rs":"47c13d1f39adf3881e10caa19e0c08235f08958809e234bf37a79d37d7322cd5","tests/init_fails.rs":"073b8c244ecbcae8e9cfc12cffd0629038bd978a4a4337073dbed6866023317b","tests/interruptible_shutdown.rs":"17b674c5960f3787ba0c51dc54f0c3759403427ad819985ad85f254e261002ab","tests/memory_distribution_buffered.rs":"db487475a5cf17a0864ccf150984ebdd28bf616573772cf678246cc1bdbcbc0f","tests/metric_metadata.rs":"05c947d3decf0a3281378dbb108080a05319ad8f130af5b07f9b049b80e5f04f","tests/never_init.rs":"fcbba9034f829eef0f54ff650f6442ad75cdd609bdd02f45472fd4456f8e3a66","tests/no_time_to_init.rs":"0a2027de97188a82f97ba6a45c75c740917eea4e1f4bd4b947b6da3da7c354ed","tests/overflowing_preinit.rs":"985e140460a100986fd051ce901b787a3a7a9747a856cd06066b740ac7d2381c","tests/persist_ping_lifetime_nopanic.rs":"18379d3ffbf4a2c8c684c04ff7a0660b86dfbbb447db2d24dfed6073cb7ddf8f","tests/schema.rs":"da8f808f7cfd42b0cefd5dd04ca87d514392476ba268a32c140d3293c9332caf","tests/simple.rs":"4991afdbd037e789af2325fb87dc4a1e0fbbfa63aa54f1f22dc8bf01190473c7","tests/test-delayed-ping-data.sh":"4a6db98b4df6b77898ace6a8b4e8b4c60d3e5c44873bbf38c62e83583e27a3ff","tests/test-enabled-pings.sh":"06656e38f63e65475006b107dd6bd179b0cbaa1fad1470de38e679e91a9315a3","tests/test-pending-gets-removed.sh":"e335f2f00fa97a61b6d94e0005fb3b9de8c8db8076111a67ca47d85392039ea9","tests/test-ping-lifetime-flush.sh":"e8f118ea2f6fd973809e38d5e828a03cfccfe0b0f497ccde5ec92d6d1380c071","tests/test-shutdown-blocking.sh":"a44d8d4bbe2ee3ede9e48121150ae7a5386025160c5cef2181ca142232c5fb27","tests/test-thread-crashing.sh":"f3cd0cc8a7b4fe82bef0fe6fbfbbe45fbad6da3afe0f82578bc5cfb2d6527ac6","tests/timing_distribution_buffered.rs":"501f7289c0c28f0ab83838c88b058999b19436d0f2b693be0787513d7b67e06d","tests/timing_distribution_single_sample.rs":"4f9498b6ef29913da0356027efe5f572c81d2f426e8538c068b54a1cfa33c1b8","tests/upload_timing.rs":"8b9ed65eaba3d51faf3cb62d1280d2737f234e0332615bfe6d9c60aab44b6560"},"package":"e2afa6754943cac5243099efd0d26e89cc8e06f1585776ba14ab0c6ee99e1f71"}
{"files":{"Cargo.lock":"61c7d6620026e84a7bb067305d92ffb8c51a59261c339d3ef6a8406d03bbfd92","Cargo.toml":"bec966c61ac0670d367556c54f9156aad7b2cd72dce2d09c783fdf421f49c563","LICENSE":"1f256ecad192880510e84ad60474eab7589218784b9a50bc7ceee34c2b91f1d5","README.md":"5627cc81e6187ab6c2b4dff061af16d559edcab64ba786bac39daa69c703c595","src/common_test.rs":"c86cccfb7da1506cfed29cb2ee13d839b7ac7cffdfd70793c9665bb44e0b684f","src/configuration.rs":"de65ab99a26b4547be20803bc195cb50a6ab40b1a3f49a2e6230fed5a9d7a8d8","src/core_metrics.rs":"fef8fb4e5fa57c179836c6eb2cf59278fe3b8b036dbe57b0ff02971b4acd822f","src/lib.rs":"61b56a35c2bc6cd60bba2225b399881512d4b9a7d8cadca7fbed37ee6959d74c","src/net/http_uploader.rs":"0a94ac3cd87cb021529dee46d537765ab8d923e0f4ac7615225e878d3739e6dc","src/net/mod.rs":"09ba010b03d045fd8a2ccbe4f205c5275bb622bceb34cb81a0aa8f7d33804e2e","src/private/event.rs":"f299c79e4e2acb657f06004f3038bd8909e287719458566bc7f96262d8665e62","src/private/mod.rs":"66e90c41de74d1e80c5d3f49b8f1a86b8396be0b8c4a80f1a28903fe6d105ecf","src/private/object.rs":"7f17a7a658e8f7aa19a6bedf70f60f3f42713316d5d60298d682bb045caaafb7","src/private/ping.rs":"d2fb45e9e178ff6b17aa9c1b5258dfcd2ed91a2b43b44dec826de256ef5e8520","src/system.rs":"d602804a72258bfd65e51c571946631732ee27d81342d8aa406e47fdd241bbfa","src/test.rs":"bfbea9416dfdc96ebc1f9af5005b5b23f2285b74ef82c74cdab11635322ea3e3","tests/collection_enabled.rs":"3327a949dbdeec493d661261abda68ffa71acc50ab24cba4fde5302749e6f16b","tests/collection_enabled_bin.rs":"d3a6458b84012a447e5cb792f2292a06951ed252fad803b9166b437bacba542c","tests/common/mod.rs":"2fd391c5eb45f56fdfa3261dd631406c67ed36b10b0d5432febe2483da5c9d89","tests/custom_distribution_buffered.rs":"47c13d1f39adf3881e10caa19e0c08235f08958809e234bf37a79d37d7322cd5","tests/init_fails.rs":"ca7fa1b3dd6a21a9e005b7a4f0a18664c4bceb952dd463db8316500f72280d5b","tests/interruptible_shutdown.rs":"3d954bbe47d4f5fd103c51a4ff99f151662143c25c826da9734a00cd215909b9","tests/memory_distribution_buffered.rs":"db487475a5cf17a0864ccf150984ebdd28bf616573772cf678246cc1bdbcbc0f","tests/metric_metadata.rs":"05c947d3decf0a3281378dbb108080a05319ad8f130af5b07f9b049b80e5f04f","tests/near-empty-c0ffee-db.safe.bin":"89afb3bb8fc94430fb0ed0fe55f85f3f8bcc8fd0fed69a9df13cc560294ec9f5","tests/never_init.rs":"51fff5618f6603bc0945d70131698d10a1c6275f43bbc22a2de5807f8a79229f","tests/no_time_to_init.rs":"2ede23df6618ff1cb5ae3b7bbf95900ad0fd92072afa2e0319bf147b4f75cefc","tests/overflowing_preinit.rs":"985e140460a100986fd051ce901b787a3a7a9747a856cd06066b740ac7d2381c","tests/persist_ping_lifetime_nopanic.rs":"18379d3ffbf4a2c8c684c04ff7a0660b86dfbbb447db2d24dfed6073cb7ddf8f","tests/schema.rs":"23b49005402b914e55a0c5c155f30c2662c609f79be78d1385ec25b3600b3547","tests/simple.rs":"15c76a1b5a336fd6abfbdebafc971f5c6a9b75107ddbca65f0031cde3e2886da","tests/test-delayed-ping-data.sh":"4a6db98b4df6b77898ace6a8b4e8b4c60d3e5c44873bbf38c62e83583e27a3ff","tests/test-enabled-pings.sh":"06656e38f63e65475006b107dd6bd179b0cbaa1fad1470de38e679e91a9315a3","tests/test-pending-gets-removed.sh":"e335f2f00fa97a61b6d94e0005fb3b9de8c8db8076111a67ca47d85392039ea9","tests/test-ping-lifetime-flush.sh":"e8f118ea2f6fd973809e38d5e828a03cfccfe0b0f497ccde5ec92d6d1380c071","tests/test-shutdown-blocking.sh":"a44d8d4bbe2ee3ede9e48121150ae7a5386025160c5cef2181ca142232c5fb27","tests/test-thread-crashing.sh":"f3cd0cc8a7b4fe82bef0fe6fbfbbe45fbad6da3afe0f82578bc5cfb2d6527ac6","tests/timing_distribution_buffered.rs":"501f7289c0c28f0ab83838c88b058999b19436d0f2b693be0787513d7b67e06d","tests/timing_distrib
ution_single_sample.rs":"4f9498b6ef29913da0356027efe5f572c81d2f426e8538c068b54a1cfa33c1b8","tests/upload_timing.rs":"b3b9db197bc2ec41556388969a6bf289e7ef19e05b9019bc2bd98c823fcf6ea3","tests/uploader_capabilities.rs":"347f19e534a50a211ea179d6818631270d1b4ec468098e6b6abcde1e4a6a9bca"},"package":"251b9cb685554b96dcf785dba69ce90447006dd6d9229db783336c981c3777e1"}

third_party/rust/glean/Cargo.lock generated vendored
View File

@@ -28,9 +28,9 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.71"
version = "1.0.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04"
[[package]]
name = "arrayref"
@@ -38,49 +38,17 @@ version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544"
[[package]]
name = "askama"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47cbc3cf73fa8d9833727bbee4835ba5c421a0d65b72daf9a7b5d0e0f9cfb57e"
dependencies = [
"askama_derive",
"askama_escape",
]
[[package]]
name = "askama_derive"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c22fbe0413545c098358e56966ff22cdd039e10215ae213cfbd65032b119fc94"
dependencies = [
"basic-toml",
"mime",
"mime_guess",
"nom",
"proc-macro2",
"quote",
"serde",
"syn",
]
[[package]]
name = "askama_escape"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "619743e34b5ba4e9703bba34deac3427c72507c7159f5fd030aea8cac0cfe341"
[[package]]
name = "autocfg"
version = "1.1.0"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
[[package]]
name = "basic-toml"
version = "0.1.2"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c0de75129aa8d0cceaf750b89013f0e08804d6ec61416da787b35ad0d7cddf1"
checksum = "823388e228f614e9558c6804262db37960ec8821856535f5c3f59913140558f8"
dependencies = [
"serde",
]
@@ -117,41 +85,15 @@ checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
[[package]]
name = "bytes"
version = "1.3.0"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dfb24e866b15a1af2a1b663f10c6b6b8f397a84aadb828f12e5b289ec23a3a3c"
checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b"
[[package]]
name = "camino"
version = "1.1.4"
version = "1.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c530edf18f37068ac2d977409ed5cd50d53d73bc653c7647b48eb78976ac9ae2"
dependencies = [
"serde",
]
[[package]]
name = "cargo-platform"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cbdb825da8a5df079a43676dbe042702f1707b1109f713a01420fbb4cc71fa27"
dependencies = [
"serde",
]
[[package]]
name = "cargo_metadata"
version = "0.15.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "982a0cf6a99c350d7246035613882e376d58cebe571785abc5da4f648d53ac0a"
dependencies = [
"camino",
"cargo-platform",
"semver",
"serde",
"serde_json",
"thiserror",
]
checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3"
[[package]]
name = "cc"
@@ -280,9 +222,12 @@ dependencies = [
[[package]]
name = "fs-err"
version = "2.9.0"
version = "2.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0845fa252299212f0389d64ba26f34fa32cfe41588355f21ed507c59a0f64541"
checksum = "88a41f105fe1d5b6b34b2055e3dc59bb79b46b48b2040b9e6c7b4b5de097aa41"
dependencies = [
"autocfg",
]
[[package]]
name = "getrandom"
@@ -297,7 +242,7 @@ dependencies = [
[[package]]
name = "glean"
version = "63.1.0"
version = "64.0.1"
dependencies = [
"crossbeam-channel",
"env_logger",
@@ -315,9 +260,9 @@ dependencies = [
[[package]]
name = "glean-core"
version = "63.1.0"
version = "64.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53cd53bb7a3b89b17d3989e95dd808b137ff47c504d1d19f14cb0d820cc2f42e"
checksum = "a49d1d62648ddeed8cb996373046ea45de93f1d1ff956aba054b9304bc305753"
dependencies = [
"android_logger",
"bincode",
@@ -339,15 +284,15 @@ dependencies = [
[[package]]
name = "glob"
version = "0.3.0"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2"
[[package]]
name = "goblin"
version = "0.8.0"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb07a4ffed2093b118a525b1d8f5204ae274faed5604537caf7135d0f18d9887"
checksum = "1b363a30c165f666402fe6a3024d3bec7ebc898f96a4a23bd1c99f8dbf3f4f47"
dependencies = [
"log",
"plain",
@@ -419,9 +364,9 @@ dependencies = [
[[package]]
name = "itoa"
version = "1.0.4"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc"
checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674"
[[package]]
name = "json-pointer"
@@ -470,27 +415,27 @@ checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f"
[[package]]
name = "log"
version = "0.4.20"
version = "0.4.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f"
[[package]]
name = "memchr"
version = "2.5.0"
version = "2.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
[[package]]
name = "mime"
version = "0.3.16"
version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d"
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
[[package]]
name = "mime_guess"
version = "2.0.4"
version = "2.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef"
checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e"
dependencies = [
"mime",
"unicase",
@@ -513,9 +458,9 @@ dependencies = [
[[package]]
name = "nom"
version = "7.1.1"
version = "7.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8903e5a29a317527874d0402f867152a3d21c908bb0b933e416c65e301d4c36"
checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
dependencies = [
"memchr",
"minimal-lexical",
@@ -552,9 +497,9 @@ dependencies = [
[[package]]
name = "once_cell"
version = "1.18.0"
version = "1.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
[[package]]
name = "ordered-float"
@@ -578,9 +523,9 @@ dependencies = [
[[package]]
name = "paste"
version = "1.0.10"
version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf1c2c742266c2f1041c914ba65355a83ae8747b05f208319784083583494b4b"
checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
[[package]]
name = "percent-encoding"
@@ -596,18 +541,18 @@ checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6"
[[package]]
name = "proc-macro2"
version = "1.0.66"
version = "1.0.93"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9"
checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.31"
version = "1.0.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5fe8a65d69dd0808184ebb5f836ab526bb259db23c657efa38711b1072ee47f0"
checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc"
dependencies = [
"proc-macro2",
]
@@ -636,6 +581,45 @@ version = "0.6.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
[[package]]
name = "rinja"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3dc4940d00595430b3d7d5a01f6222b5e5b51395d1120bdb28d854bb8abb17a5"
dependencies = [
"itoa",
"rinja_derive",
]
[[package]]
name = "rinja_derive"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d9ed0146aef6e2825f1b1515f074510549efba38d71f4554eec32eb36ba18b"
dependencies = [
"basic-toml",
"memchr",
"mime",
"mime_guess",
"proc-macro2",
"quote",
"rinja_parser",
"rustc-hash",
"serde",
"syn",
]
[[package]]
name = "rinja_parser"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93f9a866e2e00a7a1fb27e46e9e324a6f7c0e7edc4543cae1d38f4e4a100c610"
dependencies = [
"memchr",
"nom",
"serde",
]
[[package]]
name = "rkv"
version = "0.19.0"
@@ -658,6 +642,12 @@ dependencies = [
"uuid",
]
[[package]]
name = "rustc-hash"
version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
[[package]]
name = "rustix"
version = "0.38.20"
@@ -673,9 +663,9 @@ dependencies = [
[[package]]
name = "ryu"
version = "1.0.11"
version = "1.0.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09"
checksum = "6ea1a2d0a644769cc99faa24c3ad26b379b786fe7c36fd3c546254801650e6dd"
[[package]]
name = "scroll"
@@ -697,29 +687,20 @@ dependencies = [
"syn",
]
[[package]]
name = "semver"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4"
dependencies = [
"serde",
]
[[package]]
name = "serde"
version = "1.0.179"
version = "1.0.217"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a5bf42b8d227d4abf38a1ddb08602e229108a517cd4e5bb28f9c7eaafdce5c0"
checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.179"
version = "1.0.217"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "741e124f5485c7e60c03b043f79f320bff3527f4bbf12cf3831750dc46a0ec2c"
checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0"
dependencies = [
"proc-macro2",
"quote",
@@ -728,20 +709,21 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.89"
version = "1.0.138"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "020ff22c755c2ed3f8cf162dbb41a7268d934702f3ed3631656ea597e08fc3db"
checksum = "d434192e7da787e94a6ea7e9670b26a036d0ca41e0b7efb2676dd32bae872949"
dependencies = [
"itoa",
"memchr",
"ryu",
"serde",
]
[[package]]
name = "siphasher"
version = "0.3.10"
version = "0.3.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de"
checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d"
[[package]]
name = "smawk"
@@ -757,9 +739,9 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "syn"
version = "2.0.26"
version = "2.0.98"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "45c3457aacde3c65315de5031ec191ce46604304d2446e803d71ade03308d970"
checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1"
dependencies = [
"proc-macro2",
"quote",
@@ -781,27 +763,27 @@ dependencies = [
[[package]]
name = "textwrap"
version = "0.16.0"
version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d"
checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9"
dependencies = [
"smawk",
]
[[package]]
name = "thiserror"
version = "1.0.40"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac"
checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.40"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f"
checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
dependencies = [
"proc-macro2",
"quote",
@@ -836,21 +818,18 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
[[package]]
name = "toml"
version = "0.5.10"
version = "0.5.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1333c76748e868a4d9d1017b5ab53171dfd095f70c712fdb4653a406547f598f"
checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234"
dependencies = [
"serde",
]
[[package]]
name = "unicase"
version = "2.6.0"
version = "2.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"
dependencies = [
"version_check",
]
checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539"
[[package]]
name = "unicode-bidi"
@@ -860,9 +839,9 @@ checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992"
[[package]]
name = "unicode-ident"
version = "1.0.9"
version = "1.0.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15811caf2415fb889178633e7724bad2509101cde276048e013b9def5e51fa0"
checksum = "a210d160f08b701c8721ba1c726c11662f877ea6b7094007e1ca9a1041945034"
[[package]]
name = "unicode-normalization"
@@ -875,9 +854,9 @@ dependencies = [
[[package]]
name = "uniffi"
version = "0.28.1"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2db87def739fe4183947f8419d572d1849a4a09355eba4e988a2105cfd0ac6a7"
checksum = "ba62a57e90f9baed5ad02a71a0870180fa1cc35499093b2d21be2edfb68ec0f7"
dependencies = [
"anyhow",
"uniffi_build",
@@ -887,12 +866,11 @@ dependencies = [
[[package]]
name = "uniffi_bindgen"
version = "0.28.1"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a112599c9556d1581e4a3d72019a74c2c3e122cc27f4af12577a429c4d5e614"
checksum = "2242f35214f1e0e3b47c495d340c69f649f9a9ece3a943a29e275686cc884533"
dependencies = [
"anyhow",
"askama",
"camino",
"fs-err",
"glob",
@@ -900,6 +878,7 @@ dependencies = [
"heck",
"once_cell",
"paste",
"rinja",
"serde",
"textwrap",
"toml",
@@ -909,47 +888,44 @@ dependencies = [
[[package]]
name = "uniffi_build"
version = "0.28.1"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2b12684401d2a8508ca9c72a95bbc45906417e42fc80942abaf033bbf01aa33"
checksum = "c887a6c9a2857d8dc2ab0c8d578e8aa4978145b4fd65ed44296341e89aebc3cc"
dependencies = [
"anyhow",
"camino",
"uniffi_bindgen",
]
[[package]]
name = "uniffi_checksum_derive"
version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a22dbe67c1c957ac6e7611bdf605a6218aa86b0eebeb8be58b70ae85ad7d73dc"
dependencies = [
"quote",
"syn",
]
[[package]]
name = "uniffi_core"
version = "0.28.1"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a0c35aaad30e3a9e6d4fe34e358d64dbc92ee09045b48591b05fc9f12e0905b"
checksum = "cad9fbdeb7ae4daf8d0f7704a3b638c37018eb16bb701e30fa17a2dd3e2d39c1"
dependencies = [
"anyhow",
"bytes",
"camino",
"log",
"once_cell",
"paste",
"static_assertions",
]
[[package]]
name = "uniffi_macros"
version = "0.28.1"
name = "uniffi_internal_macros"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db66474c5c61b0f7afc3b4995fecf9b72b340daa5ca0ef3da7778d75eb5482ea"
checksum = "22a9dba1d78b9ce429439891089c223478043d52a1c3176a0fcea2b5573a7fcf"
dependencies = [
"quote",
"syn",
]
[[package]]
name = "uniffi_macros"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78dd5f8eefba5898b901086f5e7916da67b9a5286a01cc44e910cd75fa37c630"
dependencies = [
"bincode",
"camino",
"fs-err",
"once_cell",
@@ -963,39 +939,24 @@ dependencies = [
[[package]]
name = "uniffi_meta"
version = "0.28.1"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d898893f102e0e39b8bcb7e3d2188f4156ba280db32db9e8af1f122d057e9526"
checksum = "9d5965b1d4ffacef1eaa72fef9c00d2491641e87ad910f6c5859b9c503ddb16a"
dependencies = [
"anyhow",
"bytes",
"siphasher",
"uniffi_checksum_derive",
]
[[package]]
name = "uniffi_testing"
version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c6aa4f0cf9d12172d84fc00a35a6c1f3522b526daad05ae739f709f6941b9b6"
dependencies = [
"anyhow",
"camino",
"cargo_metadata",
"fs-err",
"once_cell",
"uniffi_internal_macros",
]
[[package]]
name = "uniffi_udl"
version = "0.28.1"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b044e9c519e0bb51e516ab6f6d8f4f4dcf900ce30d5ad07c03f924e2824f28e"
checksum = "279b82bac9a382c796a0d210bb8354a0b813499b28aa1de046c85d78ca389805"
dependencies = [
"anyhow",
"textwrap",
"uniffi_meta",
"uniffi_testing",
"weedle2",
]
@@ -1019,12 +980,6 @@ dependencies = [
"getrandom",
]
[[package]]
name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "wasi"
version = "0.10.0+wasi-snapshot-preview1"

View File

@@ -11,9 +11,9 @@
[package]
edition = "2021"
rust-version = "1.76"
rust-version = "1.82"
name = "glean"
version = "63.1.0"
version = "64.0.1"
authors = [
"Jan-Erik Rediger <jrediger@mozilla.com>",
"The Glean Team <glean-team@mozilla.com>",
@@ -43,6 +43,14 @@ repository = "https://github.com/mozilla/glean"
name = "glean"
path = "src/lib.rs"
[[test]]
name = "collection_enabled"
path = "tests/collection_enabled.rs"
[[test]]
name = "collection_enabled_bin"
path = "tests/collection_enabled_bin.rs"
[[test]]
name = "custom_distribution_buffered"
path = "tests/custom_distribution_buffered.rs"
@@ -99,11 +107,15 @@ path = "tests/timing_distribution_single_sample.rs"
name = "upload_timing"
path = "tests/upload_timing.rs"
[[test]]
name = "uploader_capabilities"
path = "tests/uploader_capabilities.rs"
[dependencies.crossbeam-channel]
version = "0.5"
[dependencies.glean-core]
version = "63.1.0"
version = "64.0.1"
[dependencies.inherent]
version = "1"

View File

@@ -46,7 +46,18 @@ pub(crate) fn new_glean(
.build(),
};
_ = PingType::new("store1", true, true, true, true, true, vec![], vec![], true);
_ = PingType::new(
"store1",
true,
true,
true,
true,
true,
vec![],
vec![],
true,
vec![],
);
crate::test_reset_glean(cfg, ClientInfoMetrics::unknown(), clear_stores);
dir

View File

@@ -23,7 +23,7 @@
//! let cfg = ConfigurationBuilder::new(true, "/tmp/data", "org.mozilla.glean_core.example").build();
//! glean::initialize(cfg, ClientInfoMetrics::unknown());
//!
//! let prototype_ping = PingType::new("prototype", true, true, true, true, true, vec!(), vec!(), true);
//! let prototype_ping = PingType::new("prototype", true, true, true, true, true, vec!(), vec!(), true, vec![]);
//!
//! prototype_ping.submit(None);
//! ```

View File

@@ -2,7 +2,7 @@
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at https://mozilla.org/MPL/2.0/.
use crate::net::{PingUploadRequest, PingUploader, UploadResult};
use crate::net::{CapablePingUploadRequest, PingUploader, UploadResult};
/// A simple mechanism to upload pings over HTTPS.
#[derive(Debug)]
@@ -14,7 +14,8 @@ impl PingUploader for HttpUploader {
/// # Arguments
///
/// * `upload_request` - the requested upload.
fn upload(&self, upload_request: PingUploadRequest) -> UploadResult {
fn upload(&self, upload_request: CapablePingUploadRequest) -> UploadResult {
let upload_request = upload_request.capable(|_| true).unwrap();
log::debug!("TODO bug 1675468: submitting to {:?}", upload_request.url);
UploadResult::http_status(200)
}

View File

@@ -34,6 +34,26 @@ pub struct PingUploadRequest {
pub ping_name: String,
}
/// A PingUploadRequest requiring proof of uploader capability.
pub struct CapablePingUploadRequest {
request: PingUploadRequest,
capabilities: Vec<String>,
}
impl CapablePingUploadRequest {
/// If the uploader can satisfy this upload request's required capabilities,
/// call this to obtain the inner PingUploadRequest.
pub fn capable<F>(self, func: F) -> Option<PingUploadRequest>
where
F: FnOnce(Vec<String>) -> bool,
{
if func(self.capabilities) {
return Some(self.request);
}
None
}
}
/// A description of a component used to upload pings.
pub trait PingUploader: std::fmt::Debug + Send + Sync {
/// Uploads a ping to a server.
@@ -44,7 +64,7 @@ pub trait PingUploader: std::fmt::Debug + Send + Sync {
/// * `body` - the serialized text data to send.
/// * `headers` - a vector of tuples containing the headers to send with
/// the request, i.e. (Name, Value).
fn upload(&self, upload_request: PingUploadRequest) -> UploadResult;
fn upload(&self, upload_request: CapablePingUploadRequest) -> UploadResult;
}
/// The logic for uploading pings: this leaves the actual upload mechanism as
@@ -132,6 +152,10 @@ impl UploadManager {
body_has_info_sections: request.body_has_info_sections,
ping_name: request.ping_name,
};
let upload_request = CapablePingUploadRequest {
request: upload_request,
capabilities: request.uploader_capabilities,
};
let result = inner.uploader.upload(upload_request);
// Process the upload response.
match glean_core::glean_process_ping_upload_response(doc_id, result) {
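A hedged example of implementing `PingUploader` against the new request type; the `glean::net` path and the re-exported `incapable()` constructor are assumptions based on the names visible in this diff:

use glean::net::{CapablePingUploadRequest, PingUploader, UploadResult};

#[derive(Debug)]
struct PlainUploader;

impl PingUploader for PlainUploader {
    fn upload(&self, upload_request: CapablePingUploadRequest) -> UploadResult {
        // Only accept requests that need no special capabilities.
        let Some(request) = upload_request.capable(|caps| caps.is_empty()) else {
            return UploadResult::incapable();
        };
        log::debug!("uploading {} bytes to {}", request.body.len(), request.url);
        UploadResult::http_status(200)
    }
}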

View File

@@ -34,6 +34,7 @@ impl PingType {
/// * `schedules_pings` - A list of pings which are triggered for submission when this ping is
/// submitted.
/// * `reason_codes` - The valid reason codes for this ping.
/// * `uploader_capabilities` - The capabilities required during this ping's upload.
#[allow(clippy::too_many_arguments)]
pub fn new<A: Into<String>>(
name: A,
@@ -45,6 +46,7 @@ impl PingType {
schedules_pings: Vec<String>,
reason_codes: Vec<String>,
follows_collection_enabled: bool,
uploader_capabilities: Vec<String>,
) -> Self {
let inner = glean_core::metrics::PingType::new(
name.into(),
@@ -56,6 +58,7 @@ impl PingType {
schedules_pings,
reason_codes,
follows_collection_enabled,
uploader_capabilities,
);
Self {

View File

@@ -46,11 +46,36 @@ pub const ARCH: &str = "x86";
/// `target_arch` when building this crate: `x86_64`
pub const ARCH: &str = "x86_64";
#[cfg(target_arch = "powerpc64")]
/// `target_arch` when building this crate: `powerpc64`
pub const ARCH: &str = "powerpc64";
#[cfg(target_arch = "riscv64")]
/// `target_arch` when building this crate: `riscv64`
pub const ARCH: &str = "riscv64";
#[cfg(target_arch = "mips")]
/// `target_arch` when building this crate: `mips`
pub const ARCH: &str = "mips";
#[cfg(target_arch = "loongarch64")]
/// `target_arch` when building this crate: `loongarch64`
pub const ARCH: &str = "loongarch64";
#[cfg(target_arch = "s390x")]
/// `target_arch` when building this crate: `s390x`
pub const ARCH: &str = "s390x";
#[cfg(not(any(
target_arch = "aarch64",
target_arch = "arm",
target_arch = "x86",
target_arch = "x86_64"
target_arch = "x86_64",
target_arch = "powerpc64",
target_arch = "riscv64",
target_arch = "mips",
target_arch = "loongarch64",
target_arch = "s390x",
)))]
/// `target_arch` when building this crate: unknown!
pub const ARCH: &str = "Unknown";
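A hedged sketch of consuming the constant; the `glean::system` module path is an assumption from this crate's file layout:

// `ARCH` resolves at compile time to whichever `target_arch` matched.
fn log_build_arch() {
    log::info!("glean built for: {}", glean::system::ARCH);
}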

View File

@@ -19,7 +19,18 @@ use super::*;
use crate::common_test::{lock_test, new_glean, GLOBAL_APPLICATION_ID};
fn new_test_ping(name: &str) -> PingType {
PingType::new(name, true, true, true, true, true, vec![], vec![], true)
PingType::new(
name,
true,
true,
true,
true,
true,
vec![],
vec![],
true,
vec![],
)
}
#[test]
@@ -34,7 +45,8 @@ fn send_a_ping() {
sender: crossbeam_channel::Sender<net::PingUploadRequest>,
}
impl net::PingUploader for FakeUploader {
fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let upload_request = upload_request.capable(|_| true).unwrap();
self.sender.send(upload_request).unwrap();
net::UploadResult::http_status(200)
}
@@ -75,7 +87,8 @@ fn send_a_ping_without_info_sections() {
sender: crossbeam_channel::Sender<net::PingUploadRequest>,
}
impl net::PingUploader for FakeUploader {
fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let upload_request = upload_request.capable(|_| true).unwrap();
self.sender.send(upload_request).unwrap();
net::UploadResult::http_status(200)
}
@@ -104,6 +117,7 @@ fn send_a_ping_without_info_sections() {
vec![],
vec![],
true,
vec![],
);
custom_ping.submit(None);
@@ -240,7 +254,8 @@ fn sending_of_foreground_background_pings() {
sender: crossbeam_channel::Sender<String>,
}
impl net::PingUploader for FakeUploader {
fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let upload_request = upload_request.capable(|_| true).unwrap();
self.sender.send(upload_request.url).unwrap();
net::UploadResult::http_status(200)
}
@@ -308,7 +323,8 @@ fn sending_of_startup_baseline_ping() {
sender: crossbeam_channel::Sender<String>,
}
impl net::PingUploader for FakeUploader {
fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let upload_request = upload_request.capable(|_| true).unwrap();
self.sender.send(upload_request.url).unwrap();
net::UploadResult::http_status(200)
}
@@ -355,7 +371,8 @@ fn no_dirty_baseline_on_clean_shutdowns() {
sender: crossbeam_channel::Sender<String>,
}
impl net::PingUploader for FakeUploader {
fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let upload_request = upload_request.capable(|_| true).unwrap();
self.sender.send(upload_request.url).unwrap();
net::UploadResult::http_status(200)
}
@@ -578,8 +595,9 @@ fn ping_collection_must_happen_after_concurrently_scheduled_metrics_recordings()
sender: crossbeam_channel::Sender<(String, JsonValue)>,
}
impl net::PingUploader for FakeUploader {
fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
let net::PingUploadRequest { body, url, .. } = upload_request;
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let net::PingUploadRequest { body, url, .. } =
upload_request.capable(|_| true).unwrap();
// Decode the gzipped body.
let mut gzip_decoder = GzDecoder::new(&body[..]);
let mut s = String::with_capacity(body.len());
@@ -663,9 +681,8 @@ fn basic_metrics_should_be_cleared_when_disabling_uploading() {
assert_eq!("TEST VALUE", metric.test_get_value(None).unwrap());
}
// TODO: Should probably move into glean-core.
#[test]
fn core_metrics_should_be_cleared_and_restored_when_disabling_and_enabling_uploading() {
fn core_metrics_are_not_cleared_when_disabling_and_enabling_uploading() {
let _lock = lock_test();
let dir = tempfile::tempdir().unwrap();
@@ -691,12 +708,13 @@ fn core_metrics_should_be_cleared_and_restored_when_disabling_and_enabling_uploa
});
assert!(os_version.test_get_value(None).is_some());
let initial_value = os_version.test_get_value(None).unwrap();
set_upload_enabled(false);
assert!(os_version.test_get_value(None).is_none());
assert_eq!(initial_value, os_version.test_get_value(None).unwrap());
set_upload_enabled(true);
assert!(os_version.test_get_value(None).is_some());
assert_eq!(initial_value, os_version.test_get_value(None).unwrap());
}
#[test]
@@ -712,7 +730,8 @@ fn sending_deletion_ping_if_disabled_outside_of_run() {
sender: crossbeam_channel::Sender<String>,
}
impl net::PingUploader for FakeUploader {
fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let upload_request = upload_request.capable(|_| true).unwrap();
self.sender.send(upload_request.url).unwrap();
net::UploadResult::http_status(200)
}
@@ -757,7 +776,8 @@ fn no_sending_of_deletion_ping_if_unchanged_outside_of_run() {
sender: crossbeam_channel::Sender<String>,
}
impl net::PingUploader for FakeUploader {
fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let upload_request = upload_request.capable(|_| true).unwrap();
self.sender.send(upload_request.url).unwrap();
net::UploadResult::http_status(200)
}
@@ -800,7 +820,8 @@ fn deletion_request_ping_contains_experimentation_id() {
sender: crossbeam_channel::Sender<JsonValue>,
}
impl net::PingUploader for FakeUploader {
fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let upload_request = upload_request.capable(|_| true).unwrap();
let body = upload_request.body;
let mut gzip_decoder = GzDecoder::new(&body[..]);
let mut body_str = String::with_capacity(body.len());
@@ -864,8 +885,9 @@ fn test_sending_of_startup_baseline_ping_with_application_lifetime_metric() {
sender: crossbeam_channel::Sender<(String, JsonValue)>,
}
impl net::PingUploader for FakeUploader {
fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
let net::PingUploadRequest { url, body, .. } = upload_request;
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let net::PingUploadRequest { url, body, .. } =
upload_request.capable(|_| true).unwrap();
// Decode the gzipped body.
let mut gzip_decoder = GzDecoder::new(&body[..]);
let mut s = String::with_capacity(body.len());
@@ -945,7 +967,8 @@ fn setting_debug_view_tag_before_initialization_should_not_crash() {
sender: crossbeam_channel::Sender<Vec<(String, String)>>,
}
impl net::PingUploader for FakeUploader {
fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let upload_request = upload_request.capable(|_| true).unwrap();
self.sender.send(upload_request.headers).unwrap();
net::UploadResult::http_status(200)
}
@@ -991,7 +1014,8 @@ fn setting_source_tags_before_initialization_should_not_crash() {
sender: crossbeam_channel::Sender<Vec<(String, String)>>,
}
impl net::PingUploader for FakeUploader {
fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let upload_request = upload_request.capable(|_| true).unwrap();
self.sender.send(upload_request.headers).unwrap();
net::UploadResult::http_status(200)
}
@@ -1036,7 +1060,8 @@ fn setting_source_tags_after_initialization_should_not_crash() {
sender: crossbeam_channel::Sender<Vec<(String, String)>>,
}
impl net::PingUploader for FakeUploader {
fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let upload_request = upload_request.capable(|_| true).unwrap();
self.sender.send(upload_request.headers).unwrap();
net::UploadResult::http_status(200)
}
@@ -1095,7 +1120,8 @@ fn flipping_upload_enabled_respects_order_of_events() {
sender: crossbeam_channel::Sender<String>,
}
impl net::PingUploader for FakeUploader {
fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let upload_request = upload_request.capable(|_| true).unwrap();
self.sender.send(upload_request.url).unwrap();
net::UploadResult::http_status(200)
}
@@ -1148,7 +1174,8 @@ fn registering_pings_before_init_must_work() {
sender: crossbeam_channel::Sender<String>,
}
impl net::PingUploader for FakeUploader {
fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let upload_request = upload_request.capable(|_| true).unwrap();
self.sender.send(upload_request.url).unwrap();
net::UploadResult::http_status(200)
}
@@ -1189,7 +1216,8 @@ fn test_a_ping_before_submission() {
sender: crossbeam_channel::Sender<String>,
}
impl net::PingUploader for FakeUploader {
fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let upload_request = upload_request.capable(|_| true).unwrap();
self.sender.send(upload_request.url).unwrap();
net::UploadResult::http_status(200)
}
@@ -1291,7 +1319,7 @@ fn signaling_done() {
counter: Arc<Mutex<HashMap<ThreadId, u32>>>,
}
impl net::PingUploader for FakeUploader {
fn upload(&self, _upload_request: net::PingUploadRequest) -> net::UploadResult {
fn upload(&self, _upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let mut map = self.counter.lock().unwrap();
*map.entry(thread::current().id()).or_insert(0) += 1;
@@ -1363,7 +1391,8 @@ fn configure_ping_throttling() {
done: Arc<std::sync::atomic::AtomicBool>,
}
impl net::PingUploader for FakeUploader {
fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let upload_request = upload_request.capable(|_| true).unwrap();
if self.done.load(std::sync::atomic::Ordering::SeqCst) {
// If we've outlived the test, just lie.
return net::UploadResult::http_status(200);
@@ -1438,7 +1467,8 @@ fn pings_ride_along_builtin_pings() {
sender: crossbeam_channel::Sender<String>,
}
impl net::PingUploader for FakeUploader {
fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let upload_request = upload_request.capable(|_| true).unwrap();
self.sender.send(upload_request.url).unwrap();
net::UploadResult::http_status(200)
}

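Every FakeUploader above unwraps the request with `capable(|_| true)`, i.e. it claims support for any capability. A test-only helper factoring out that pattern could look like this (illustrative, not part of the diff):

use glean::net::{CapablePingUploadRequest, PingUploadRequest};

fn accept_all(request: CapablePingUploadRequest) -> PingUploadRequest {
    // The closure claims every capability, so the inner request is
    // always returned and the expect cannot fail.
    request.capable(|_| true).expect("all capabilities accepted")
}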
View File

@@ -0,0 +1,171 @@
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at https://mozilla.org/MPL/2.0/.
//! This integration test should model how the RLB is used when embedded in another Rust application
//! (e.g. FOG/Firefox Desktop).
//!
//! We write a single test scenario per file to avoid any state keeping across runs
//! (different files run as different processes).
mod common;
use std::io::Read;
use crossbeam_channel::bounded;
use crossbeam_channel::Sender;
use crossbeam_channel::TryRecvError;
use flate2::read::GzDecoder;
use glean::net;
use glean::ClientInfoMetrics;
use glean::ConfigurationBuilder;
use pings::nofollows;
use serde_json::Value as JsonValue;
mod pings {
use super::*;
use glean::private::PingType;
use once_cell::sync::Lazy;
#[allow(non_upper_case_globals)]
pub static nofollows: Lazy<PingType> = Lazy::new(|| {
common::PingBuilder::new("nofollows")
.with_send_if_empty(true)
.with_include_info_sections(true) // WITH info sections
.with_enabled(false)
.with_follows_collection_enabled(false)
.with_include_client_id(true)
.build()
});
#[allow(non_upper_case_globals)]
pub static manual: Lazy<PingType> = Lazy::new(|| {
common::PingBuilder::new("manual")
.with_send_if_empty(true)
.build()
});
}
// Define a fake uploader that reports when and what it uploads.
#[derive(Debug)]
struct ReportingUploader {
sender: Sender<JsonValue>,
}
impl net::PingUploader for ReportingUploader {
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let upload_request = upload_request.capable(|_| true).unwrap();
let body = upload_request.body;
let decode = |body: Vec<u8>| {
let mut gzip_decoder = GzDecoder::new(&body[..]);
let mut s = String::with_capacity(body.len());
gzip_decoder
.read_to_string(&mut s)
.ok()
.map(|_| &s[..])
.or_else(|| std::str::from_utf8(&body).ok())
.and_then(|payload| serde_json::from_str(payload).ok())
.unwrap()
};
self.sender.send(decode(body)).unwrap();
net::UploadResult::http_status(200)
}
}
/// Test scenario:
///
/// * Glean has _some_ data already stored.
/// * Glean is started with collection-enabled=false.
/// * Most data is cleared, but not `client_info` (except `client_id`).
/// * Pings with `follows_collection_enabled=false` still have the `client_info` filled in.
#[test]
fn nofollows_contains_client_info_when_collection_disabled() {
common::enable_test_logging();
// Create a custom configuration to use our reporting uploader.
let dir = tempfile::tempdir().unwrap();
let tmpname = dir.path().to_path_buf();
// collection-enabled = true
// Forces database to be created with data, then clears data.
// Keeps `first_run_date`.
// Ensures the _next_ init is NOT a first-run.
let cfg = ConfigurationBuilder::new(true, tmpname.clone(), "glean-fc")
.with_server_endpoint("invalid-test-host")
.with_use_core_mps(false)
.build();
common::initialize(cfg);
glean::set_upload_enabled(false);
glean::shutdown();
// collection-enabled = false
let (tx, rx) = bounded(1);
let cfg = ConfigurationBuilder::new(false, tmpname.clone(), "glean-fc")
.with_server_endpoint("invalid-test-host")
.with_use_core_mps(false)
.with_uploader(ReportingUploader { sender: tx })
.build();
// Same as `common::initialize`.
let client_info = ClientInfoMetrics {
app_build: "1.0.0".to_string(),
app_display_version: "1.0.0".to_string(),
channel: Some("testing".to_string()),
locale: Some("xx-XX".to_string()),
};
glean::test_reset_glean(cfg, client_info, false);
_ = &*pings::nofollows;
_ = &*pings::manual;
nofollows.set_enabled(true);
pings::manual.submit(None);
pings::nofollows.submit(None);
// Wait for the ping to arrive.
let payload = rx.recv().unwrap();
let client_info = payload["client_info"].as_object().unwrap();
// General client info is set
assert!(client_info["app_build"].is_string());
assert!(client_info["architecture"].is_string());
assert!(client_info["os"].is_string());
assert!(client_info["telemetry_sdk_build"].is_string());
// No client_id
assert_eq!(None, client_info.get("client_id"));
// No second ping received.
assert!(matches!(rx.try_recv(), Err(TryRecvError::Empty)));
// Now we enable collection.
// This should give us a client ID.
glean::set_collection_enabled(true);
pings::manual.submit(None);
let payload = rx.recv().unwrap();
let client_info = payload["client_info"].as_object().unwrap();
// General client info is set
assert!(client_info["app_build"].is_string());
assert!(client_info["architecture"].is_string());
assert!(client_info["os"].is_string());
assert!(client_info["telemetry_sdk_build"].is_string());
// A client_id is now present
let client_id = client_info["client_id"].as_str().unwrap();
pings::nofollows.submit(None);
let payload = rx.recv().unwrap();
let client_info = payload["client_info"].as_object().unwrap();
// General client info is set
assert!(client_info["app_build"].is_string());
assert!(client_info["architecture"].is_string());
assert!(client_info["os"].is_string());
assert!(client_info["telemetry_sdk_build"].is_string());
// The nofollows ping carries the same client_id
let nf_client_id = client_info["client_id"].as_str().unwrap();
assert_eq!(client_id, nf_client_id);
glean::shutdown();
}

View File

@@ -0,0 +1,165 @@
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at https://mozilla.org/MPL/2.0/.
//! This integration test should model how the RLB is used when embedded in another Rust application
//! (e.g. FOG/Firefox Desktop).
//!
//! We write a single test scenario per file to avoid any state keeping across runs
//! (different files run as different processes).
mod common;
use std::fs;
use std::io::Read;
use crossbeam_channel::bounded;
use crossbeam_channel::Sender;
use crossbeam_channel::TryRecvError;
use flate2::read::GzDecoder;
use glean::net;
use glean::ClientInfoMetrics;
use glean::ConfigurationBuilder;
use pings::nofollows;
use serde_json::Value as JsonValue;
mod pings {
use super::*;
use glean::private::PingType;
use once_cell::sync::Lazy;
#[allow(non_upper_case_globals)]
pub static nofollows: Lazy<PingType> = Lazy::new(|| {
common::PingBuilder::new("nofollows")
.with_send_if_empty(true)
.with_include_info_sections(true) // WITH info sections
.with_enabled(false)
.with_follows_collection_enabled(false)
.with_include_client_id(true)
.build()
});
#[allow(non_upper_case_globals)]
pub static manual: Lazy<PingType> = Lazy::new(|| {
common::PingBuilder::new("manual")
.with_send_if_empty(true)
.build()
});
}
// Define a fake uploader that reports when and what it uploads.
#[derive(Debug)]
struct ReportingUploader {
sender: Sender<JsonValue>,
}
impl net::PingUploader for ReportingUploader {
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let upload_request = upload_request.capable(|_| true).unwrap();
let body = upload_request.body;
let decode = |body: Vec<u8>| {
let mut gzip_decoder = GzDecoder::new(&body[..]);
let mut s = String::with_capacity(body.len());
gzip_decoder
.read_to_string(&mut s)
.ok()
.map(|_| &s[..])
.or_else(|| std::str::from_utf8(&body).ok())
.and_then(|payload| serde_json::from_str(payload).ok())
.unwrap()
};
self.sender.send(decode(body)).unwrap();
net::UploadResult::http_status(200)
}
}
/// Test scenario:
///
/// * Glean has _some_ data already stored.
/// * Glean is started with collection-enabled=false.
/// * Most data is cleared, but not `client_info` (except `client_id`).
/// * Pings with `follows_collection_enabled=false` still have the `client_info` filled in.
#[test]
fn nofollows_contains_client_info_when_collection_disabled() {
common::enable_test_logging();
// Create a custom configuration to use our reporting uploader.
let dir = tempfile::tempdir().unwrap();
let tmpname = dir.path().to_path_buf();
let db_dir = tmpname.join("db");
fs::create_dir_all(&db_dir).unwrap();
let db_path = db_dir.join("data.safe.bin");
fs::write(db_path, include_bytes!("./near-empty-c0ffee-db.safe.bin")).unwrap();
// collection-enabled = false
let (tx, rx) = bounded(1);
let cfg = ConfigurationBuilder::new(false, tmpname.clone(), "glean-fc")
.with_server_endpoint("invalid-test-host")
.with_use_core_mps(false)
.with_uploader(ReportingUploader { sender: tx })
.build();
// Same as `common::initialize`.
let client_info = ClientInfoMetrics {
app_build: "1.0.0".to_string(),
app_display_version: "1.0.0".to_string(),
channel: Some("testing".to_string()),
locale: Some("xx-XX".to_string()),
};
glean::test_reset_glean(cfg, client_info, false);
_ = &*pings::nofollows;
_ = &*pings::manual;
nofollows.set_enabled(true);
pings::manual.submit(None);
pings::nofollows.submit(None);
// Wait for the ping to arrive.
let payload = rx.recv().unwrap();
let client_info = payload["client_info"].as_object().unwrap();
// General client info is set
assert!(client_info["app_build"].is_string());
assert!(client_info["architecture"].is_string());
assert!(client_info["os"].is_string());
assert!(client_info["telemetry_sdk_build"].is_string());
// No client_id
assert_eq!(None, client_info.get("client_id"));
// No second ping received.
assert!(matches!(rx.try_recv(), Err(TryRecvError::Empty)));
// Now we enable collection.
// This should give us a client ID.
glean::set_collection_enabled(true);
pings::manual.submit(None);
let payload = rx.recv().unwrap();
let client_info = payload["client_info"].as_object().unwrap();
// General client info is set
assert!(client_info["app_build"].is_string());
assert!(client_info["architecture"].is_string());
assert!(client_info["os"].is_string());
assert!(client_info["telemetry_sdk_build"].is_string());
// A client_id is now present
let client_id = client_info["client_id"].as_str().unwrap();
pings::nofollows.submit(None);
let payload = rx.recv().unwrap();
let client_info = payload["client_info"].as_object().unwrap();
// General client info is set
assert!(client_info["app_build"].is_string());
assert!(client_info["architecture"].is_string());
assert!(client_info["os"].is_string());
assert!(client_info["telemetry_sdk_build"].is_string());
// The nofollows ping carries the same client_id
let nf_client_id = client_info["client_id"].as_str().unwrap();
assert_eq!(client_id, nf_client_id);
glean::shutdown();
}

View File

@@ -48,6 +48,81 @@ pub fn initialize(cfg: Configuration) {
locale: Some("xx-XX".to_string()),
};
_ = PingType::new("store1", true, true, true, true, true, vec![], vec![], true);
_ = PingBuilder::new("store1").with_send_if_empty(true).build();
glean::initialize(cfg, client_info);
}
pub struct PingBuilder {
name: String,
include_client_id: bool,
send_if_empty: bool,
precise_timestamps: bool,
include_info_sections: bool,
enabled: bool,
schedules_pings: Vec<String>,
reason_codes: Vec<String>,
follows_collection_enabled: bool,
uploader_capabilities: Vec<String>,
}
impl PingBuilder {
pub fn new(name: &str) -> Self {
Self {
name: name.to_string(),
include_client_id: true,
send_if_empty: false,
precise_timestamps: true,
include_info_sections: true,
enabled: true,
schedules_pings: vec![],
reason_codes: vec![],
follows_collection_enabled: true,
uploader_capabilities: vec![],
}
}
pub fn build(self) -> PingType {
PingType::new(
self.name,
self.include_client_id,
self.send_if_empty,
self.precise_timestamps,
self.include_info_sections,
self.enabled,
self.schedules_pings,
self.reason_codes,
self.follows_collection_enabled,
self.uploader_capabilities,
)
}
pub fn with_send_if_empty(mut self, value: bool) -> Self {
self.send_if_empty = value;
self
}
pub fn with_uploader_capabilities(mut self, value: Vec<String>) -> Self {
self.uploader_capabilities = value;
self
}
pub fn with_include_info_sections(mut self, value: bool) -> Self {
self.include_info_sections = value;
self
}
pub fn with_enabled(mut self, value: bool) -> Self {
self.enabled = value;
self
}
pub fn with_follows_collection_enabled(mut self, value: bool) -> Self {
self.follows_collection_enabled = value;
self
}
pub fn with_include_client_id(mut self, value: bool) -> Self {
self.include_client_id = value;
self
}
}

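A usage sketch of this test-only builder, combining the setters defined above:

let ping = PingBuilder::new("example")
    .with_send_if_empty(true)
    .with_include_client_id(false)
    .with_uploader_capabilities(vec!["capability1".to_string()])
    .build();
ping.submit(None);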
View File

@@ -38,22 +38,15 @@ mod metrics {
}
mod pings {
use super::*;
use glean::private::PingType;
use once_cell::sync::Lazy;
#[allow(non_upper_case_globals)]
pub static validation: Lazy<PingType> = Lazy::new(|| {
glean::private::PingType::new(
"validation",
true,
true,
true,
true,
true,
vec![],
vec![],
true,
)
common::PingBuilder::new("validation")
.with_send_if_empty(true)
.build()
});
}

View File

@@ -39,22 +39,15 @@ mod metrics {
}
mod pings {
use super::*;
use glean::private::PingType;
use once_cell::sync::Lazy;
#[allow(non_upper_case_globals)]
pub static validation: Lazy<PingType> = Lazy::new(|| {
glean::private::PingType::new(
"validation",
true,
true,
true,
true,
true,
vec![],
vec![],
true,
)
common::PingBuilder::new("validation")
.with_send_if_empty(true)
.build()
});
}
@@ -66,7 +59,8 @@ struct ReportingUploader {
}
impl net::PingUploader for ReportingUploader {
fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let upload_request = upload_request.capable(|_| true).unwrap();
let calls = self.calls.fetch_add(1, Ordering::SeqCst);
let body = upload_request.body;
let decode = |body: Vec<u8>| {

Binary file not shown.

View File

@@ -34,22 +34,15 @@ mod metrics {
}
mod pings {
use super::*;
use glean::private::PingType;
use once_cell::sync::Lazy;
#[allow(non_upper_case_globals)]
pub static validation: Lazy<PingType> = Lazy::new(|| {
glean::private::PingType::new(
"validation",
true,
true,
true,
true,
true,
vec![],
vec![],
true,
)
common::PingBuilder::new("validation")
.with_send_if_empty(true)
.build()
});
}

View File

@@ -36,22 +36,15 @@ mod metrics {
}
mod pings {
use super::*;
use glean::private::PingType;
use once_cell::sync::Lazy;
#[allow(non_upper_case_globals)]
pub static validation: Lazy<PingType> = Lazy::new(|| {
glean::private::PingType::new(
"validation",
true,
true,
true,
true,
true,
vec![],
vec![],
true,
)
common::PingBuilder::new("validation")
.with_send_if_empty(true)
.build()
});
}

View File

@@ -2,6 +2,9 @@
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at https://mozilla.org/MPL/2.0/.
mod common;
use crate::common::*;
use std::collections::HashMap;
use std::io::Read;
@@ -9,7 +12,7 @@ use flate2::read::GzDecoder;
use jsonschema_valid::schemas::Draft;
use serde_json::Value;
use glean::net::{PingUploadRequest, UploadResult};
use glean::net::{CapablePingUploadRequest, UploadResult};
use glean::private::*;
use glean::{
traits, ClientInfoMetrics, CommonMetricData, ConfigurationBuilder, HistogramType, MemoryUnit,
@@ -59,7 +62,8 @@ fn validate_against_schema() {
sender: crossbeam_channel::Sender<Vec<u8>>,
}
impl glean::net::PingUploader for ValidatingUploader {
fn upload(&self, ping_request: PingUploadRequest) -> UploadResult {
fn upload(&self, ping_request: CapablePingUploadRequest) -> UploadResult {
let ping_request = ping_request.capable(|_| true).unwrap();
self.sender.send(ping_request.body).unwrap();
UploadResult::http_status(200)
}
@@ -170,17 +174,7 @@ fn validate_against_schema() {
text_metric.set("loooooong text".repeat(100));
// Define a new ping and submit it.
let custom_ping = glean::private::PingType::new(
PING_NAME,
true,
true,
true,
true,
true,
vec![],
vec![],
true,
);
let custom_ping = PingBuilder::new(PING_NAME).with_send_if_empty(true).build();
custom_ping.submit(None);
// Wait for the ping to arrive.

View File

@@ -36,22 +36,15 @@ mod metrics {
}
mod pings {
use super::*;
use glean::private::PingType;
use once_cell::sync::Lazy;
#[allow(non_upper_case_globals)]
pub static validation: Lazy<PingType> = Lazy::new(|| {
glean::private::PingType::new(
"validation",
true,
true,
true,
true,
true,
vec![],
vec![],
true,
)
common::PingBuilder::new("validation")
.with_send_if_empty(true)
.build()
});
}

View File

@@ -92,22 +92,15 @@ pub mod metrics {
}
mod pings {
use super::*;
use glean::private::PingType;
use once_cell::sync::Lazy;
#[allow(non_upper_case_globals)]
pub static validation: Lazy<PingType> = Lazy::new(|| {
glean::private::PingType::new(
"validation",
true,
true,
true,
true,
true,
vec![],
vec![],
true,
)
common::PingBuilder::new("validation")
.with_send_if_empty(true)
.build()
});
}
@@ -119,7 +112,8 @@ struct FakeUploader {
}
impl net::PingUploader for FakeUploader {
fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let upload_request = upload_request.capable(|_| true).unwrap();
let calls = self.calls.fetch_add(1, Ordering::SeqCst);
let body = upload_request.body;
let decode = |body: Vec<u8>| {

View File

@@ -0,0 +1,112 @@
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at https://mozilla.org/MPL/2.0/.
//! This integration test should model how the RLB is used when embedded in another Rust application
//! (e.g. FOG/Firefox Desktop).
//!
//! We write a single test scenario per file to avoid any state keeping across runs
//! (different files run as different processes).
mod common;
use crossbeam_channel::{bounded, Sender};
use glean::net;
use glean::ConfigurationBuilder;
mod pings {
use super::*;
use glean::private::PingType;
use once_cell::sync::Lazy;
#[allow(non_upper_case_globals)]
pub static no_capabilities: Lazy<PingType> = Lazy::new(|| {
common::PingBuilder::new("no-capabilities")
.with_send_if_empty(true)
.build()
});
#[allow(non_upper_case_globals)]
pub static one_capability: Lazy<PingType> = Lazy::new(|| {
common::PingBuilder::new("one-capability")
.with_send_if_empty(true)
.with_uploader_capabilities(vec!["capability1".to_string()])
.build()
});
#[allow(non_upper_case_globals)]
pub static two_capabilities: Lazy<PingType> = Lazy::new(|| {
common::PingBuilder::new("two-capabilities")
.with_send_if_empty(true)
.with_uploader_capabilities(vec!["capability1".to_string(), "capability2".to_string()])
.build()
});
}
// Define a fake uploader that reports when and what it uploads.
#[derive(Debug)]
struct ReportingUploader {
sender: Sender<net::UploadResult>,
}
impl net::PingUploader for ReportingUploader {
fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
let uploader_capabilities: Vec<String> = vec!["capability1".to_string()];
let Some(_upload_request) = upload_request.capable(|capabilities| {
capabilities.iter().all(|required_capability| {
uploader_capabilities
.iter()
.any(|uploader_capability| uploader_capability == required_capability)
})
}) else {
self.sender.send(net::UploadResult::incapable()).unwrap();
return net::UploadResult::incapable();
};
self.sender
.send(net::UploadResult::http_status(200))
.unwrap();
net::UploadResult::http_status(200)
}
}
/// Test scenario: We only upload pings we're capable of.
#[test]
fn interruptible_shutdown() {
common::enable_test_logging();
// Create a custom configuration to use our reporting uploader.
let dir = tempfile::tempdir().unwrap();
let tmpname = dir.path().to_path_buf();
let (tx, rx) = bounded(1);
let cfg = ConfigurationBuilder::new(true, tmpname.clone(), "glean-interruptible-shutdown")
.with_server_endpoint("invalid-test-host")
.with_use_core_mps(false)
.with_uploader(ReportingUploader { sender: tx })
.build();
common::initialize(cfg);
pings::no_capabilities.submit(None);
let result = rx.recv().unwrap();
assert!(
matches!(result, net::UploadResult::HttpStatus { code: 200 }),
"Can upload pings requiring no capabilities."
);
pings::one_capability.submit(None);
let result = rx.recv().unwrap();
assert!(
matches!(result, net::UploadResult::HttpStatus { code: 200 }),
"Can upload pings with matching capability."
);
pings::two_capabilities.submit(None);
let result = rx.recv().unwrap();
assert!(
matches!(result, net::UploadResult::Incapable { .. }),
"Can't upload pings requiring capabilities we don't support."
);
}

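The closure in ReportingUploader implements an all-of check: the inner request is only handed over when every capability the ping requires is supported by the uploader. The same predicate, sketched with a HashSet (illustrative):

use std::collections::HashSet;

fn supports_all(required: &[String], supported: &HashSet<&str>) -> bool {
    // True only when each required capability is offered by the uploader.
    required.iter().all(|cap| supported.contains(cap.as_str()))
}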
View File

@@ -1 +1 @@
{"files":{"Cargo.toml":"ff59cecd6f8a6388c9465f405cfc880e7fa57d3c83454e5c2eb24b406d3d1fb0","README.md":"7f1418b4a7c138ba20bcaea077fe6cf0d6ffbaf6df6b90c80efc52aa0d0e2e9f","build.rs":"49840f26c73c5db19cb4e7f02930e49d7a19648168b83f2313ac1a0303c103df","src/error.rs":"b83cbe8abd22a9d687508d236a2a77e28b3fc6c39673633e5820cc0e3fc86cba","src/interrupt_support.udl":"bac2d5a94b5ae5d1b819b2058b82c541e02b1f75ef157c1eb236bfb4f0c78a05","src/interruptee.rs":"c56f9ac610d0b24a128a907266432287558c4b73f6c24b82674ca7894181d18f","src/lib.rs":"cf44a84310913be5264e1c4a3e004a9f7a6cd82d01a109bb6ac4d6002b5dd560","src/shutdown.rs":"e4b7a89f1ef319646aee3282a0d60465c3dbf571c52a0295f3b1a8909f345818","src/sql.rs":"db9b93fb2fe813ae0af6313082f07fad0e381691290466a7ac67bec14024722d"},"package":null}
{"files":{"Cargo.toml":"f86577c4ceee8cab07cc66a2305629708c0bdf2d3e023ffc4b55344148b1817b","README.md":"7f1418b4a7c138ba20bcaea077fe6cf0d6ffbaf6df6b90c80efc52aa0d0e2e9f","build.rs":"49840f26c73c5db19cb4e7f02930e49d7a19648168b83f2313ac1a0303c103df","src/error.rs":"b83cbe8abd22a9d687508d236a2a77e28b3fc6c39673633e5820cc0e3fc86cba","src/interrupt_support.udl":"bac2d5a94b5ae5d1b819b2058b82c541e02b1f75ef157c1eb236bfb4f0c78a05","src/interruptee.rs":"c56f9ac610d0b24a128a907266432287558c4b73f6c24b82674ca7894181d18f","src/lib.rs":"cf44a84310913be5264e1c4a3e004a9f7a6cd82d01a109bb6ac4d6002b5dd560","src/shutdown.rs":"e4b7a89f1ef319646aee3282a0d60465c3dbf571c52a0295f3b1a8909f345818","src/sql.rs":"db9b93fb2fe813ae0af6313082f07fad0e381691290466a7ac67bec14024722d"},"package":null}

View File

@@ -41,8 +41,8 @@ features = [
]
[dependencies.uniffi]
version = "0.28.2"
version = "0.29.0"
[build-dependencies.uniffi]
version = "0.28.2"
version = "0.29.0"
features = ["build"]

View File

@@ -1 +1 @@
{"files":{"Cargo.toml":"bb96760f2d45e86313dbec93a3210e5073c4ee74116097bb5ca45ba9c5b049a6","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"48573443063fa4e0786c3b46f42b6efd1f171c6b73408a64afc1b34de89f31fe","benches/bench.rs":"636f3093bd461210ad3063289d455f90669c4a1be3273bcd30898de39f02c641","src/lib.rs":"da13f0d5dcba3bb2971f67b6856ea6f2e3cbdc31d47f7042d7f131b08bb7de85","src/udiv128.rs":"d28c1872c37ee2185931babcb20a221b8706a5aa8abc4963419763888023ff17","tests/test.rs":"f7404fc5f7cd1bdaf74a3b64a70d5b30586241ddc1ce2c82bd1b564999fcce0e"},"package":"fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"}
{"files":{"Cargo.lock":"d609730a66530a060198a10acf8ff9f499be3fe740fefec2cf4e3026a983038e","Cargo.toml":"c1d45a6aa2324a0862b0e6c8100e8f595616f91612f915f63c862010954667bc","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"48573443063fa4e0786c3b46f42b6efd1f171c6b73408a64afc1b34de89f31fe","benches/bench.rs":"636f3093bd461210ad3063289d455f90669c4a1be3273bcd30898de39f02c641","src/lib.rs":"ef9f1a8665a678cf5b77bcaa628d00538d620de0c84fd2a8b92323a314a95636","src/udiv128.rs":"d28c1872c37ee2185931babcb20a221b8706a5aa8abc4963419763888023ff17","tests/test.rs":"aa1e910573a1d847d39773b4a2e4c597a8d3810070332673df0f6864cab24807"},"package":"4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"}

third_party/rust/itoa/Cargo.lock (generated, vendored; new file, 56 lines)
View File

@@ -0,0 +1,56 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
[[package]]
name = "itoa"
version = "1.0.15"
dependencies = [
"no-panic 0.1.33 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "no-panic"
version = "0.1.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 1.0.94 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 2.0.99 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "proc-macro2"
version = "1.0.94"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"unicode-ident 1.0.17 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "quote"
version = "1.0.39"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 1.0.94 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "syn"
version = "2.0.99"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 1.0.94 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-ident 1.0.17 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "unicode-ident"
version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
[metadata]
"checksum no-panic 0.1.33 (registry+https://github.com/rust-lang/crates.io-index)" = "8f7da86466fe446079286ef4b2f6d789755b610a9d85da8477633f734d2697e8"
"checksum proc-macro2 1.0.94 (registry+https://github.com/rust-lang/crates.io-index)" = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84"
"checksum quote 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)" = "c1f1914ce909e1658d9907913b4b91947430c7d9be598b15a1912935b8c04801"
"checksum syn 2.0.99 (registry+https://github.com/rust-lang/crates.io-index)" = "e02e925281e18ffd9d640e234264753c43edc62d64b2d4cf898f1bc5e75f3fc2"
"checksum unicode-ident 1.0.17 (registry+https://github.com/rust-lang/crates.io-index)" = "00e2473a93778eb0bad35909dff6a10d28e63f792f16ed15e404fca9d5eeedbe"

View File

@@ -13,12 +13,18 @@
edition = "2018"
rust-version = "1.36"
name = "itoa"
version = "1.0.5"
version = "1.0.15"
authors = ["David Tolnay <dtolnay@gmail.com>"]
build = false
exclude = [
"performance.png",
"chart/**",
]
autolib = false
autobins = false
autoexamples = false
autotests = false
autobenches = false
description = "Fast integer primitive to string conversion"
documentation = "https://docs.rs/itoa"
readme = "README.md"
@@ -26,13 +32,32 @@ keywords = ["integer"]
categories = [
"value-formatting",
"no-std",
"no-std::no-alloc",
]
license = "MIT OR Apache-2.0"
repository = "https://github.com/dtolnay/itoa"
[package.metadata.docs.rs]
rustdoc-args = [
"--generate-link-to-definition",
"--extern-html-root-url=core=https://doc.rust-lang.org",
"--extern-html-root-url=alloc=https://doc.rust-lang.org",
"--extern-html-root-url=std=https://doc.rust-lang.org",
]
targets = ["x86_64-unknown-linux-gnu"]
[lib]
name = "itoa"
path = "src/lib.rs"
[[test]]
name = "test"
path = "tests/test.rs"
[[bench]]
name = "bench"
path = "benches/bench.rs"
[dependencies.no-panic]
version = "0.1"
optional = true

View File

@@ -174,28 +174,3 @@ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@@ -13,7 +13,6 @@
//! See also [`ryu`] for printing floating point primitives.
//!
//! [libcore]: https://github.com/rust-lang/rust/blob/b8214dc6c6fc20d0a660fb5700dca9ebf51ebe89/src/libcore/fmt/num.rs#L201-L254
//! [`core::fmt::Formatter`]: https://doc.rust-lang.org/std/fmt/struct.Formatter.html
//! [`ryu`]: https://github.com/dtolnay/ryu
//!
//! # Example
@@ -30,18 +29,23 @@
//!
//! ![performance](https://raw.githubusercontent.com/dtolnay/itoa/master/performance.png)
#![doc(html_root_url = "https://docs.rs/itoa/1.0.5")]
#![doc(html_root_url = "https://docs.rs/itoa/1.0.15")]
#![no_std]
#![allow(
clippy::cast_lossless,
clippy::cast_possible_truncation,
clippy::cast_possible_wrap,
clippy::cast_sign_loss,
clippy::expl_impl_clone_on_copy,
clippy::must_use_candidate,
clippy::needless_doctest_main,
clippy::unreadable_literal
)]
mod udiv128;
use core::mem::{self, MaybeUninit};
use core::hint;
use core::mem::MaybeUninit;
use core::{ptr, slice, str};
#[cfg(feature = "no-panic")]
use no_panic::no_panic;
@@ -57,7 +61,7 @@ use no_panic::no_panic;
/// assert_eq!(printed, "1234");
/// ```
pub struct Buffer {
bytes: [MaybeUninit<u8>; I128_MAX_LEN],
bytes: [MaybeUninit<u8>; i128::MAX_STR_LEN],
}
impl Default for Buffer {
@@ -67,8 +71,11 @@ impl Default for Buffer {
}
}
impl Copy for Buffer {}
impl Clone for Buffer {
#[inline]
#[allow(clippy::non_canonical_clone_impl)] // false positive https://github.com/rust-lang/rust-clippy/issues/11072
fn clone(&self) -> Self {
Buffer::new()
}
@@ -80,7 +87,7 @@ impl Buffer {
#[inline]
#[cfg_attr(feature = "no-panic", no_panic)]
pub fn new() -> Buffer {
let bytes = [MaybeUninit::<u8>::uninit(); I128_MAX_LEN];
let bytes = [MaybeUninit::<u8>::uninit(); i128::MAX_STR_LEN];
Buffer { bytes }
}
@@ -88,27 +95,37 @@ impl Buffer {
/// representation within the buffer.
#[cfg_attr(feature = "no-panic", no_panic)]
pub fn format<I: Integer>(&mut self, i: I) -> &str {
i.write(unsafe {
&mut *(&mut self.bytes as *mut [MaybeUninit<u8>; I128_MAX_LEN]
let string = i.write(unsafe {
&mut *(&mut self.bytes as *mut [MaybeUninit<u8>; i128::MAX_STR_LEN]
as *mut <I as private::Sealed>::Buffer)
})
});
if string.len() > I::MAX_STR_LEN {
unsafe { hint::unreachable_unchecked() };
}
string
}
}
/// An integer that can be written into an [`itoa::Buffer`][Buffer].
///
/// This trait is sealed and cannot be implemented for types outside of itoa.
pub trait Integer: private::Sealed {}
pub trait Integer: private::Sealed {
/// The maximum length of string that formatting an integer of this type can
/// produce on the current target platform.
const MAX_STR_LEN: usize;
}
// Seal to prevent downstream implementations of the Integer trait.
mod private {
#[doc(hidden)]
pub trait Sealed: Copy {
#[doc(hidden)]
type Buffer: 'static;
fn write(self, buf: &mut Self::Buffer) -> &str;
}
}
const DEC_DIGITS_LUT: &[u8] = b"\
const DEC_DIGITS_LUT: [u8; 200] = *b"\
0001020304050607080910111213141516171819\
2021222324252627282930313233343536373839\
4041424344454647484950515253545556575859\
@@ -118,8 +135,10 @@ const DEC_DIGITS_LUT: &[u8] = b"\
// Adaptation of the original implementation at
// https://github.com/rust-lang/rust/blob/b8214dc6c6fc20d0a660fb5700dca9ebf51ebe89/src/libcore/fmt/num.rs#L188-L266
macro_rules! impl_Integer {
($($max_len:expr => $t:ident),* as $conv_fn:ident) => {$(
impl Integer for $t {}
($t:ty[len = $max_len:expr] as $large_unsigned:ty) => {
impl Integer for $t {
const MAX_STR_LEN: usize = $max_len;
}
impl private::Sealed for $t {
type Buffer = [MaybeUninit<u8>; $max_len];
@@ -130,98 +149,109 @@ macro_rules! impl_Integer {
fn write(self, buf: &mut [MaybeUninit<u8>; $max_len]) -> &str {
let is_nonnegative = self >= 0;
let mut n = if is_nonnegative {
self as $conv_fn
self as $large_unsigned
} else {
// convert the negative num to positive by summing 1 to it's 2 complement
(!(self as $conv_fn)).wrapping_add(1)
// Convert negative number to positive by summing 1 to its two's complement.
(!(self as $large_unsigned)).wrapping_add(1)
};
let mut curr = buf.len() as isize;
let mut curr = buf.len();
let buf_ptr = buf.as_mut_ptr() as *mut u8;
let lut_ptr = DEC_DIGITS_LUT.as_ptr();
unsafe {
// need at least 16 bits for the 4-characters-at-a-time to work.
if mem::size_of::<$t>() >= 2 {
// eagerly decode 4 characters at a time
while n >= 10000 {
let rem = (n % 10000) as isize;
n /= 10000;
// Render 4 digits at a time.
while n >= 10000 {
let rem = n % 10000;
n /= 10000;
let d1 = (rem / 100) << 1;
let d2 = (rem % 100) << 1;
curr -= 4;
ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2);
ptr::copy_nonoverlapping(lut_ptr.offset(d2), buf_ptr.offset(curr + 2), 2);
}
}
// if we reach here numbers are <= 9999, so at most 4 chars long
let mut n = n as isize; // possibly reduce 64bit math
// decode 2 more chars, if > 2 chars
if n >= 100 {
let d1 = (n % 100) << 1;
n /= 100;
curr -= 2;
ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2);
}
// decode last 1 or 2 chars
if n < 10 {
curr -= 1;
*buf_ptr.offset(curr) = (n as u8) + b'0';
} else {
let d1 = n << 1;
curr -= 2;
ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2);
}
if !is_nonnegative {
curr -= 1;
*buf_ptr.offset(curr) = b'-';
let d1 = ((rem / 100) << 1) as usize;
let d2 = ((rem % 100) << 1) as usize;
curr -= 4;
unsafe {
ptr::copy_nonoverlapping(lut_ptr.add(d1), buf_ptr.add(curr), 2);
ptr::copy_nonoverlapping(lut_ptr.add(d2), buf_ptr.add(curr + 2), 2);
}
}
let len = buf.len() - curr as usize;
let bytes = unsafe { slice::from_raw_parts(buf_ptr.offset(curr), len) };
// Render 2 more digits, if >2 digits.
if n >= 100 {
let d1 = ((n % 100) << 1) as usize;
n /= 100;
curr -= 2;
unsafe {
ptr::copy_nonoverlapping(lut_ptr.add(d1), buf_ptr.add(curr), 2);
}
}
// Render last 1 or 2 digits.
if n < 10 {
curr -= 1;
unsafe {
*buf_ptr.add(curr) = (n as u8) + b'0';
}
} else {
let d1 = (n << 1) as usize;
curr -= 2;
unsafe {
ptr::copy_nonoverlapping(lut_ptr.add(d1), buf_ptr.add(curr), 2);
}
}
if !is_nonnegative {
curr -= 1;
unsafe {
*buf_ptr.add(curr) = b'-';
}
}
let len = buf.len() - curr;
let bytes = unsafe { slice::from_raw_parts(buf_ptr.add(curr), len) };
unsafe { str::from_utf8_unchecked(bytes) }
}
}
)*};
};
}
const I8_MAX_LEN: usize = 4;
const U8_MAX_LEN: usize = 3;
const I16_MAX_LEN: usize = 6;
const U16_MAX_LEN: usize = 5;
const I32_MAX_LEN: usize = 11;
const U32_MAX_LEN: usize = 10;
const I64_MAX_LEN: usize = 20;
const U64_MAX_LEN: usize = 20;
impl_Integer!(i8[len = 4] as u32);
impl_Integer!(u8[len = 3] as u32);
impl_Integer!(i16[len = 6] as u32);
impl_Integer!(u16[len = 5] as u32);
impl_Integer!(i32[len = 11] as u32);
impl_Integer!(u32[len = 10] as u32);
impl_Integer!(i64[len = 20] as u64);
impl_Integer!(u64[len = 20] as u64);
impl_Integer!(
I8_MAX_LEN => i8,
U8_MAX_LEN => u8,
I16_MAX_LEN => i16,
U16_MAX_LEN => u16,
I32_MAX_LEN => i32,
U32_MAX_LEN => u32
as u32);
macro_rules! impl_Integer_size {
($t:ty as $primitive:ident #[cfg(target_pointer_width = $width:literal)]) => {
#[cfg(target_pointer_width = $width)]
impl Integer for $t {
const MAX_STR_LEN: usize = <$primitive as Integer>::MAX_STR_LEN;
}
impl_Integer!(I64_MAX_LEN => i64, U64_MAX_LEN => u64 as u64);
#[cfg(target_pointer_width = $width)]
impl private::Sealed for $t {
type Buffer = <$primitive as private::Sealed>::Buffer;
#[cfg(target_pointer_width = "16")]
impl_Integer!(I16_MAX_LEN => isize, U16_MAX_LEN => usize as u16);
#[inline]
#[cfg_attr(feature = "no-panic", no_panic)]
fn write(self, buf: &mut Self::Buffer) -> &str {
(self as $primitive).write(buf)
}
}
};
}
#[cfg(target_pointer_width = "32")]
impl_Integer!(I32_MAX_LEN => isize, U32_MAX_LEN => usize as u32);
#[cfg(target_pointer_width = "64")]
impl_Integer!(I64_MAX_LEN => isize, U64_MAX_LEN => usize as u64);
impl_Integer_size!(isize as i16 #[cfg(target_pointer_width = "16")]);
impl_Integer_size!(usize as u16 #[cfg(target_pointer_width = "16")]);
impl_Integer_size!(isize as i32 #[cfg(target_pointer_width = "32")]);
impl_Integer_size!(usize as u32 #[cfg(target_pointer_width = "32")]);
impl_Integer_size!(isize as i64 #[cfg(target_pointer_width = "64")]);
impl_Integer_size!(usize as u64 #[cfg(target_pointer_width = "64")]);
macro_rules! impl_Integer128 {
($($max_len:expr => $t:ident),*) => {$(
impl Integer for $t {}
($t:ty[len = $max_len:expr]) => {
impl Integer for $t {
const MAX_STR_LEN: usize = $max_len;
}
impl private::Sealed for $t {
type Buffer = [MaybeUninit<u8>; $max_len];
@@ -234,57 +264,66 @@ macro_rules! impl_Integer128 {
let n = if is_nonnegative {
self as u128
} else {
// convert the negative num to positive by summing 1 to it's 2 complement
// Convert negative number to positive by summing 1 to its two's complement.
(!(self as u128)).wrapping_add(1)
};
let mut curr = buf.len() as isize;
let mut curr = buf.len();
let buf_ptr = buf.as_mut_ptr() as *mut u8;
unsafe {
// Divide by 10^19 which is the highest power less than 2^64.
// Divide by 10^19 which is the highest power less than 2^64.
let (n, rem) = udiv128::udivmod_1e19(n);
let buf1 = unsafe {
buf_ptr.add(curr - u64::MAX_STR_LEN) as *mut [MaybeUninit<u8>; u64::MAX_STR_LEN]
};
curr -= rem.write(unsafe { &mut *buf1 }).len();
if n != 0 {
// Memset the base10 leading zeros of rem.
let target = buf.len() - 19;
unsafe {
ptr::write_bytes(buf_ptr.add(target), b'0', curr - target);
}
curr = target;
// Divide by 10^19 again.
let (n, rem) = udiv128::udivmod_1e19(n);
let buf1 = buf_ptr.offset(curr - U64_MAX_LEN as isize) as *mut [MaybeUninit<u8>; U64_MAX_LEN];
curr -= rem.write(&mut *buf1).len() as isize;
let buf2 = unsafe {
buf_ptr.add(curr - u64::MAX_STR_LEN)
as *mut [MaybeUninit<u8>; u64::MAX_STR_LEN]
};
curr -= rem.write(unsafe { &mut *buf2 }).len();
if n != 0 {
// Memset the base10 leading zeros of rem.
let target = buf.len() as isize - 19;
ptr::write_bytes(buf_ptr.offset(target), b'0', (curr - target) as usize);
// Memset the leading zeros.
let target = buf.len() - 38;
unsafe {
ptr::write_bytes(buf_ptr.add(target), b'0', curr - target);
}
curr = target;
// Divide by 10^19 again.
let (n, rem) = udiv128::udivmod_1e19(n);
let buf2 = buf_ptr.offset(curr - U64_MAX_LEN as isize) as *mut [MaybeUninit<u8>; U64_MAX_LEN];
curr -= rem.write(&mut *buf2).len() as isize;
if n != 0 {
// Memset the leading zeros.
let target = buf.len() as isize - 38;
ptr::write_bytes(buf_ptr.offset(target), b'0', (curr - target) as usize);
curr = target;
// There is at most one digit left
// because u128::max / 10^19 / 10^19 is 3.
curr -= 1;
*buf_ptr.offset(curr) = (n as u8) + b'0';
// There is at most one digit left
// because u128::MAX / 10^19 / 10^19 is 3.
curr -= 1;
unsafe {
*buf_ptr.add(curr) = (n as u8) + b'0';
}
}
if !is_nonnegative {
curr -= 1;
*buf_ptr.offset(curr) = b'-';
}
let len = buf.len() - curr as usize;
let bytes = slice::from_raw_parts(buf_ptr.offset(curr), len);
str::from_utf8_unchecked(bytes)
}
if !is_nonnegative {
curr -= 1;
unsafe {
*buf_ptr.add(curr) = b'-';
}
}
let len = buf.len() - curr;
let bytes = unsafe { slice::from_raw_parts(buf_ptr.add(curr), len) };
unsafe { str::from_utf8_unchecked(bytes) }
}
}
)*};
};
}
const U128_MAX_LEN: usize = 39;
const I128_MAX_LEN: usize = 40;
impl_Integer128!(I128_MAX_LEN => i128, U128_MAX_LEN => u128);
impl_Integer128!(i128[len = 40]);
impl_Integer128!(u128[len = 39]);

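A quick usage sketch of the vendored crate after this upgrade: `Buffer::new`/`format` are unchanged, while the new `Integer::MAX_STR_LEN` associated constant (added in the diff above) bounds the formatted length.

fn main() {
    let mut buffer = itoa::Buffer::new();
    let printed = buffer.format(i128::MIN);
    assert_eq!(printed, "-170141183460469231731687303715884105728");
    assert!(printed.len() <= <i128 as itoa::Integer>::MAX_STR_LEN);
}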
View File

@@ -26,4 +26,5 @@ test! {
test_u128_0(0u128, "0")
test_u128_max(u128::max_value(), "340282366920938463463374607431768211455")
test_i128_min(i128::min_value(), "-170141183460469231731687303715884105728")
test_i128_max(i128::max_value(), "170141183460469231731687303715884105727")
}

View File

@@ -1 +1 @@
{"files":{"Cargo.toml":"6765e7643f3e8ad46145166225fa93a08e8a5eb327eca1460340b29c29cd73f5","src/bin/generate-test-data.rs":"7cc80b56929091d02675b9dd9bf4c657a95cda502656cf2ec8d91f56d7a393c7","src/db.rs":"d9dd44501ee3b19c696d8830d3036f7bfe0e8ad7751d5a057f5d8295ebf0bd4f","src/error.rs":"3a1308e65440769d9435fc95528d4ef42994c84d88e1da04ba058491dea387c4","src/ingest.rs":"9f8f7584be5ed27dc962d9137eaa4730948356c724f687e03048a8370c9ed889","src/interest.rs":"e4369a1280867438bca12746f71288a03b4d5e180e156f4bc0335046012565f7","src/lib.rs":"1e57d2f7ca6452f6fe6e5f89a21e33292a86a2a5174b2e541473a69060fa4a32","src/ranker.rs":"e71414fe79ade26f3c79dceb5211af4f37984a9cded8c938dc1da8d8d28c2ad3","src/rs.rs":"fb12d29f75a59af1bfdd320ad01f9bb5a03cf5a3f84738ebdaccb67b84695eef","src/schema.rs":"38ea82679da2729a571aad936f96469e732ec1c104d7c21fd869842f7a5f30a3","src/url_hash.rs":"2e908316fb70923644d1990dbf470d69ce2f5e99b0c5c3d95ec691590be8ffa5","test-data":"1ef2cd092d59e7e126cd4a514af983d449ed9f9c98708702fd237464a76c2b5e"},"package":null}
{"files":{"Cargo.toml":"317f4e90836bae2153c14c3e564337f9bbb8defb20c18b877a8122fc427f00c8","src/bin/generate-test-data.rs":"7cc80b56929091d02675b9dd9bf4c657a95cda502656cf2ec8d91f56d7a393c7","src/db.rs":"d9dd44501ee3b19c696d8830d3036f7bfe0e8ad7751d5a057f5d8295ebf0bd4f","src/error.rs":"3a1308e65440769d9435fc95528d4ef42994c84d88e1da04ba058491dea387c4","src/ingest.rs":"09ac45d68470512f13a58f832c64d2a3dd85230b6454b3fd8fbecc6c1c735a7b","src/interest.rs":"e4369a1280867438bca12746f71288a03b4d5e180e156f4bc0335046012565f7","src/lib.rs":"80e69c16d0b84ae4b7434cd7cf43ade1c2f556bfa166bfb72a250b1eca8de075","src/ranker.rs":"e71414fe79ade26f3c79dceb5211af4f37984a9cded8c938dc1da8d8d28c2ad3","src/rs.rs":"3ba6ad925e62bbce1790598cb429328191393ec89f2ebc3d1fbf26b0db5de955","src/schema.rs":"38ea82679da2729a571aad936f96469e732ec1c104d7c21fd869842f7a5f30a3","src/url_hash.rs":"2e908316fb70923644d1990dbf470d69ce2f5e99b0c5c3d95ec691590be8ffa5","test-data":"1ef2cd092d59e7e126cd4a514af983d449ed9f9c98708702fd237464a76c2b5e"},"package":null}

View File

@@ -68,8 +68,8 @@ features = ["derive"]
path = "../support/sql"
[dependencies.uniffi]
version = "0.28.2"
version = "0.29.0"
[build-dependencies.uniffi]
version = "0.28.2"
version = "0.29.0"
features = ["build"]

Some files were not shown because too many files have changed in this diff.