diff --git a/.cargo/config.toml.in b/.cargo/config.toml.in
index 65f5255a10c1..7fd98731d576 100644
--- a/.cargo/config.toml.in
+++ b/.cargo/config.toml.in
@@ -70,9 +70,9 @@ git = "https://github.com/jfkthame/mapped_hyph.git"
rev = "eff105f6ad7ec9b79816cfc1985a28e5340ad14b"
replace-with = "vendored-sources"
-[source."git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94"]
+[source."git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a"]
git = "https://github.com/mozilla/application-services"
-rev = "8e84c588a5cc2686973c5026ecd240d6275d7e94"
+rev = "d773da92641d92930b7308300e9fc2746a05ce6a"
replace-with = "vendored-sources"
[source."git+https://github.com/mozilla/audioipc?rev=e6f44a2bd1e57d11dfc737632a9e849077632330"]
diff --git a/Cargo.lock b/Cargo.lock
index 4b55fc7614ad..806b08b9ce03 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -189,6 +189,38 @@ dependencies = [
"libc",
]
+[[package]]
+name = "askama"
+version = "0.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47cbc3cf73fa8d9833727bbee4835ba5c421a0d65b72daf9a7b5d0e0f9cfb57e"
+dependencies = [
+ "askama_derive",
+ "askama_escape",
+]
+
+[[package]]
+name = "askama_derive"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c22fbe0413545c098358e56966ff22cdd039e10215ae213cfbd65032b119fc94"
+dependencies = [
+ "basic-toml",
+ "mime",
+ "mime_guess",
+ "nom",
+ "proc-macro2",
+ "quote",
+ "serde",
+ "syn",
+]
+
+[[package]]
+name = "askama_escape"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "619743e34b5ba4e9703bba34deac3427c72507c7159f5fd030aea8cac0cfe341"
+
[[package]]
name = "async-task"
version = "4.3.0"
@@ -435,7 +467,7 @@ dependencies = [
"proc-macro2",
"quote",
"regex",
- "rustc-hash 1.999.999",
+ "rustc-hash",
"shlex",
"syn",
]
@@ -902,7 +934,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e"
dependencies = [
"termcolor",
- "unicode-width 0.1.999",
+ "unicode-width",
]
[[package]]
@@ -1783,7 +1815,7 @@ dependencies = [
[[package]]
name = "error-support"
version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
+source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
dependencies = [
"error-support-macros",
"lazy_static",
@@ -1795,7 +1827,7 @@ dependencies = [
[[package]]
name = "error-support-macros"
version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
+source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
dependencies = [
"proc-macro2",
"quote",
@@ -1912,7 +1944,7 @@ dependencies = [
[[package]]
name = "firefox-versioning"
version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
+source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
dependencies = [
"serde_json",
"thiserror 1.999.999",
@@ -1964,7 +1996,7 @@ dependencies = [
"fluent-syntax",
"intl-memoizer",
"intl_pluralrules",
- "rustc-hash 1.999.999",
+ "rustc-hash",
"self_cell",
"smallvec",
"unic-langid",
@@ -1981,7 +2013,7 @@ dependencies = [
"fluent-bundle",
"futures",
"once_cell",
- "rustc-hash 1.999.999",
+ "rustc-hash",
"unic-langid",
]
@@ -2565,9 +2597,9 @@ dependencies = [
[[package]]
name = "glean"
-version = "64.0.0"
+version = "63.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba92338cfd9fb34b00c02c6da8e22936b41835eb02ab5462d3d88cc4b6249c35"
+checksum = "e2afa6754943cac5243099efd0d26e89cc8e06f1585776ba14ab0c6ee99e1f71"
dependencies = [
"crossbeam-channel",
"glean-core",
@@ -2579,9 +2611,9 @@ dependencies = [
[[package]]
name = "glean-core"
-version = "64.0.0"
+version = "63.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "92a2cbf41fbb9996b14fc1721b8bd06e669589de05e6efc20a24bab14285623a"
+checksum = "53cd53bb7a3b89b17d3989e95dd808b137ff47c504d1d19f14cb0d820cc2f42e"
dependencies = [
"android_logger",
"bincode",
@@ -3211,7 +3243,7 @@ dependencies = [
[[package]]
name = "interrupt-support"
version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
+source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
dependencies = [
"lazy_static",
"parking_lot",
@@ -3293,9 +3325,9 @@ dependencies = [
[[package]]
name = "itoa"
-version = "1.0.15"
+version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
+checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
[[package]]
name = "jexl-eval"
@@ -3431,7 +3463,7 @@ dependencies = [
"futures",
"pin-project-lite",
"replace_with",
- "rustc-hash 1.999.999",
+ "rustc-hash",
"unic-langid",
]
@@ -4488,7 +4520,7 @@ dependencies = [
"indexmap",
"log",
"num-traits",
- "rustc-hash 1.999.999",
+ "rustc-hash",
"serde",
"spirv",
"strum",
@@ -4917,7 +4949,7 @@ checksum = "d01a5bd0424d00070b0098dd17ebca6f961a959dead1dbcbbbc1d1cd8d3deeba"
[[package]]
name = "payload-support"
version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
+source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
dependencies = [
"serde",
"serde_derive",
@@ -5413,7 +5445,7 @@ checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da"
[[package]]
name = "relevancy"
version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
+source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
dependencies = [
"anyhow",
"base64 0.21.999",
@@ -5438,7 +5470,7 @@ dependencies = [
[[package]]
name = "remote_settings"
version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
+source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
dependencies = [
"anyhow",
"camino",
@@ -5483,45 +5515,6 @@ dependencies = [
"cache-padded",
]
-[[package]]
-name = "rinja"
-version = "0.3.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3dc4940d00595430b3d7d5a01f6222b5e5b51395d1120bdb28d854bb8abb17a5"
-dependencies = [
- "itoa",
- "rinja_derive",
-]
-
-[[package]]
-name = "rinja_derive"
-version = "0.3.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08d9ed0146aef6e2825f1b1515f074510549efba38d71f4554eec32eb36ba18b"
-dependencies = [
- "basic-toml",
- "memchr",
- "mime",
- "mime_guess",
- "proc-macro2",
- "quote",
- "rinja_parser",
- "rustc-hash 2.1.1",
- "serde",
- "syn",
-]
-
-[[package]]
-name = "rinja_parser"
-version = "0.3.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "93f9a866e2e00a7a1fb27e46e9e324a6f7c0e7edc4543cae1d38f4e4a100c610"
-dependencies = [
- "memchr",
- "nom",
- "serde",
-]
-
[[package]]
name = "rkv"
version = "0.19.0"
@@ -5672,16 +5665,9 @@ checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342"
[[package]]
name = "rustc-hash"
-version = "1.999.999"
-dependencies = [
- "rustc-hash 2.1.1",
-]
-
-[[package]]
-name = "rustc-hash"
-version = "2.1.1"
+version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
+checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
[[package]]
name = "rustc_version"
@@ -5767,7 +5753,7 @@ dependencies = [
[[package]]
name = "search"
version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
+source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
dependencies = [
"error-support",
"firefox-versioning",
@@ -6057,7 +6043,7 @@ dependencies = [
[[package]]
name = "sql-support"
version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
+source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
dependencies = [
"interrupt-support",
"lazy_static",
@@ -6256,7 +6242,7 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
[[package]]
name = "suggest"
version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
+source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
dependencies = [
"anyhow",
"chrono",
@@ -6308,7 +6294,7 @@ dependencies = [
[[package]]
name = "sync-guid"
version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
+source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
dependencies = [
"base64 0.21.999",
"rand",
@@ -6319,7 +6305,7 @@ dependencies = [
[[package]]
name = "sync15"
version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
+source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
dependencies = [
"anyhow",
"error-support",
@@ -6359,7 +6345,7 @@ dependencies = [
[[package]]
name = "tabs"
version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
+source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
dependencies = [
"anyhow",
"error-support",
@@ -6685,7 +6671,7 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6d3364c5e96cb2ad1603037ab253ddd34d7fb72a58bdddf4b7350760fc69a46"
dependencies = [
- "rustc-hash 1.999.999",
+ "rustc-hash",
]
[[package]]
@@ -6703,7 +6689,7 @@ checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
[[package]]
name = "types"
version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
+source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
dependencies = [
"rusqlite",
"serde",
@@ -6780,22 +6766,15 @@ checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"
[[package]]
name = "unicode-width"
-version = "0.1.999"
-dependencies = [
- "unicode-width 0.2.0",
-]
-
-[[package]]
-name = "unicode-width"
-version = "0.2.0"
+version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd"
+checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
[[package]]
name = "uniffi"
-version = "0.29.0"
+version = "0.28.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba62a57e90f9baed5ad02a71a0870180fa1cc35499093b2d21be2edfb68ec0f7"
+checksum = "51ce6280c581045879e11b400bae14686a819df22b97171215d15549efa04ddb"
dependencies = [
"anyhow",
"cargo_metadata",
@@ -6810,12 +6789,12 @@ name = "uniffi-bindgen-gecko-js"
version = "0.1.0"
dependencies = [
"anyhow",
+ "askama",
"camino",
"cargo_metadata",
"clap",
"extend",
"heck",
- "rinja",
"serde",
"textwrap",
"toml",
@@ -6915,11 +6894,12 @@ dependencies = [
[[package]]
name = "uniffi_bindgen"
-version = "0.29.0"
+version = "0.28.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2242f35214f1e0e3b47c495d340c69f649f9a9ece3a943a29e275686cc884533"
+checksum = "5e9f25730c9db2e878521d606f54e921edb719cdd94d735e7f97705d6796d024"
dependencies = [
"anyhow",
+ "askama",
"camino",
"cargo_metadata",
"fs-err",
@@ -6928,7 +6908,6 @@ dependencies = [
"heck",
"once_cell",
"paste",
- "rinja",
"serde",
"textwrap",
"toml",
@@ -6939,9 +6918,9 @@ dependencies = [
[[package]]
name = "uniffi_build"
-version = "0.29.0"
+version = "0.28.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c887a6c9a2857d8dc2ab0c8d578e8aa4978145b4fd65ed44296341e89aebc3cc"
+checksum = "88dba57ac699bd8ec53d6a352c8dd0e479b33f698c5659831bb1e4ce468c07bd"
dependencies = [
"anyhow",
"camino",
@@ -6949,34 +6928,36 @@ dependencies = [
]
[[package]]
-name = "uniffi_core"
-version = "0.29.0"
+name = "uniffi_checksum_derive"
+version = "0.28.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cad9fbdeb7ae4daf8d0f7704a3b638c37018eb16bb701e30fa17a2dd3e2d39c1"
-dependencies = [
- "anyhow",
- "bytes",
- "once_cell",
- "paste",
- "static_assertions",
-]
-
-[[package]]
-name = "uniffi_internal_macros"
-version = "0.29.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22a9dba1d78b9ce429439891089c223478043d52a1c3176a0fcea2b5573a7fcf"
+checksum = "d2c801f0f05b06df456a2da4c41b9c2c4fdccc6b9916643c6c67275c4c9e4d07"
dependencies = [
"quote",
"syn",
]
[[package]]
-name = "uniffi_macros"
-version = "0.29.0"
+name = "uniffi_core"
+version = "0.28.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78dd5f8eefba5898b901086f5e7916da67b9a5286a01cc44e910cd75fa37c630"
+checksum = "61049e4db6212d0ede80982adf0e1d6fa224e6118387324c5cfbe3083dfb2252"
dependencies = [
+ "anyhow",
+ "bytes",
+ "log",
+ "once_cell",
+ "paste",
+ "static_assertions",
+]
+
+[[package]]
+name = "uniffi_macros"
+version = "0.28.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b40fd2249e0c5dcbd2bfa3c263db1ec981f7273dca7f4132bf06a272359a586c"
+dependencies = [
+ "bincode",
"camino",
"fs-err",
"once_cell",
@@ -6990,20 +6971,21 @@ dependencies = [
[[package]]
name = "uniffi_meta"
-version = "0.29.0"
+version = "0.28.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d5965b1d4ffacef1eaa72fef9c00d2491641e87ad910f6c5859b9c503ddb16a"
+checksum = "c9ad57039b4fafdbf77428d74fff40e0908e5a1731e023c19cfe538f6d4a8ed6"
dependencies = [
"anyhow",
+ "bytes",
"siphasher",
- "uniffi_internal_macros",
+ "uniffi_checksum_derive",
]
[[package]]
name = "uniffi_testing"
-version = "0.29.0"
+version = "0.28.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "43b0f35750a3e1836f10f26e4eceba688748b8a1e94a6ee251c976099d984d4f"
+checksum = "21fa171d4d258dc51bbd01893cc9608c1b62273d2f9ea55fb64f639e77824567"
dependencies = [
"anyhow",
"camino",
@@ -7014,13 +6996,14 @@ dependencies = [
[[package]]
name = "uniffi_udl"
-version = "0.29.0"
+version = "0.28.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "279b82bac9a382c796a0d210bb8354a0b813499b28aa1de046c85d78ca389805"
+checksum = "f52299e247419e7e2934bef2f94d7cccb0e6566f3248b1d48b160d8f369a2668"
dependencies = [
"anyhow",
"textwrap",
"uniffi_meta",
+ "uniffi_testing",
"weedle2",
]
@@ -7086,7 +7069,7 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "viaduct"
version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
+source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
dependencies = [
"ffi-support",
"log",
@@ -7224,7 +7207,7 @@ dependencies = [
"bumpalo",
"leb128",
"memchr",
- "unicode-width 0.1.999",
+ "unicode-width",
"wasm-encoder",
]
@@ -7256,7 +7239,7 @@ dependencies = [
[[package]]
name = "webext-storage"
version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=8e84c588a5cc2686973c5026ecd240d6275d7e94#8e84c588a5cc2686973c5026ecd240d6275d7e94"
+source = "git+https://github.com/mozilla/application-services?rev=d773da92641d92930b7308300e9fc2746a05ce6a#d773da92641d92930b7308300e9fc2746a05ce6a"
dependencies = [
"anyhow",
"error-support",
@@ -7410,7 +7393,7 @@ dependencies = [
"parking_lot",
"profiling",
"ron",
- "rustc-hash 1.999.999",
+ "rustc-hash",
"serde",
"smallvec",
"thiserror 2.0.9",
diff --git a/Cargo.toml b/Cargo.toml
index d16e286be4d5..44dba1782c15 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -61,12 +61,12 @@ rust-version = "1.82.0"
[workspace.dependencies]
# Shared across multiple UniFFI consumers.
-uniffi = "0.29.0"
-uniffi_bindgen = "0.29.0"
+uniffi = "0.28.2"
+uniffi_bindgen = "0.28.2"
# Shared across multiple application-services consumers.
rusqlite = "0.31.0"
# Shared across multiple glean consumers.
-glean = "=64.0.0"
+glean = "=63.1.0"
# Explicitly specify what our profiles use. The opt-level setting here is
# a total fiction; see the setup of MOZ_RUST_DEFAULT_FLAGS for what the
@@ -176,9 +176,6 @@ goblin = { path = "build/rust/goblin" }
# Implement getrandom 0.2 in terms of 0.3
getrandom = { path = "build/rust/getrandom" }
-# Patch rustc-hash 1.1.0 to 2.1.1
-rustc-hash = { path = "build/rust/rustc-hash" }
-
# Patch memoffset from 0.8.0 to 0.9.0 since it's compatible and it avoids duplication
memoffset = { path = "build/rust/memoffset" }
@@ -220,9 +217,6 @@ rure = { path = "third_party/rust/rure" }
# Patch `plist` to work with `indexmap` 2.*
plist = { path = "third_party/rust/plist" }
-# Patch `unicode-width` 0.1.* to 0.2.
-unicode-width = { path = "build/rust/unicode-width" }
-
# To-be-published changes.
unicode-bidi = { git = "https://github.com/servo/unicode-bidi", rev = "ca612daf1c08c53abe07327cb3e6ef6e0a760f0c" }
nss-gk-api = { git = "https://github.com/beurdouche/nss-gk-api", rev = "e48a946811ffd64abc78de3ee284957d8d1c0d63" }
@@ -247,14 +241,14 @@ malloc_size_of_derive = { path = "xpcom/rust/malloc_size_of_derive" }
objc = { git = "https://github.com/glandium/rust-objc", rev = "4de89f5aa9851ceca4d40e7ac1e2759410c04324" }
# application-services overrides to make updating them all simpler.
-interrupt-support = { git = "https://github.com/mozilla/application-services", rev = "8e84c588a5cc2686973c5026ecd240d6275d7e94" }
-relevancy = { git = "https://github.com/mozilla/application-services", rev = "8e84c588a5cc2686973c5026ecd240d6275d7e94" }
-search = { git = "https://github.com/mozilla/application-services", rev = "8e84c588a5cc2686973c5026ecd240d6275d7e94" }
-sql-support = { git = "https://github.com/mozilla/application-services", rev = "8e84c588a5cc2686973c5026ecd240d6275d7e94" }
-suggest = { git = "https://github.com/mozilla/application-services", rev = "8e84c588a5cc2686973c5026ecd240d6275d7e94" }
-sync15 = { git = "https://github.com/mozilla/application-services", rev = "8e84c588a5cc2686973c5026ecd240d6275d7e94" }
-tabs = { git = "https://github.com/mozilla/application-services", rev = "8e84c588a5cc2686973c5026ecd240d6275d7e94" }
-viaduct = { git = "https://github.com/mozilla/application-services", rev = "8e84c588a5cc2686973c5026ecd240d6275d7e94" }
-webext-storage = { git = "https://github.com/mozilla/application-services", rev = "8e84c588a5cc2686973c5026ecd240d6275d7e94" }
+interrupt-support = { git = "https://github.com/mozilla/application-services", rev = "d773da92641d92930b7308300e9fc2746a05ce6a" }
+relevancy = { git = "https://github.com/mozilla/application-services", rev = "d773da92641d92930b7308300e9fc2746a05ce6a" }
+search = { git = "https://github.com/mozilla/application-services", rev = "d773da92641d92930b7308300e9fc2746a05ce6a" }
+sql-support = { git = "https://github.com/mozilla/application-services", rev = "d773da92641d92930b7308300e9fc2746a05ce6a" }
+suggest = { git = "https://github.com/mozilla/application-services", rev = "d773da92641d92930b7308300e9fc2746a05ce6a" }
+sync15 = { git = "https://github.com/mozilla/application-services", rev = "d773da92641d92930b7308300e9fc2746a05ce6a" }
+tabs = { git = "https://github.com/mozilla/application-services", rev = "d773da92641d92930b7308300e9fc2746a05ce6a" }
+viaduct = { git = "https://github.com/mozilla/application-services", rev = "d773da92641d92930b7308300e9fc2746a05ce6a" }
+webext-storage = { git = "https://github.com/mozilla/application-services", rev = "d773da92641d92930b7308300e9fc2746a05ce6a" }
allocator-api2 = { path = "third_party/rust/allocator-api2" }
diff --git a/build/rust/rustc-hash/Cargo.toml b/build/rust/rustc-hash/Cargo.toml
deleted file mode 100644
index 977387a469fd..000000000000
--- a/build/rust/rustc-hash/Cargo.toml
+++ /dev/null
@@ -1,17 +0,0 @@
-[package]
-name = "rustc-hash"
-version = "1.999.999"
-edition = "2018"
-license = "Apache-2.0 OR MIT"
-
-[lib]
-path = "lib.rs"
-
-[dependencies.rustc-hash]
-version = "2.1.1"
-default-features = false
-
-[features]
-default = ["rustc-hash/default"]
-std = ["rustc-hash/std"]
-rand = ["rustc-hash/rand"]
diff --git a/build/rust/rustc-hash/lib.rs b/build/rust/rustc-hash/lib.rs
deleted file mode 100644
index dd0d3cd92fd7..000000000000
--- a/build/rust/rustc-hash/lib.rs
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 or the MIT license
-// , at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-pub use rustc_hash::*;
diff --git a/build/rust/unicode-width/Cargo.toml b/build/rust/unicode-width/Cargo.toml
deleted file mode 100644
index 91e1341a2f1f..000000000000
--- a/build/rust/unicode-width/Cargo.toml
+++ /dev/null
@@ -1,18 +0,0 @@
-[package]
-name = "unicode-width"
-version = "0.1.999"
-edition = "2018"
-license = "MPL-2.0"
-
-[lib]
-path = "lib.rs"
-
-[dependencies.unicode-width]
-version = "0.2.0"
-default-features = false
-
-[features]
-default = ["unicode-width/default"]
-cjk = ["unicode-width/cjk"]
-no_std = ["unicode-width/no_std"]
-rustc-dep-of-std = ["unicode-width/rustc-dep-of-std"]
diff --git a/build/rust/unicode-width/lib.rs b/build/rust/unicode-width/lib.rs
deleted file mode 100644
index 817c98ed5779..000000000000
--- a/build/rust/unicode-width/lib.rs
+++ /dev/null
@@ -1,5 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-pub use unicode_width::*;
diff --git a/gfx/wr/Cargo.lock b/gfx/wr/Cargo.lock
index 137abf1f0206..e7fa273f5e5a 100644
--- a/gfx/wr/Cargo.lock
+++ b/gfx/wr/Cargo.lock
@@ -87,6 +87,38 @@ dependencies = [
"term",
]
+[[package]]
+name = "askama"
+version = "0.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47cbc3cf73fa8d9833727bbee4835ba5c421a0d65b72daf9a7b5d0e0f9cfb57e"
+dependencies = [
+ "askama_derive",
+ "askama_escape",
+]
+
+[[package]]
+name = "askama_derive"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c22fbe0413545c098358e56966ff22cdd039e10215ae213cfbd65032b119fc94"
+dependencies = [
+ "basic-toml",
+ "mime",
+ "mime_guess",
+ "nom 7.1.1",
+ "proc-macro2",
+ "quote",
+ "serde",
+ "syn 2.0.25",
+]
+
+[[package]]
+name = "askama_escape"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "619743e34b5ba4e9703bba34deac3427c72507c7159f5fd030aea8cac0cfe341"
+
[[package]]
name = "atty"
version = "0.2.14"
@@ -149,7 +181,7 @@ dependencies = [
"proc-macro2",
"quote",
"regex",
- "rustc-hash 1.1.0",
+ "rustc-hash",
"shlex",
"syn 2.0.25",
"which",
@@ -253,6 +285,32 @@ name = "camino"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "869119e97797867fd90f5e22af7d0bd274bd4635ebb9eb68c04f3f513ae6c412"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "cargo-platform"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cbdb825da8a5df079a43676dbe042702f1707b1109f713a01420fbb4cc71fa27"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "cargo_metadata"
+version = "0.15.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "982a0cf6a99c350d7246035613882e376d58cebe571785abc5da4f648d53ac0a"
+dependencies = [
+ "camino",
+ "cargo-platform",
+ "semver",
+ "serde",
+ "serde_json",
+ "thiserror",
+]
[[package]]
name = "cbitset"
@@ -982,9 +1040,9 @@ dependencies = [
[[package]]
name = "glean"
-version = "64.0.0"
+version = "63.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba92338cfd9fb34b00c02c6da8e22936b41835eb02ab5462d3d88cc4b6249c35"
+checksum = "e2afa6754943cac5243099efd0d26e89cc8e06f1585776ba14ab0c6ee99e1f71"
dependencies = [
"crossbeam-channel",
"glean-core",
@@ -996,9 +1054,9 @@ dependencies = [
[[package]]
name = "glean-core"
-version = "64.0.0"
+version = "63.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "92a2cbf41fbb9996b14fc1721b8bd06e669589de05e6efc20a24bab14285623a"
+checksum = "53cd53bb7a3b89b17d3989e95dd808b137ff47c504d1d19f14cb0d820cc2f42e"
dependencies = [
"android_logger",
"bincode",
@@ -1311,9 +1369,9 @@ dependencies = [
[[package]]
name = "itoa"
-version = "1.0.15"
+version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
+checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d"
[[package]]
name = "jni-sys"
@@ -2270,45 +2328,6 @@ version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
-[[package]]
-name = "rinja"
-version = "0.3.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3dc4940d00595430b3d7d5a01f6222b5e5b51395d1120bdb28d854bb8abb17a5"
-dependencies = [
- "itoa",
- "rinja_derive",
-]
-
-[[package]]
-name = "rinja_derive"
-version = "0.3.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08d9ed0146aef6e2825f1b1515f074510549efba38d71f4554eec32eb36ba18b"
-dependencies = [
- "basic-toml",
- "memchr",
- "mime",
- "mime_guess",
- "proc-macro2",
- "quote",
- "rinja_parser",
- "rustc-hash 2.1.1",
- "serde",
- "syn 2.0.25",
-]
-
-[[package]]
-name = "rinja_parser"
-version = "0.3.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "93f9a866e2e00a7a1fb27e46e9e324a6f7c0e7edc4543cae1d38f4e4a100c610"
-dependencies = [
- "memchr",
- "nom 7.1.1",
- "serde",
-]
-
[[package]]
name = "rkv"
version = "0.19.0"
@@ -2351,7 +2370,7 @@ dependencies = [
"countme",
"hashbrown 0.12.3",
"memoffset",
- "rustc-hash 1.1.0",
+ "rustc-hash",
"text-size",
]
@@ -2361,12 +2380,6 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
-[[package]]
-name = "rustc-hash"
-version = "2.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
-
[[package]]
name = "rustix"
version = "0.38.38"
@@ -2438,6 +2451,9 @@ name = "semver"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2333e6df6d6598f2b1974829f853c2b4c5f4a6e503c10af918081aa6f8564e1"
+dependencies = [
+ "serde",
+]
[[package]]
name = "serde"
@@ -2828,9 +2844,9 @@ checksum = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
[[package]]
name = "uniffi"
-version = "0.29.0"
+version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba62a57e90f9baed5ad02a71a0870180fa1cc35499093b2d21be2edfb68ec0f7"
+checksum = "2db87def739fe4183947f8419d572d1849a4a09355eba4e988a2105cfd0ac6a7"
dependencies = [
"anyhow",
"uniffi_build",
@@ -2840,11 +2856,12 @@ dependencies = [
[[package]]
name = "uniffi_bindgen"
-version = "0.29.0"
+version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2242f35214f1e0e3b47c495d340c69f649f9a9ece3a943a29e275686cc884533"
+checksum = "7a112599c9556d1581e4a3d72019a74c2c3e122cc27f4af12577a429c4d5e614"
dependencies = [
"anyhow",
+ "askama",
"camino",
"fs-err",
"glob",
@@ -2852,7 +2869,6 @@ dependencies = [
"heck",
"once_cell",
"paste",
- "rinja",
"serde",
"textwrap 0.16.1",
"toml",
@@ -2862,9 +2878,9 @@ dependencies = [
[[package]]
name = "uniffi_build"
-version = "0.29.0"
+version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c887a6c9a2857d8dc2ab0c8d578e8aa4978145b4fd65ed44296341e89aebc3cc"
+checksum = "e2b12684401d2a8508ca9c72a95bbc45906417e42fc80942abaf033bbf01aa33"
dependencies = [
"anyhow",
"camino",
@@ -2872,34 +2888,37 @@ dependencies = [
]
[[package]]
-name = "uniffi_core"
-version = "0.29.0"
+name = "uniffi_checksum_derive"
+version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cad9fbdeb7ae4daf8d0f7704a3b638c37018eb16bb701e30fa17a2dd3e2d39c1"
-dependencies = [
- "anyhow",
- "bytes",
- "once_cell",
- "paste",
- "static_assertions",
-]
-
-[[package]]
-name = "uniffi_internal_macros"
-version = "0.29.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22a9dba1d78b9ce429439891089c223478043d52a1c3176a0fcea2b5573a7fcf"
+checksum = "a22dbe67c1c957ac6e7611bdf605a6218aa86b0eebeb8be58b70ae85ad7d73dc"
dependencies = [
"quote",
"syn 2.0.25",
]
[[package]]
-name = "uniffi_macros"
-version = "0.29.0"
+name = "uniffi_core"
+version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78dd5f8eefba5898b901086f5e7916da67b9a5286a01cc44e910cd75fa37c630"
+checksum = "5a0c35aaad30e3a9e6d4fe34e358d64dbc92ee09045b48591b05fc9f12e0905b"
dependencies = [
+ "anyhow",
+ "bytes",
+ "camino",
+ "log",
+ "once_cell",
+ "paste",
+ "static_assertions",
+]
+
+[[package]]
+name = "uniffi_macros"
+version = "0.28.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db66474c5c61b0f7afc3b4995fecf9b72b340daa5ca0ef3da7778d75eb5482ea"
+dependencies = [
+ "bincode",
"camino",
"fs-err",
"once_cell",
@@ -2913,24 +2932,39 @@ dependencies = [
[[package]]
name = "uniffi_meta"
-version = "0.29.0"
+version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d5965b1d4ffacef1eaa72fef9c00d2491641e87ad910f6c5859b9c503ddb16a"
+checksum = "d898893f102e0e39b8bcb7e3d2188f4156ba280db32db9e8af1f122d057e9526"
dependencies = [
"anyhow",
+ "bytes",
"siphasher",
- "uniffi_internal_macros",
+ "uniffi_checksum_derive",
+]
+
+[[package]]
+name = "uniffi_testing"
+version = "0.28.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2c6aa4f0cf9d12172d84fc00a35a6c1f3522b526daad05ae739f709f6941b9b6"
+dependencies = [
+ "anyhow",
+ "camino",
+ "cargo_metadata",
+ "fs-err",
+ "once_cell",
]
[[package]]
name = "uniffi_udl"
-version = "0.29.0"
+version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "279b82bac9a382c796a0d210bb8354a0b813499b28aa1de046c85d78ca389805"
+checksum = "6b044e9c519e0bb51e516ab6f6d8f4f4dcf900ce30d5ad07c03f924e2824f28e"
dependencies = [
"anyhow",
"textwrap 0.16.1",
"uniffi_meta",
+ "uniffi_testing",
"weedle2",
]
diff --git a/gfx/wr/Cargo.toml b/gfx/wr/Cargo.toml
index 363f20b5b6da..c5efde7d1cf9 100644
--- a/gfx/wr/Cargo.toml
+++ b/gfx/wr/Cargo.toml
@@ -9,7 +9,7 @@ members = [
resolver = "2"
[workspace.dependencies]
-glean = "=64.0.0"
+glean = "=63.1.0"
[profile.release]
debug = true
diff --git a/gfx/wr/servo-tidy.toml b/gfx/wr/servo-tidy.toml
index 1c9890ede045..355cb7d7f565 100644
--- a/gfx/wr/servo-tidy.toml
+++ b/gfx/wr/servo-tidy.toml
@@ -28,8 +28,6 @@ packages = [
"parking_lot_core",
"rand",
"rand_core",
- # rinja (dependency of uniffi) requires 2.1, rowan requires 1.1
- "rustc-hash",
# transition to syn 2 is underway.
"syn",
"synstructure",
diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml
index 5d73562cb507..2e3dce91ef9e 100644
--- a/gradle/libs.versions.toml
+++ b/gradle/libs.versions.toml
@@ -4,7 +4,7 @@ android-gradle-plugin = "8.9.1" # Keep lint version in sync
lint = "31.9.1"
python-envs-plugin = "0.0.31"
-mozilla-glean = "64.0.0"
+mozilla-glean = "63.1.0"
maven-ant-tasks = "2.1.3"
jacoco = "0.8.12"
okhttp = "4.12.0"
diff --git a/mobile/android/android-components/components/service/glean/src/main/java/mozilla/components/service/glean/net/ConceptFetchHttpUploader.kt b/mobile/android/android-components/components/service/glean/src/main/java/mozilla/components/service/glean/net/ConceptFetchHttpUploader.kt
index 2c5b5ab457c6..06947e6d5bad 100644
--- a/mobile/android/android-components/components/service/glean/src/main/java/mozilla/components/service/glean/net/ConceptFetchHttpUploader.kt
+++ b/mobile/android/android-components/components/service/glean/src/main/java/mozilla/components/service/glean/net/ConceptFetchHttpUploader.kt
@@ -11,11 +11,8 @@ import mozilla.components.concept.fetch.Header
import mozilla.components.concept.fetch.Request
import mozilla.components.concept.fetch.toMutableHeaders
import mozilla.components.support.base.log.logger.Logger
-import mozilla.telemetry.glean.net.CapablePingUploadRequest
import mozilla.telemetry.glean.net.HeadersList
import mozilla.telemetry.glean.net.HttpStatus
-import mozilla.telemetry.glean.net.Incapable
-import mozilla.telemetry.glean.net.PingUploadRequest
import mozilla.telemetry.glean.net.RecoverableFailure
import mozilla.telemetry.glean.net.UploadResult
import java.io.IOException
@@ -67,12 +64,8 @@ class ConceptFetchHttpUploader(
* or faced an unrecoverable error), false if there was a recoverable
* error callers can deal with.
*/
- override fun upload(request: CapablePingUploadRequest): UploadResult {
- val req: PingUploadRequest? = request.capable({ capabilities: List -> capabilities.size == 0 })
- if (req == null) {
- return Incapable(0)
- }
- val request = buildRequest(req)
+ override fun upload(url: String, data: ByteArray, headers: HeadersList): UploadResult {
+ val request = buildRequest(url, data, headers)
return try {
performUpload(client.value, request)
@@ -83,11 +76,15 @@ class ConceptFetchHttpUploader(
}
@VisibleForTesting(otherwise = PRIVATE)
- internal fun buildRequest(request: PingUploadRequest): Request {
- val conceptHeaders = request.headers.map { (name, value) -> Header(name, value) }.toMutableHeaders()
+ internal fun buildRequest(
+ url: String,
+ data: ByteArray,
+ headers: HeadersList,
+ ): Request {
+ val conceptHeaders = headers.map { (name, value) -> Header(name, value) }.toMutableHeaders()
return Request(
- url = request.url,
+ url = url,
method = Request.Method.POST,
connectTimeout = Pair(DEFAULT_CONNECTION_TIMEOUT, TimeUnit.MILLISECONDS),
readTimeout = Pair(DEFAULT_READ_TIMEOUT, TimeUnit.MILLISECONDS),
@@ -96,7 +93,7 @@ class ConceptFetchHttpUploader(
// offer a better API to do that, so we nuke all cookies going to our telemetry
// endpoint.
cookiePolicy = Request.CookiePolicy.OMIT,
- body = Request.Body(request.data.inputStream()),
+ body = Request.Body(data.inputStream()),
private = usePrivateRequest,
conservative = true,
)
diff --git a/mobile/android/android-components/components/service/glean/src/test/java/mozilla/components/service/glean/net/ConceptFetchHttpUploaderTest.kt b/mobile/android/android-components/components/service/glean/src/test/java/mozilla/components/service/glean/net/ConceptFetchHttpUploaderTest.kt
index b27310a2bce5..48d0d6c3a4e4 100644
--- a/mobile/android/android-components/components/service/glean/src/test/java/mozilla/components/service/glean/net/ConceptFetchHttpUploaderTest.kt
+++ b/mobile/android/android-components/components/service/glean/src/test/java/mozilla/components/service/glean/net/ConceptFetchHttpUploaderTest.kt
@@ -13,9 +13,7 @@ import mozilla.components.support.test.any
import mozilla.components.support.test.argumentCaptor
import mozilla.components.support.test.mock
import mozilla.telemetry.glean.config.Configuration
-import mozilla.telemetry.glean.net.CapablePingUploadRequest
import mozilla.telemetry.glean.net.HttpStatus
-import mozilla.telemetry.glean.net.PingUploadRequest
import mozilla.telemetry.glean.net.RecoverableFailure
import okhttp3.mockwebserver.Dispatcher
import okhttp3.mockwebserver.MockResponse
@@ -61,8 +59,7 @@ class ConceptFetchHttpUploaderTest {
val uploader =
spy(ConceptFetchHttpUploader(lazy { HttpURLConnectionClient() }))
- val uploadRequest = PingUploadRequest(url = testPath, data = testPing.toByteArray(), emptyMap(), emptyList())
- val request = uploader.buildRequest(uploadRequest)
+ val request = uploader.buildRequest(testPath, testPing.toByteArray(), emptyMap())
assertEquals(
Pair(ConceptFetchHttpUploader.DEFAULT_READ_TIMEOUT, TimeUnit.MILLISECONDS),
@@ -88,8 +85,7 @@ class ConceptFetchHttpUploaderTest {
)
val uploader = ConceptFetchHttpUploader(lazy { mockClient })
- val uploadRequest = CapablePingUploadRequest(PingUploadRequest(url = testPath, data = testPing.toByteArray(), expectedHeaders, emptyList()))
- uploader.upload(uploadRequest)
+ uploader.upload(testPath, testPing.toByteArray(), expectedHeaders)
val requestCaptor = argumentCaptor()
verify(mockClient).fetch(requestCaptor.capture())
@@ -106,8 +102,7 @@ class ConceptFetchHttpUploaderTest {
val uploader =
spy(ConceptFetchHttpUploader(lazy { HttpURLConnectionClient() }))
- val uploadRequest = PingUploadRequest(url = testPath, data = testPing.toByteArray(), emptyMap(), emptyList())
- val request = uploader.buildRequest(uploadRequest)
+ val request = uploader.buildRequest(testPath, testPing.toByteArray(), emptyMap())
assertEquals(request.cookiePolicy, Request.CookiePolicy.OMIT)
}
@@ -126,8 +121,7 @@ class ConceptFetchHttpUploaderTest {
val uploader = spy(ConceptFetchHttpUploader(lazy { mockClient }))
- val uploadRequest = CapablePingUploadRequest(PingUploadRequest(url = testPath, data = testPing.toByteArray(), emptyMap(), emptyList()))
- assertEquals(HttpStatus(200), uploader.upload(uploadRequest))
+ assertEquals(HttpStatus(200), uploader.upload(testPath, testPing.toByteArray(), emptyMap()))
}
@Test
@@ -145,8 +139,7 @@ class ConceptFetchHttpUploaderTest {
val uploader = spy(ConceptFetchHttpUploader(lazy { mockClient }))
- val uploadRequest = CapablePingUploadRequest(PingUploadRequest(url = testPath, data = testPing.toByteArray(), emptyMap(), emptyList()))
- assertEquals(HttpStatus(responseCode), uploader.upload(uploadRequest))
+ assertEquals(HttpStatus(responseCode), uploader.upload(testPath, testPing.toByteArray(), emptyMap()))
}
}
@@ -165,8 +158,7 @@ class ConceptFetchHttpUploaderTest {
val uploader = spy(ConceptFetchHttpUploader(lazy { mockClient }))
- val uploadRequest = CapablePingUploadRequest(PingUploadRequest(url = testPath, data = testPing.toByteArray(), emptyMap(), emptyList()))
- assertEquals(HttpStatus(responseCode), uploader.upload(uploadRequest))
+ assertEquals(HttpStatus(responseCode), uploader.upload(testPath, testPing.toByteArray(), emptyMap()))
}
}
@@ -185,8 +177,7 @@ class ConceptFetchHttpUploaderTest {
val uploader = spy(ConceptFetchHttpUploader(lazy { mockClient }))
- val uploadRequest = CapablePingUploadRequest(PingUploadRequest(url = testPath, data = testPing.toByteArray(), emptyMap(), emptyList()))
- assertEquals(HttpStatus(responseCode), uploader.upload(uploadRequest))
+ assertEquals(HttpStatus(responseCode), uploader.upload(testPath, testPing.toByteArray(), emptyMap()))
}
}
@@ -197,8 +188,7 @@ class ConceptFetchHttpUploaderTest {
val client = ConceptFetchHttpUploader(lazy { HttpURLConnectionClient() })
val submissionUrl = "http://" + server.hostName + ":" + server.port + testPath
- val uploadRequest = CapablePingUploadRequest(PingUploadRequest(url = submissionUrl, data = testPing.toByteArray(), mapOf("test" to "header"), emptyList()))
- assertEquals(HttpStatus(200), client.upload(uploadRequest))
+ assertEquals(HttpStatus(200), client.upload(submissionUrl, testPing.toByteArray(), mapOf("test" to "header")))
val request = server.takeRequest()
assertEquals(testPath, request.path)
@@ -216,8 +206,7 @@ class ConceptFetchHttpUploaderTest {
val client = ConceptFetchHttpUploader(lazy { HttpURLConnectionClient() })
val submissionUrl = "http://" + server.hostName + ":" + server.port + testPath
- val uploadRequest = CapablePingUploadRequest(PingUploadRequest(url = submissionUrl, data = testPing.toByteArray(), mapOf("test" to "header"), emptyList()))
- assertEquals(HttpStatus(200), client.upload(uploadRequest))
+ assertEquals(HttpStatus(200), client.upload(submissionUrl, testPing.toByteArray(), mapOf("test" to "header")))
val request = server.takeRequest()
assertEquals(testPath, request.path)
@@ -236,8 +225,7 @@ class ConceptFetchHttpUploaderTest {
val client = ConceptFetchHttpUploader(lazy { OkHttpClient() })
val submissionUrl = "http://" + server.hostName + ":" + server.port + testPath
- val uploadRequest = CapablePingUploadRequest(PingUploadRequest(url = submissionUrl, data = testPing.toByteArray(), mapOf("test" to "header"), emptyList()))
- assertEquals(HttpStatus(200), client.upload(uploadRequest))
+ assertEquals(HttpStatus(200), client.upload(submissionUrl, testPing.toByteArray(), mapOf("test" to "header")))
val request = server.takeRequest()
assertEquals(testPath, request.path)
@@ -286,8 +274,7 @@ class ConceptFetchHttpUploaderTest {
// Trigger the connection.
val client = ConceptFetchHttpUploader(lazy { HttpURLConnectionClient() })
val submissionUrl = testConfig.serverEndpoint + testPath
- val uploadRequest = CapablePingUploadRequest(PingUploadRequest(url = submissionUrl, data = testPing.toByteArray(), emptyMap(), emptyList()))
- assertEquals(HttpStatus(200), client.upload(uploadRequest))
+ assertEquals(HttpStatus(200), client.upload(submissionUrl, testPing.toByteArray(), emptyMap()))
val request = server.takeRequest()
assertEquals(testPath, request.path)
@@ -311,8 +298,7 @@ class ConceptFetchHttpUploaderTest {
// And IOException during upload is a failed upload that we should retry. The client should
// return false in this case.
- val uploadRequest = CapablePingUploadRequest(PingUploadRequest(url = "path", data = "ping".toByteArray(), emptyMap(), emptyList()))
- assertEquals(RecoverableFailure(0), uploader.upload(uploadRequest))
+ assertEquals(RecoverableFailure(0), uploader.upload("path", "ping".toByteArray(), emptyMap()))
}
@Test
@@ -325,8 +311,7 @@ class ConceptFetchHttpUploaderTest {
assertFalse(uploader.client.isInitialized())
// After calling upload, the client must get instantiated.
- val uploadRequest = CapablePingUploadRequest(PingUploadRequest(url = "path", data = "ping".toByteArray(), emptyMap(), emptyList()))
- uploader.upload(uploadRequest)
+ uploader.upload("path", "ping".toByteArray(), emptyMap())
assertTrue(uploader.client.isInitialized())
}
@@ -344,8 +329,7 @@ class ConceptFetchHttpUploaderTest {
)
val uploader = ConceptFetchHttpUploader(lazy { mockClient }, true)
- val uploadRequest = CapablePingUploadRequest(PingUploadRequest(url = "path", data = "ping".toByteArray(), expectedHeaders, emptyList()))
- uploader.upload(uploadRequest)
+ uploader.upload(testPath, testPing.toByteArray(), expectedHeaders)
val captor = argumentCaptor()
diff --git a/mobile/android/android-components/plugins/dependencies/src/main/java/ApplicationServices.kt b/mobile/android/android-components/plugins/dependencies/src/main/java/ApplicationServices.kt
index ab4337a4b1be..f00387822598 100644
--- a/mobile/android/android-components/plugins/dependencies/src/main/java/ApplicationServices.kt
+++ b/mobile/android/android-components/plugins/dependencies/src/main/java/ApplicationServices.kt
@@ -3,7 +3,7 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// These lines are generated by android-components/automation/application-services-nightly-bump.py
-val VERSION = "138.20250327164940"
+val VERSION = "138.20250325050356"
val CHANNEL = ApplicationServicesChannel.NIGHTLY
object ApplicationServicesConfig {
diff --git a/mobile/android/android-components/plugins/dependencies/src/main/java/moz.yaml b/mobile/android/android-components/plugins/dependencies/src/main/java/moz.yaml
index 731e2d60aca4..058bde214b89 100644
--- a/mobile/android/android-components/plugins/dependencies/src/main/java/moz.yaml
+++ b/mobile/android/android-components/plugins/dependencies/src/main/java/moz.yaml
@@ -31,11 +31,11 @@ origin:
# Human-readable identifier for this version/release
# Generally "version NNN", "tag SSS", "bookmark SSS"
- release: 8e84c588a5cc2686973c5026ecd240d6275d7e94 (2025-03-27T16:49:40).
+ release: 093c4f261a91b2a7d6f52da4f79bef19f61456fb (2025-03-25T05:03:56).
# Revision to pull in
# Must be a long or short commit SHA (long preferred)
- revision: 8e84c588a5cc2686973c5026ecd240d6275d7e94
+ revision: 093c4f261a91b2a7d6f52da4f79bef19f61456fb
# The package's license, where possible using the mnemonic from
# https://spdx.org/licenses/
diff --git a/python/sites/mach.txt b/python/sites/mach.txt
index d213369aa7fc..6e3db1c848f7 100644
--- a/python/sites/mach.txt
+++ b/python/sites/mach.txt
@@ -104,7 +104,7 @@ vendored:third_party/python/wheel
vendored:third_party/python/zipp
# glean-sdk may not be installable if a wheel isn't available
# and it has to be built from source.
-pypi-optional:glean-sdk==64.0.0:telemetry will not be collected
+pypi-optional:glean-sdk==63.1.0:telemetry will not be collected
# Mach gracefully handles the case where `psutil` is unavailable.
# We aren't (yet) able to pin packages in automation, so we have to
# support down to the oldest locally-installed version (5.4.2).
diff --git a/supply-chain/audits.toml b/supply-chain/audits.toml
index 4c4d85ee2b48..5f1c993baf44 100644
--- a/supply-chain/audits.toml
+++ b/supply-chain/audits.toml
@@ -531,13 +531,6 @@ start = "2023-06-21"
end = "2026-02-01"
notes = "Maintained by the Glean and Application Services teams"
-[[wildcard-audits.uniffi_internal_macros]]
-who = "Jan-Erik Rediger "
-criteria = "safe-to-deploy"
-user-id = 111105
-start = "2025-03-18"
-end = "2026-03-25"
-
[[wildcard-audits.uniffi_macros]]
who = "Ben Dean-Kawamura "
criteria = "safe-to-deploy"
@@ -6311,6 +6304,12 @@ non-1-byte-aligned type, however right now that is not the case
(submitted https://github.com/zip-rs/zip2/issues/198).
"""
+[[audits.zlib-rs]]
+who = "Mike Hommey "
+criteria = "safe-to-deploy"
+delta = "0.2.1 -> 0.2.1@git:4aa430ccb77537d0d60dab8db993ca51bb1194c5"
+importable = false
+
[[trusted.aho-corasick]]
criteria = "safe-to-deploy"
user-id = 189 # Andrew Gallant (BurntSushi)
diff --git a/supply-chain/config.toml b/supply-chain/config.toml
index 9a0130747757..d507a95ed1ba 100644
--- a/supply-chain/config.toml
+++ b/supply-chain/config.toml
@@ -297,6 +297,14 @@ criteria = "safe-to-deploy"
version = "0.2.0"
criteria = "safe-to-deploy"
+[[exemptions.askama_derive]]
+version = "0.11.2"
+criteria = "safe-to-deploy"
+
+[[exemptions.askama_escape]]
+version = "0.10.3"
+criteria = "safe-to-deploy"
+
[[exemptions.async-task]]
version = "4.0.3"
criteria = "safe-to-deploy"
@@ -518,6 +526,10 @@ criteria = "safe-to-deploy"
version = "0.10.3"
criteria = "safe-to-deploy"
+[[exemptions.khronos-egl]]
+version = "4.1.0"
+criteria = "safe-to-deploy"
+
[[exemptions.khronos_api]]
version = "3.1.0"
criteria = "safe-to-deploy"
@@ -630,6 +642,10 @@ criteria = "safe-to-deploy"
version = "1.2.3"
criteria = "safe-to-deploy"
+[[exemptions.rand]]
+version = "0.8.5"
+criteria = "safe-to-deploy"
+
[[exemptions.remove_dir_all]]
version = "0.5.3"
criteria = "safe-to-deploy"
diff --git a/supply-chain/imports.lock b/supply-chain/imports.lock
index bdaaaaef168e..c754509e2e1c 100644
--- a/supply-chain/imports.lock
+++ b/supply-chain/imports.lock
@@ -258,15 +258,15 @@ user-login = "jrmuizel"
user-name = "Jeff Muizelaar"
[[publisher.glean]]
-version = "64.0.0"
-when = "2025-03-18"
+version = "63.1.0"
+when = "2025-01-30"
user-id = 48
user-login = "badboy"
user-name = "Jan-Erik Rediger"
[[publisher.glean-core]]
-version = "64.0.0"
-when = "2025-03-18"
+version = "63.1.0"
+when = "2025-01-30"
user-id = 48
user-login = "badboy"
user-name = "Jan-Erik Rediger"
@@ -335,8 +335,8 @@ user-login = "carllerche"
user-name = "Carl Lerche"
[[publisher.itoa]]
-version = "1.0.11"
-when = "2024-03-26"
+version = "1.0.5"
+when = "2022-12-17"
user-id = 3618
user-login = "dtolnay"
user-name = "David Tolnay"
@@ -704,63 +704,63 @@ user-login = "dtolnay"
user-name = "David Tolnay"
[[publisher.unicode-width]]
-version = "0.2.0"
-when = "2024-09-19"
+version = "0.1.10"
+when = "2022-09-13"
user-id = 1139
user-login = "Manishearth"
user-name = "Manish Goregaokar"
[[publisher.uniffi]]
-version = "0.29.0"
-when = "2025-02-06"
+version = "0.28.2"
+when = "2024-10-08"
user-id = 127697
user-login = "bendk"
[[publisher.uniffi_bindgen]]
-version = "0.29.0"
-when = "2025-02-06"
+version = "0.28.2"
+when = "2024-10-08"
user-id = 127697
user-login = "bendk"
[[publisher.uniffi_build]]
-version = "0.29.0"
-when = "2025-02-06"
+version = "0.28.2"
+when = "2024-10-08"
+user-id = 127697
+user-login = "bendk"
+
+[[publisher.uniffi_checksum_derive]]
+version = "0.28.2"
+when = "2024-10-08"
user-id = 127697
user-login = "bendk"
[[publisher.uniffi_core]]
-version = "0.29.0"
-when = "2025-02-06"
-user-id = 127697
-user-login = "bendk"
-
-[[publisher.uniffi_internal_macros]]
-version = "0.29.0"
-when = "2025-02-06"
+version = "0.28.2"
+when = "2024-10-08"
user-id = 127697
user-login = "bendk"
[[publisher.uniffi_macros]]
-version = "0.29.0"
-when = "2025-02-06"
+version = "0.28.2"
+when = "2024-10-08"
user-id = 127697
user-login = "bendk"
[[publisher.uniffi_meta]]
-version = "0.29.0"
-when = "2025-02-06"
+version = "0.28.2"
+when = "2024-10-08"
user-id = 127697
user-login = "bendk"
[[publisher.uniffi_testing]]
-version = "0.29.0"
-when = "2025-02-06"
+version = "0.28.2"
+when = "2024-10-08"
user-id = 127697
user-login = "bendk"
[[publisher.uniffi_udl]]
-version = "0.29.0"
-when = "2025-02-06"
+version = "0.28.2"
+when = "2024-10-08"
user-id = 127697
user-login = "bendk"
@@ -1203,11 +1203,6 @@ crate is broadly used throughout the ecosystem and does not contain anything
suspicious.
"""
-[[audits.bytecode-alliance.audits.itoa]]
-who = "Dan Gohman "
-criteria = "safe-to-deploy"
-delta = "1.0.11 -> 1.0.14"
-
[[audits.bytecode-alliance.audits.jobserver]]
who = "Alex Crichton "
criteria = "safe-to-deploy"
@@ -1550,13 +1545,6 @@ criteria = "safe-to-run"
version = "0.14.20"
aggregated-from = "https://chromium.googlesource.com/chromiumos/third_party/rust_crates/+/refs/heads/main/cargo-vet/audits.toml?format=TEXT"
-[[audits.google.audits.itoa]]
-who = "Daniel Cheng "
-criteria = "safe-to-deploy"
-delta = "1.0.14 -> 1.0.15"
-notes = "Only minor rustdoc changes."
-aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
-
[[audits.google.audits.nom]]
who = "danakj@chromium.org"
criteria = "safe-to-deploy"
@@ -1611,15 +1599,6 @@ Config-related changes in `test_size.rs`.
"""
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
-[[audits.google.audits.rand]]
-who = "Lukasz Anforowicz "
-criteria = "safe-to-deploy"
-version = "0.8.5"
-notes = """
-For more detailed unsafe review notes please see https://crrev.com/c/6362797
-"""
-aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
-
[[audits.google.audits.rustversion]]
who = "Lukasz Anforowicz "
criteria = "safe-to-deploy"
@@ -1971,14 +1950,6 @@ who = "Ameer Ghani "
criteria = "safe-to-deploy"
delta = "0.4.1 -> 0.4.2"
-[[audits.mozilla.wildcard-audits.uniffi_internal_macros]]
-who = "Jan-Erik Rediger "
-criteria = "safe-to-deploy"
-user-id = 127697 # bendk
-start = "2025-02-06"
-end = "2026-03-14"
-aggregated-from = "https://raw.githubusercontent.com/mozilla/glean/main/supply-chain/audits.toml"
-
[[audits.mozilla.wildcard-audits.weedle2]]
who = "Jan-Erik Rediger "
criteria = "safe-to-deploy"
@@ -1997,6 +1968,20 @@ end = "2025-08-05"
notes = "Maintained by me"
aggregated-from = "https://raw.githubusercontent.com/mozilla/glean/main/supply-chain/audits.toml"
+[[audits.mozilla.audits.askama]]
+who = "Jan-Erik Rediger "
+criteria = "safe-to-deploy"
+delta = "0.11.1 -> 0.12.0"
+notes = "No new unsafe usage, mostly dependency updates and smaller API changes"
+aggregated-from = "https://raw.githubusercontent.com/mozilla/glean/main/supply-chain/audits.toml"
+
+[[audits.mozilla.audits.askama_derive]]
+who = "Jan-Erik Rediger "
+criteria = "safe-to-deploy"
+delta = "0.11.2 -> 0.12.1"
+notes = "Dependency updates, a new toml dependency and some API changes. No unsafe use."
+aggregated-from = "https://raw.githubusercontent.com/mozilla/glean/main/supply-chain/audits.toml"
+
[[audits.mozilla.audits.basic-toml]]
who = "Jan-Erik Rediger "
criteria = "safe-to-deploy"
diff --git a/third_party/python/glean_parser/glean_parser-17.0.1.dist-info/AUTHORS.md b/third_party/python/glean_parser/glean_parser-16.2.0.dist-info/AUTHORS.md
similarity index 100%
rename from third_party/python/glean_parser/glean_parser-17.0.1.dist-info/AUTHORS.md
rename to third_party/python/glean_parser/glean_parser-16.2.0.dist-info/AUTHORS.md
diff --git a/third_party/python/glean_parser/glean_parser-17.0.1.dist-info/LICENSE b/third_party/python/glean_parser/glean_parser-16.2.0.dist-info/LICENSE
similarity index 100%
rename from third_party/python/glean_parser/glean_parser-17.0.1.dist-info/LICENSE
rename to third_party/python/glean_parser/glean_parser-16.2.0.dist-info/LICENSE
diff --git a/third_party/python/glean_parser/glean_parser-17.0.1.dist-info/METADATA b/third_party/python/glean_parser/glean_parser-16.2.0.dist-info/METADATA
similarity index 99%
rename from third_party/python/glean_parser/glean_parser-17.0.1.dist-info/METADATA
rename to third_party/python/glean_parser/glean_parser-16.2.0.dist-info/METADATA
index d85a935efa81..f25f155908a6 100644
--- a/third_party/python/glean_parser/glean_parser-17.0.1.dist-info/METADATA
+++ b/third_party/python/glean_parser/glean_parser-16.2.0.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: glean_parser
-Version: 17.0.1
+Version: 16.2.0
Summary: Parser tools for Mozilla's Glean telemetry
Home-page: https://github.com/mozilla/glean_parser
Author: The Glean Team
@@ -79,14 +79,6 @@ $ glean_parser check < ping.json
## Unreleased
-## 17.0.1
-
-- BUGFIX: Fix missing `ping_arg` "`uploader_capabilities`" in util.py ([#786](https://github.com/mozilla/glean_parser/pull/786))
-
-## 17.0.0
-
-- BREAKING CHANGE: Support `uploader_capabilities` for pings ([bug 1920732](https://bugzilla.mozilla.org/show_bug.cgi?id=1920732))
-
## 16.2.0
- New lint: error when there are metrics whose names are too similar ([bug 1934099](https://bugzilla.mozilla.org/show_bug.cgi?id=1934099))
diff --git a/third_party/python/glean_parser/glean_parser-17.0.1.dist-info/RECORD b/third_party/python/glean_parser/glean_parser-16.2.0.dist-info/RECORD
similarity index 79%
rename from third_party/python/glean_parser/glean_parser-17.0.1.dist-info/RECORD
rename to third_party/python/glean_parser/glean_parser-16.2.0.dist-info/RECORD
index f5c5be12b7d9..5e5a6212f1d1 100644
--- a/third_party/python/glean_parser/glean_parser-17.0.1.dist-info/RECORD
+++ b/third_party/python/glean_parser/glean_parser-16.2.0.dist-info/RECORD
@@ -10,7 +10,7 @@ glean_parser/lint.py,sha256=ktdkR2GjR0wuR4IpLTiZ-q17vI4dk_Nebp4XU3pqzsk,21103
glean_parser/markdown.py,sha256=GkCr1CrV6mnRQseT6FO1-JJ7Eup8X3lxUfRMBTxXpe4,9066
glean_parser/metrics.py,sha256=3_ERPI63CsH_QvXVKNBVKIQTv4KWir2SfSbtn6J8a9Q,15842
glean_parser/parser.py,sha256=3bJwUGYhnzIHYJ7UBdO63Oi0_n1_Twvr2IOUUe_koME,18132
-glean_parser/pings.py,sha256=l4hKmnKigS46vlHFI4aWueKVHcZQL36QfhK0VC8OiFA,3924
+glean_parser/pings.py,sha256=xUgAunjluLbdLtcSQiUL6HDO5aLYM75MAoIT9H8-lOY,3729
glean_parser/python_server.py,sha256=ERpYcbSwF19xKFagxX0mZAvlR1y6D7Ah5DSvW8LipCY,4791
glean_parser/ruby_server.py,sha256=e5lkfcLQAUMUBQDCjqNU82LkdUzT5x-G6HOnsUInbsU,5190
glean_parser/rust.py,sha256=u1IeluyxFj6NrZZrBQwwa0nWz0TABv93lYxVBx0aN3I,7334
@@ -19,12 +19,12 @@ glean_parser/swift.py,sha256=paUzF6tItdktFwIQYCKsYpqXfn8zxR2coU_jMYrmwlc,8957
glean_parser/tags.py,sha256=bemKYvcbMO4JrghiNSe-A4BNNDtx_FlUPkgrPPJy84Y,1391
glean_parser/translate.py,sha256=itObn41X63koLYjdppLiywIFzPWDvPEx7C13efWpDSE,8444
glean_parser/translation_options.py,sha256=Lxzr6G7MP0tC_ZYlZXftS4j0SLiqO-5mGVTEc7ggXis,2037
-glean_parser/util.py,sha256=yTx_-Q8w8rNNSZ_xbno0B90WR7pZZptG2bUWU0sCHZk,16580
+glean_parser/util.py,sha256=YigUFMhzbXucNx3_bU-SAFWSnnyKS73pQWMBqxRGNn8,16551
glean_parser/validate_ping.py,sha256=0TNvILH6dtzJDys3W8Kqorw6kk03me73OCUDtpoHcXU,2118
glean_parser/schemas/metrics.1-0-0.schema.yaml,sha256=cND3cvi6iBfPUVmtfIBQfGJV9AALpbvN7nu8E33_J-o,19566
glean_parser/schemas/metrics.2-0-0.schema.yaml,sha256=ieFMxezBuySCvUorx8eGqXRUcoeTql4Z9FxkbkG9XFQ,26715
glean_parser/schemas/pings.1-0-0.schema.yaml,sha256=hwCnsKpEysmrmVp-QHGBArEkVY3vaU1rVsxlTwhAzws,4315
-glean_parser/schemas/pings.2-0-0.schema.yaml,sha256=FQBsEt8Eg_ypBUnhJ1THZWXIgtuiYfyXcp_J9pGJUnE,7001
+glean_parser/schemas/pings.2-0-0.schema.yaml,sha256=5k_OKfNkxHed4j1kMFGkxW1yDpvkqst-Vogig_W_JJU,6420
glean_parser/schemas/tags.1-0-0.schema.yaml,sha256=OGXIJlvvVW1vaqB_NVZnwKeZ-sLlfH57vjBSHbj6DNI,1231
glean_parser/templates/data_review.jinja2,sha256=jeYU29T1zLSyu9fKBBFu5BFPfIw8_hmOUXw8RXhRXK8,3287
glean_parser/templates/go_server.jinja2,sha256=t9ZS5AF5JwoVExkSxDoRQdBoTQv1bchJ7oTRB9eP5FI,9241
@@ -37,13 +37,13 @@ glean_parser/templates/markdown.jinja2,sha256=vAHHGGm28HRDPd3zO_wQMAUZIuxE9uQ7hl
glean_parser/templates/python_server.jinja2,sha256=gu2C1rkn760IqBCG2SWaK7o32T1ify94wDEsudLPUg8,7260
glean_parser/templates/qmldir.jinja2,sha256=m6IGsp-tgTiOfQ7VN8XW6GqX0gJqJkt3B6Pkaul6FVo,156
glean_parser/templates/ruby_server.jinja2,sha256=B0pbuld3j_0s7uMjoaCo8_6ehJUZeTXZlZZ9QRS4J_8,6252
-glean_parser/templates/rust.jinja2,sha256=hX8p5HXQNEeVz_sF6SDIyUNus6CcaCG9KWLl6uQLiOU,7285
+glean_parser/templates/rust.jinja2,sha256=mdYEsldHLMb2Hkzly-NJzkFINg7qMZo7MjDI_2ZqS3U,7247
glean_parser/templates/rust_server.jinja2,sha256=JJdeU5jiWx9aWpF0qiXIYztJ14OQKxV3VFdAbCrtR_0,12841
-glean_parser/templates/swift.jinja2,sha256=L_JpwGLVzmOf1FYLoCzFu_RnGTExCIDup7iR1tWzD3o,6912
-glean_parser-17.0.1.dist-info/AUTHORS.md,sha256=yxgj8MioO4wUnrh0gmfb8l3DJJrf-l4HmmEDbQsbbNI,455
-glean_parser-17.0.1.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
-glean_parser-17.0.1.dist-info/METADATA,sha256=fX3p807Z0tgo8pjTGNX4Fxw3gPXT6dJrW_Cw7lnAE_4,36761
-glean_parser-17.0.1.dist-info/WHEEL,sha256=cVxcB9AmuTcXqmwrtPhNK88dr7IR_b6qagTj0UvIEbY,91
-glean_parser-17.0.1.dist-info/entry_points.txt,sha256=mf9d3sv8BwSjjR58x9KDnpVkONCnv3fPQC2NjJl15Xg,68
-glean_parser-17.0.1.dist-info/top_level.txt,sha256=q7T3duD-9tYZFyDry6Wv2LcdMsK2jGnzdDFhxWcT2Z8,13
-glean_parser-17.0.1.dist-info/RECORD,,
+glean_parser/templates/swift.jinja2,sha256=EAenC__ReGA2A4tn-ui3n849PVHxE5IndXUivXDh8AU,6841
+glean_parser-16.2.0.dist-info/AUTHORS.md,sha256=yxgj8MioO4wUnrh0gmfb8l3DJJrf-l4HmmEDbQsbbNI,455
+glean_parser-16.2.0.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
+glean_parser-16.2.0.dist-info/METADATA,sha256=rY8wmuwWXN1DnL3-VguHQr08H2WUoNlzvPgrfYIkVkU,36477
+glean_parser-16.2.0.dist-info/WHEEL,sha256=cVxcB9AmuTcXqmwrtPhNK88dr7IR_b6qagTj0UvIEbY,91
+glean_parser-16.2.0.dist-info/entry_points.txt,sha256=mf9d3sv8BwSjjR58x9KDnpVkONCnv3fPQC2NjJl15Xg,68
+glean_parser-16.2.0.dist-info/top_level.txt,sha256=q7T3duD-9tYZFyDry6Wv2LcdMsK2jGnzdDFhxWcT2Z8,13
+glean_parser-16.2.0.dist-info/RECORD,,
diff --git a/third_party/python/glean_parser/glean_parser-17.0.1.dist-info/WHEEL b/third_party/python/glean_parser/glean_parser-16.2.0.dist-info/WHEEL
similarity index 100%
rename from third_party/python/glean_parser/glean_parser-17.0.1.dist-info/WHEEL
rename to third_party/python/glean_parser/glean_parser-16.2.0.dist-info/WHEEL
diff --git a/third_party/python/glean_parser/glean_parser-17.0.1.dist-info/entry_points.txt b/third_party/python/glean_parser/glean_parser-16.2.0.dist-info/entry_points.txt
similarity index 100%
rename from third_party/python/glean_parser/glean_parser-17.0.1.dist-info/entry_points.txt
rename to third_party/python/glean_parser/glean_parser-16.2.0.dist-info/entry_points.txt
diff --git a/third_party/python/glean_parser/glean_parser-17.0.1.dist-info/top_level.txt b/third_party/python/glean_parser/glean_parser-16.2.0.dist-info/top_level.txt
similarity index 100%
rename from third_party/python/glean_parser/glean_parser-17.0.1.dist-info/top_level.txt
rename to third_party/python/glean_parser/glean_parser-16.2.0.dist-info/top_level.txt
diff --git a/third_party/python/glean_parser/glean_parser/pings.py b/third_party/python/glean_parser/glean_parser/pings.py
index 5091940332df..f773c4ef9e8c 100644
--- a/third_party/python/glean_parser/glean_parser/pings.py
+++ b/third_party/python/glean_parser/glean_parser/pings.py
@@ -26,7 +26,6 @@ class Ping:
notification_emails: List[str],
metadata: Optional[Dict] = None,
data_reviews: Optional[List[str]] = None,
- uploader_capabilities: Optional[List[str]] = None,
include_client_id: bool = False,
send_if_empty: bool = False,
reasons: Optional[Dict[str, str]] = None,
@@ -58,9 +57,6 @@ class Ping:
if data_reviews is None:
data_reviews = []
self.data_reviews = data_reviews
- if not uploader_capabilities:
- uploader_capabilities = []
- self.uploader_capabilities = uploader_capabilities
self.include_client_id = include_client_id
self.send_if_empty = send_if_empty
if reasons is None:
diff --git a/third_party/python/glean_parser/glean_parser/schemas/pings.2-0-0.schema.yaml b/third_party/python/glean_parser/glean_parser/schemas/pings.2-0-0.schema.yaml
index 8b45b7b773dd..91d1efc44192 100644
--- a/third_party/python/glean_parser/glean_parser/schemas/pings.2-0-0.schema.yaml
+++ b/third_party/python/glean_parser/glean_parser/schemas/pings.2-0-0.schema.yaml
@@ -127,21 +127,6 @@ additionalProperties:
only takes effect when `metadata.include_info_sections` is `true`.
type: boolean
- uploader_capabilities:
- title: Uploader Capabilities
- description: |
- **Optional.**
-
- An optional list of capability strings that the ping uploader must be
- capable of supporting in order to upload this ping.
- These are supplied exactly as defined (including order) to the uploader
- every time upload is attempted for this ping.
- The uploader must only attempt upload if it satisfies the supplied
- capabilities. If not, it must refuse to upload the ping.
- type: [array, "null"]
- items:
- type: string
-
send_if_empty:
title: Send if empty
description: |
diff --git a/third_party/python/glean_parser/glean_parser/templates/rust.jinja2 b/third_party/python/glean_parser/glean_parser/templates/rust.jinja2
index d209c6ccabcc..59ff21cd1134 100644
--- a/third_party/python/glean_parser/glean_parser/templates/rust.jinja2
+++ b/third_party/python/glean_parser/glean_parser/templates/rust.jinja2
@@ -97,7 +97,7 @@ CommonMetricData {
/// {{ obj.description|wordwrap() | replace('\n', '\n/// ') }}
#[rustfmt::skip]
pub static {{ obj.name|snake_case }}: ::glean::private::__export::Lazy<::glean::private::PingType> =
- ::glean::private::__export::Lazy::new(|| ::glean::private::PingType::new("{{ obj.name }}", {{ obj.include_client_id|rust }}, {{ obj.send_if_empty|rust }}, {{ obj.precise_timestamps|rust }}, {{ obj.include_info_sections|rust }}, {{ obj.enabled|rust }}, {{ obj.schedules_pings|rust }}, {{ obj.reason_codes|rust }}, {{ obj.follows_collection_enabled|rust }}, {{ obj.uploader_capabilities|rust }}));
+ ::glean::private::__export::Lazy::new(|| ::glean::private::PingType::new("{{ obj.name }}", {{ obj.include_client_id|rust }}, {{ obj.send_if_empty|rust }}, {{ obj.precise_timestamps|rust }}, {{ obj.include_info_sections|rust }}, {{ obj.enabled|rust }}, {{ obj.schedules_pings|rust }}, {{ obj.reason_codes|rust }}, {{ obj.follows_collection_enabled|rust }}));
{% endfor %}
{% else %}
pub mod {{ category.name|snake_case }} {
diff --git a/third_party/python/glean_parser/glean_parser/templates/swift.jinja2 b/third_party/python/glean_parser/glean_parser/templates/swift.jinja2
index ea079f28cd6a..6dc364d4cd7c 100644
--- a/third_party/python/glean_parser/glean_parser/templates/swift.jinja2
+++ b/third_party/python/glean_parser/glean_parser/templates/swift.jinja2
@@ -144,8 +144,7 @@ extension {{ namespace }} {
enabled: {{obj.enabled|swift}},
schedulesPings: {{obj.schedules_pings|swift}},
reasonCodes: {{obj.reason_codes|swift}},
- followsCollectionEnabled: {{obj.follows_collection_enabled|swift}},
- uploaderCapabilities: {{obj.uploader_capabilities|swift}}
+ followsCollectionEnabled: {{obj.follows_collection_enabled|swift}}
)
{% endfor %}
diff --git a/third_party/python/glean_parser/glean_parser/util.py b/third_party/python/glean_parser/glean_parser/util.py
index 561b3932d979..88d624dd10e2 100644
--- a/third_party/python/glean_parser/glean_parser/util.py
+++ b/third_party/python/glean_parser/glean_parser/util.py
@@ -546,7 +546,6 @@ ping_args = [
"schedules_pings",
"reason_codes",
"follows_collection_enabled",
- "uploader_capabilities",
]
diff --git a/third_party/python/pyproject.toml b/third_party/python/pyproject.toml
index 489a71851a3f..cef837bb0726 100644
--- a/third_party/python/pyproject.toml
+++ b/third_party/python/pyproject.toml
@@ -21,7 +21,7 @@ dependencies = [
"filelock~=3.6",
"fluent-migrate==0.13.2",
"fluent-syntax==0.19.0",
- "glean-parser==17.0.1",
+ "glean-parser==16.2.0",
"importlib-metadata==6.0.0",
# Required for compatibility with Flask >= 2 in tools/tryselect/selectors/chooser
"jinja2==3.1.2",
diff --git a/third_party/python/requirements.txt b/third_party/python/requirements.txt
index 0b98e3099cc5..5b668c5429cc 100644
--- a/third_party/python/requirements.txt
+++ b/third_party/python/requirements.txt
@@ -358,9 +358,9 @@ gitignorant==0.3.1 \
giturlparse==0.12.0 \
--hash=sha256:c0fff7c21acc435491b1779566e038757a205c1ffdcb47e4f81ea52ad8c3859a \
--hash=sha256:412b74f2855f1da2fefa89fd8dde62df48476077a72fc19b62039554d27360eb
-glean-parser==17.0.1 \
- --hash=sha256:764a3b5aaa22d6100100d97a6ce58515687f4e0d06660180024e3101a507f580 \
- --hash=sha256:922b75be353461875802a50dfd052215414dbc2965d153b95ff31d85081c40b7
+glean-parser==16.2.0 \
+ --hash=sha256:4f6794b41b6e69cbceaee2a5b835a74cdfe443d1fbf4e2656ac40ba72cc27458 \
+ --hash=sha256:dc521d87b6d9c04f2006509be0aa2cdf0e923338521d9acad221d4e23caaace8
idna==3.10 \
--hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \
--hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3
diff --git a/third_party/python/uv.lock b/third_party/python/uv.lock
index 506c752a66fa..c6880d1bebbd 100644
--- a/third_party/python/uv.lock
+++ b/third_party/python/uv.lock
@@ -598,7 +598,7 @@ wheels = [
[[package]]
name = "glean-parser"
-version = "17.0.1"
+version = "16.2.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
@@ -608,9 +608,9 @@ dependencies = [
{ name = "platformdirs" },
{ name = "pyyaml" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/7d/fd/895160c0fbc1ced0803bd19a2c2473f537efd8e6afa38aae2af12d5535b9/glean_parser-17.0.1.tar.gz", hash = "sha256:764a3b5aaa22d6100100d97a6ce58515687f4e0d06660180024e3101a507f580", size = 290273 }
+sdist = { url = "https://files.pythonhosted.org/packages/88/74/c9d3ca070ca08399b5ee32d4f85adab9fe95faf44785655fbd283c20f4cb/glean_parser-16.2.0.tar.gz", hash = "sha256:4f6794b41b6e69cbceaee2a5b835a74cdfe443d1fbf4e2656ac40ba72cc27458", size = 289721 }
wheels = [
- { url = "https://files.pythonhosted.org/packages/2d/02/dcc2f155ef74fb2c83d51b7170236f4648adb49f645bac4e5786dc3ac77c/glean_parser-17.0.1-py3-none-any.whl", hash = "sha256:922b75be353461875802a50dfd052215414dbc2965d153b95ff31d85081c40b7", size = 124106 },
+ { url = "https://files.pythonhosted.org/packages/7c/2b/b7cedca86929673c89f4350e257ab720a6ea014a684b99c2814ad279b716/glean_parser-16.2.0-py3-none-any.whl", hash = "sha256:dc521d87b6d9c04f2006509be0aa2cdf0e923338521d9acad221d4e23caaace8", size = 123785 },
]
[[package]]
@@ -925,7 +925,7 @@ requires-dist = [
{ name = "filelock", specifier = "~=3.6" },
{ name = "fluent-migrate", specifier = "==0.13.2" },
{ name = "fluent-syntax", specifier = "==0.19.0" },
- { name = "glean-parser", specifier = "==17.0.1" },
+ { name = "glean-parser", specifier = "==16.2.0" },
{ name = "importlib-metadata", specifier = "==6.0.0" },
{ name = "jinja2", specifier = "==3.1.2" },
{ name = "jsmin", specifier = "==3.0.0" },
diff --git a/third_party/python/uv.lock.hash b/third_party/python/uv.lock.hash
index 54259ca4172b..c69b48504c75 100644
--- a/third_party/python/uv.lock.hash
+++ b/third_party/python/uv.lock.hash
@@ -1 +1 @@
-6811e7a47781ee2d8b491fa59de0ad7a40375d04beeab0f203d5e423f0db5e48
\ No newline at end of file
+dafec412f602d87682118ce75e91a25ed9ad9d9841b6667a3e9727dad246839b
\ No newline at end of file
diff --git a/third_party/rust/askama/.cargo-checksum.json b/third_party/rust/askama/.cargo-checksum.json
new file mode 100644
index 000000000000..889c533360bb
--- /dev/null
+++ b/third_party/rust/askama/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"fbab611fc3ba2204942300a534b4f030460f33b0606fa50b9ad08ea567ba81e8","LICENSE-APACHE":"87cb0d734c723c083e51c825930ff42bce28596b52dee15567f6b28f19c195e3","LICENSE-MIT":"df20e0180764bf5bd76f74d47bc9e8c0069a666401629c390003a1d5eba99c92","README.md":"6a4430cf614ff9d36ba01463a8f94085ed4b0889fd719793fa914568247acce2","src/error.rs":"1e3f8020092469090f314f60685c077347e730a88222dfdaa38aaf2396507532","src/filters/json.rs":"dccd0a3f1017da9f6cd9650bd39eb1670f4a9833d2f0968614cd8cd65d18a9dd","src/filters/mod.rs":"903d09599e62f56657b00b2aa577c9d2f963348dd12a1029e90e68549f78b1db","src/filters/yaml.rs":"4e641bedbe3666b334836fb6603fe7f718f7e90d8e33419acca624f50a580c3f","src/helpers.rs":"76e0422acd4ccba7b1735d6ab7622a93f6ec5a2fa89531111d877266784d5334","src/lib.rs":"3a6e4d0b3aadc7c391cbe59416504a719406303726122779281a3af1a7ad76a4"},"package":"47cbc3cf73fa8d9833727bbee4835ba5c421a0d65b72daf9a7b5d0e0f9cfb57e"}
\ No newline at end of file
diff --git a/third_party/rust/askama/Cargo.toml b/third_party/rust/askama/Cargo.toml
new file mode 100644
index 000000000000..15129d2dd260
--- /dev/null
+++ b/third_party/rust/askama/Cargo.toml
@@ -0,0 +1,126 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+rust-version = "1.58"
+name = "askama"
+version = "0.12.0"
+description = "Type-safe, compiled Jinja-like templates for Rust"
+homepage = "https://github.com/djc/askama"
+documentation = "https://docs.rs/askama"
+readme = "README.md"
+keywords = [
+ "markup",
+ "template",
+ "jinja2",
+ "html",
+]
+categories = ["template-engine"]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/djc/askama"
+resolver = "1"
+
+[package.metadata.docs.rs]
+features = [
+ "config",
+ "humansize",
+ "num-traits",
+ "serde-json",
+ "serde-yaml",
+]
+
+[dependencies.askama_derive]
+version = "0.12.0"
+
+[dependencies.askama_escape]
+version = "0.10.3"
+
+[dependencies.comrak]
+version = "0.16"
+optional = true
+default-features = false
+
+[dependencies.dep_humansize]
+version = "2"
+optional = true
+package = "humansize"
+
+[dependencies.dep_num_traits]
+version = "0.2.6"
+optional = true
+package = "num-traits"
+
+[dependencies.percent-encoding]
+version = "2.1.0"
+optional = true
+
+[dependencies.serde]
+version = "1.0"
+features = ["derive"]
+optional = true
+
+[dependencies.serde_json]
+version = "1.0"
+optional = true
+
+[dependencies.serde_yaml]
+version = "0.9"
+optional = true
+
+[features]
+config = ["askama_derive/config"]
+default = [
+ "config",
+ "humansize",
+ "num-traits",
+ "urlencode",
+]
+humansize = [
+ "askama_derive/humansize",
+ "dep_humansize",
+]
+markdown = [
+ "askama_derive/markdown",
+ "comrak",
+]
+mime = []
+mime_guess = []
+num-traits = [
+ "askama_derive/num-traits",
+ "dep_num_traits",
+]
+serde-json = [
+ "askama_derive/serde-json",
+ "askama_escape/json",
+ "serde",
+ "serde_json",
+]
+serde-yaml = [
+ "askama_derive/serde-yaml",
+ "serde",
+ "serde_yaml",
+]
+urlencode = [
+ "askama_derive/urlencode",
+ "percent-encoding",
+]
+with-actix-web = ["askama_derive/with-actix-web"]
+with-axum = ["askama_derive/with-axum"]
+with-gotham = ["askama_derive/with-gotham"]
+with-hyper = ["askama_derive/with-hyper"]
+with-mendes = ["askama_derive/with-mendes"]
+with-rocket = ["askama_derive/with-rocket"]
+with-tide = ["askama_derive/with-tide"]
+with-warp = ["askama_derive/with-warp"]
+
+[badges.maintenance]
+status = "actively-developed"
diff --git a/third_party/rust/rinja/LICENSE-APACHE b/third_party/rust/askama/LICENSE-APACHE
similarity index 100%
rename from third_party/rust/rinja/LICENSE-APACHE
rename to third_party/rust/askama/LICENSE-APACHE
diff --git a/third_party/rust/rinja/LICENSE-MIT b/third_party/rust/askama/LICENSE-MIT
similarity index 100%
rename from third_party/rust/rinja/LICENSE-MIT
rename to third_party/rust/askama/LICENSE-MIT
diff --git a/third_party/rust/askama/README.md b/third_party/rust/askama/README.md
new file mode 100644
index 000000000000..9055004f066e
--- /dev/null
+++ b/third_party/rust/askama/README.md
@@ -0,0 +1,96 @@
+# Askama
+
+[![Documentation](https://docs.rs/askama/badge.svg)](https://docs.rs/askama/)
+[![Latest version](https://img.shields.io/crates/v/askama.svg)](https://crates.io/crates/askama)
+[![Build Status](https://github.com/djc/askama/workflows/CI/badge.svg)](https://github.com/djc/askama/actions?query=workflow%3ACI)
+[![Chat](https://badges.gitter.im/djc/askama.svg)](https://gitter.im/djc/askama)
+
+Askama implements a template rendering engine based on [Jinja](https://jinja.palletsprojects.com/).
+It generates Rust code from your templates at compile time
+based on a user-defined `struct` to hold the template's context.
+See below for an example, or read [the book][docs].
+
+**"Pretty exciting. I would love to use this already."** --
+[Armin Ronacher][mitsuhiko], creator of Jinja
+
+All feedback welcome. Feel free to file bugs, requests for documentation and
+any other feedback to the [issue tracker][issues] or [tweet me][twitter].
+
+Askama was created by and is maintained by Dirkjan Ochtman. If you are in a
+position to support ongoing maintenance and further development or use it
+in a for-profit context, please consider supporting my open source work on
+[Patreon][patreon].
+
+### Feature highlights
+
+* Construct templates using a familiar, easy-to-use syntax
+* Benefit from the safety provided by Rust's type system
+* Template code is compiled into your crate for [optimal performance][benchmarks]
+* Optional built-in support for Actix, Axum, Gotham, Mendes, Rocket, tide, and warp web frameworks
+* Debugging features to assist you in template development
+* Templates must be valid UTF-8 and produce UTF-8 when rendered
+* IDE support available in [JetBrains products](https://plugins.jetbrains.com/plugin/16591-askama-template-support)
+* Works on stable Rust
+
+### Supported in templates
+
+* Template inheritance
+* Loops, if/else statements and include support
+* Macro support
+* Variables (no mutability allowed)
+* Some built-in filters, and the ability to use your own
+* Whitespace suppressing with '-' markers
+* Opt-out HTML escaping
+* Syntax customization
+
+[docs]: https://djc.github.io/askama/
+[fafhrd91]: https://github.com/fafhrd91
+[mitsuhiko]: http://lucumr.pocoo.org/
+[issues]: https://github.com/djc/askama/issues
+[twitter]: https://twitter.com/djco/
+[patreon]: https://www.patreon.com/dochtman
+[benchmarks]: https://github.com/djc/template-benchmarks-rs
+
+
+How to get started
+------------------
+
+First, add the following to your crate's `Cargo.toml`:
+
+```toml
+# in section [dependencies]
+askama = "0.11.2"
+
+```
+
+Now create a directory called `templates` in your crate root.
+In it, create a file called `hello.html`, containing the following:
+
+```
+Hello, {{ name }}!
+```
+
+In any Rust file inside your crate, add the following:
+
+```rust
+use askama::Template; // bring trait in scope
+
+#[derive(Template)] // this will generate the code...
+#[template(path = "hello.html")] // using the template in this path, relative
+ // to the `templates` dir in the crate root
+struct HelloTemplate<'a> { // the name of the struct can be anything
+ name: &'a str, // the field name should match the variable name
+ // in your template
+}
+
+fn main() {
+ let hello = HelloTemplate { name: "world" }; // instantiate your struct
+ println!("{}", hello.render().unwrap()); // then render it.
+}
+```
+
+You should now be able to compile and run this code.
+
+Review the [test cases] for more examples.
+
+[test cases]: https://github.com/djc/askama/tree/main/testing
diff --git a/third_party/rust/rinja/src/error.rs b/third_party/rust/askama/src/error.rs
similarity index 67%
rename from third_party/rust/rinja/src/error.rs
rename to third_party/rust/askama/src/error.rs
index e0648394420b..406b1485a798 100644
--- a/third_party/rust/rinja/src/error.rs
+++ b/third_party/rust/askama/src/error.rs
@@ -1,10 +1,8 @@
-use std::convert::Infallible;
use std::fmt::{self, Display};
-/// The [`Result`](std::result::Result) type with [`Error`] as default error type
-pub type Result<I, E = Error> = std::result::Result<I, E>;
+pub type Result<I, E = Error> = ::std::result::Result<I, E>;
-/// rinja error type
+/// askama error type
///
/// # Feature Interaction
///
@@ -23,25 +21,34 @@ pub type Result = std::result::Result;
/// using a adapter the benefits `failure` would
/// bring to this crate are small, which is why
/// `std::error::Error` was used.
+///
#[non_exhaustive]
#[derive(Debug)]
pub enum Error {
/// formatting error
- Fmt,
+ Fmt(fmt::Error),
+
/// an error raised by using `?` in a template
 Custom(Box<dyn std::error::Error + Send + Sync>),
+
/// json conversion error
#[cfg(feature = "serde_json")]
- Json(serde_json::Error),
+ Json(::serde_json::Error),
+
+ /// yaml conversion error
+ #[cfg(feature = "serde_yaml")]
+ Yaml(::serde_yaml::Error),
}
impl std::error::Error for Error {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match *self {
- Error::Fmt => None,
+ Error::Fmt(ref err) => Some(err),
Error::Custom(ref err) => Some(err.as_ref()),
#[cfg(feature = "serde_json")]
Error::Json(ref err) => Some(err),
+ #[cfg(feature = "serde_yaml")]
+ Error::Yaml(ref err) => Some(err),
}
}
}
@@ -49,33 +56,33 @@ impl std::error::Error for Error {
impl Display for Error {
fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
- Error::Fmt => write!(formatter, "formatting error"),
+ Error::Fmt(err) => write!(formatter, "formatting error: {err}"),
Error::Custom(err) => write!(formatter, "{err}"),
#[cfg(feature = "serde_json")]
Error::Json(err) => write!(formatter, "json conversion error: {err}"),
+ #[cfg(feature = "serde_yaml")]
+ Error::Yaml(err) => write!(formatter, "yaml conversion error: {}", err),
}
}
}
impl From<fmt::Error> for Error {
- #[inline]
- fn from(_: fmt::Error) -> Self {
- Error::Fmt
+ fn from(err: fmt::Error) -> Self {
+ Error::Fmt(err)
}
}
#[cfg(feature = "serde_json")]
-impl From<serde_json::Error> for Error {
- #[inline]
- fn from(err: serde_json::Error) -> Self {
+impl From<::serde_json::Error> for Error {
+ fn from(err: ::serde_json::Error) -> Self {
Error::Json(err)
}
}
-impl From<Infallible> for Error {
- #[inline]
- fn from(value: Infallible) -> Self {
- match value {}
+#[cfg(feature = "serde_yaml")]
+impl From<::serde_yaml::Error> for Error {
+ fn from(err: ::serde_yaml::Error) -> Self {
+ Error::Yaml(err)
}
}
@@ -83,7 +90,6 @@ impl From<Infallible> for Error {
mod tests {
use super::Error;
- #[allow(dead_code)]
trait AssertSendSyncStatic: Send + Sync + 'static {}
impl AssertSendSyncStatic for Error {}
}
diff --git a/third_party/rust/askama/src/filters/json.rs b/third_party/rust/askama/src/filters/json.rs
new file mode 100644
index 000000000000..809be91b0c80
--- /dev/null
+++ b/third_party/rust/askama/src/filters/json.rs
@@ -0,0 +1,44 @@
+use crate::error::{Error, Result};
+use askama_escape::JsonEscapeBuffer;
+use serde::Serialize;
+use serde_json::to_writer_pretty;
+
+/// Serialize to JSON (requires `json` feature)
+///
+/// The generated string does not contain ampersands `&`, chevrons `< >`, or apostrophes `'`.
+/// To use it in a `<script>` you can combine it with the safe filter:
+///
+/// ``` html
+/// <script>
+/// var data = {{data|json|safe}};
+/// </script>
+/// ```
+///
+/// To use it in HTML attributes, you can either use it in quotation marks `"{{data|json}}"` as is,
+/// or in apostrophes with the (optional) safe filter `'{{data|json|safe}}'`.
+/// In HTML texts the output of e.g. `<pre>{{data|json|safe}}</pre>` is safe, too.
+pub fn json<S: Serialize>(s: S) -> Result<String> {
+ let mut writer = JsonEscapeBuffer::new();
+ to_writer_pretty(&mut writer, &s).map_err(Error::from)?;
+ Ok(writer.finish())
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_json() {
+ assert_eq!(json(true).unwrap(), "true");
+ assert_eq!(json("foo").unwrap(), r#""foo""#);
+ assert_eq!(json(true).unwrap(), "true");
+ assert_eq!(json("foo").unwrap(), r#""foo""#);
+ assert_eq!(
+ json(vec!["foo", "bar"]).unwrap(),
+ r#"[
+ "foo",
+ "bar"
+]"#
+ );
+ }
+}
diff --git a/third_party/rust/askama/src/filters/mod.rs b/third_party/rust/askama/src/filters/mod.rs
new file mode 100644
index 000000000000..f76a463c5d98
--- /dev/null
+++ b/third_party/rust/askama/src/filters/mod.rs
@@ -0,0 +1,640 @@
+//! Module for built-in filter functions
+//!
+//! Contains all the built-in filter functions for use in templates.
+//! You can define your own filters, as well.
+//! For more information, read the [book](https://djc.github.io/askama/filters.html).
+#![allow(clippy::trivially_copy_pass_by_ref)]
+
+use std::fmt::{self, Write};
+
+#[cfg(feature = "serde-json")]
+mod json;
+#[cfg(feature = "serde-json")]
+pub use self::json::json;
+
+#[cfg(feature = "serde-yaml")]
+mod yaml;
+#[cfg(feature = "serde-yaml")]
+pub use self::yaml::yaml;
+
+#[allow(unused_imports)]
+use crate::error::Error::Fmt;
+use askama_escape::{Escaper, MarkupDisplay};
+#[cfg(feature = "humansize")]
+use dep_humansize::{format_size_i, ToF64, DECIMAL};
+#[cfg(feature = "num-traits")]
+use dep_num_traits::{cast::NumCast, Signed};
+#[cfg(feature = "percent-encoding")]
+use percent_encoding::{utf8_percent_encode, AsciiSet, NON_ALPHANUMERIC};
+
+use super::Result;
+
+#[cfg(feature = "percent-encoding")]
+// Urlencode char encoding set. Only the characters in the unreserved set don't
+// have any special purpose in any part of a URI and can be safely left
+// unencoded as specified in https://tools.ietf.org/html/rfc3986.html#section-2.3
+const URLENCODE_STRICT_SET: &AsciiSet = &NON_ALPHANUMERIC
+ .remove(b'_')
+ .remove(b'.')
+ .remove(b'-')
+ .remove(b'~');
+
+#[cfg(feature = "percent-encoding")]
+// Same as URLENCODE_STRICT_SET, but preserves forward slashes for encoding paths
+const URLENCODE_SET: &AsciiSet = &URLENCODE_STRICT_SET.remove(b'/');
+
+/// Marks a string (or other `Display` type) as safe
+///
+/// Use this is you want to allow markup in an expression, or if you know
+/// that the expression's contents don't need to be escaped.
+///
+/// Askama will automatically insert the first (`Escaper`) argument,
+/// so this filter only takes a single argument of any type that implements
+/// `Display`.
+pub fn safe<E, T>(e: E, v: T) -> Result<MarkupDisplay<E, T>>
+where
+ E: Escaper,
+ T: fmt::Display,
+{
+ Ok(MarkupDisplay::new_safe(v, e))
+}
+
+/// Escapes strings according to the escape mode.
+///
+/// Askama will automatically insert the first (`Escaper`) argument,
+/// so this filter only takes a single argument of any type that implements
+/// `Display`.
+///
+/// It is possible to optionally specify an escaper other than the default for
+/// the template's extension, like `{{ val|escape("txt") }}`.
+pub fn escape<E, T>(e: E, v: T) -> Result<MarkupDisplay<E, T>>
+where
+ E: Escaper,
+ T: fmt::Display,
+{
+ Ok(MarkupDisplay::new_unsafe(v, e))
+}
+
+#[cfg(feature = "humansize")]
+/// Returns adequate string representation (in KB, ..) of number of bytes
+pub fn filesizeformat(b: &(impl ToF64 + Copy)) -> Result<String> {
+ Ok(format_size_i(*b, DECIMAL))
+}
+
+#[cfg(feature = "percent-encoding")]
+/// Percent-encodes the argument for safe use in URI; does not encode `/`.
+///
+/// This should be safe for all parts of URI (paths segments, query keys, query
+/// values). In the rare case that the server can't deal with forward slashes in
+/// the query string, use [`urlencode_strict`], which encodes them as well.
+///
+/// Encodes all characters except ASCII letters, digits, and `_.-~/`. In other
+/// words, encodes all characters which are not in the unreserved set,
+/// as specified by [RFC3986](https://tools.ietf.org/html/rfc3986#section-2.3),
+/// with the exception of `/`.
+///
+/// ```none,ignore
+/// <a href="/metro{{ "/stations/Château d'Eau"|urlencode }}">Station</a>
+/// <a href="/page?text={{ "look, unicode/emojis ✨"|urlencode }}">Page</a>
+/// ```
+///
+/// To encode `/` as well, see [`urlencode_strict`](./fn.urlencode_strict.html).
+///
+/// [`urlencode_strict`]: ./fn.urlencode_strict.html
+pub fn urlencode<T: fmt::Display>(s: T) -> Result<String> {
+ let s = s.to_string();
+ Ok(utf8_percent_encode(&s, URLENCODE_SET).to_string())
+}
+
+#[cfg(feature = "percent-encoding")]
+/// Percent-encodes the argument for safe use in URI; encodes `/`.
+///
+/// Use this filter for encoding query keys and values in the rare case that
+/// the server can't process them unencoded.
+///
+/// Encodes all characters except ASCII letters, digits, and `_.-~`. In other
+/// words, encodes all characters which are not in the unreserved set,
+/// as specified by [RFC3986](https://tools.ietf.org/html/rfc3986#section-2.3).
+///
+/// ```none,ignore
+/// <a href="/page?text={{ "look, unicode/emojis ✨"|urlencode_strict }}">Page</a>
+/// ```
+///
+/// If you want to preserve `/`, see [`urlencode`](./fn.urlencode.html).
+pub fn urlencode_strict<T: fmt::Display>(s: T) -> Result<String> {
+ let s = s.to_string();
+ Ok(utf8_percent_encode(&s, URLENCODE_STRICT_SET).to_string())
+}
+
+/// Formats arguments according to the specified format
+///
+/// The *second* argument to this filter must be a string literal (as in normal
+/// Rust). The two arguments are passed through to the `format!()`
+/// [macro](https://doc.rust-lang.org/stable/std/macro.format.html) by
+/// the Askama code generator, but the order is swapped to support filter
+/// composition.
+///
+/// ```ignore
+/// {{ value | fmt("{:?}") }}
+/// ```
+///
+/// Compare with [format](./fn.format.html).
+pub fn fmt() {}
+
+/// Formats arguments according to the specified format
+///
+/// The first argument to this filter must be a string literal (as in normal
+/// Rust). All arguments are passed through to the `format!()`
+/// [macro](https://doc.rust-lang.org/stable/std/macro.format.html) by
+/// the Askama code generator.
+///
+/// ```ignore
+/// {{ "{:?}{:?}" | format(value, other_value) }}
+/// ```
+///
+/// Compare with [fmt](./fn.fmt.html).
+pub fn format() {}
+
+/// Replaces line breaks in plain text with appropriate HTML
+///
+/// A single newline becomes an HTML line break `<br>` and a new line
+/// followed by a blank line becomes a paragraph break `<p>`.
+pub fn linebreaks<T: fmt::Display>(s: T) -> Result<String> {
+    let s = s.to_string();
+    let linebroken = s.replace("\n\n", "</p><p>").replace('\n', "<br/>");
+
+    Ok(format!("<p>{linebroken}</p>"))
+}
+
+/// Converts all newlines in a piece of plain text to HTML line breaks
+pub fn linebreaksbr<T: fmt::Display>(s: T) -> Result<String> {
+    let s = s.to_string();
+    Ok(s.replace('\n', "<br/>"))
+}
+
+/// Replaces only paragraph breaks in plain text with appropriate HTML
+///
+/// A new line followed by a blank line becomes a paragraph break `<p>`.
+/// Paragraph tags only wrap content; empty paragraphs are removed.
+/// No `<br/>` tags are added.
+pub fn paragraphbreaks<T: fmt::Display>(s: T) -> Result<String> {
+    let s = s.to_string();
+    let linebroken = s.replace("\n\n", "</p><p>").replace("<p></p>", "");
+
+    Ok(format!("<p>{linebroken}</p>"))
+}
+
+/// Converts to lowercase
+pub fn lower<T: fmt::Display>(s: T) -> Result<String> {
+ let s = s.to_string();
+ Ok(s.to_lowercase())
+}
+
+/// Alias for the `lower()` filter
+pub fn lowercase<T: fmt::Display>(s: T) -> Result<String> {
+ lower(s)
+}
+
+/// Converts to uppercase
+pub fn upper<T: fmt::Display>(s: T) -> Result<String> {
+ let s = s.to_string();
+ Ok(s.to_uppercase())
+}
+
+/// Alias for the `upper()` filter
+pub fn uppercase<T: fmt::Display>(s: T) -> Result<String> {
+ upper(s)
+}
+
+/// Strip leading and trailing whitespace
+pub fn trim<T: fmt::Display>(s: T) -> Result<String> {
+ let s = s.to_string();
+ Ok(s.trim().to_owned())
+}
+
+/// Limit string length, appends '...' if truncated
+pub fn truncate<T: fmt::Display>(s: T, len: usize) -> Result<String> {
+ let mut s = s.to_string();
+ if s.len() > len {
+ let mut real_len = len;
+ while !s.is_char_boundary(real_len) {
+ real_len += 1;
+ }
+ s.truncate(real_len);
+ s.push_str("...");
+ }
+ Ok(s)
+}
+
+/// Indent lines with `width` spaces
+pub fn indent<T: fmt::Display>(s: T, width: usize) -> Result<String> {
+ let s = s.to_string();
+
+ let mut indented = String::new();
+
+ for (i, c) in s.char_indices() {
+ indented.push(c);
+
+ if c == '\n' && i < s.len() - 1 {
+ for _ in 0..width {
+ indented.push(' ');
+ }
+ }
+ }
+
+ Ok(indented)
+}
+
+#[cfg(feature = "num-traits")]
+/// Casts number to f64
+pub fn into_f64<T>(number: T) -> Result<f64>
+where
+ T: NumCast,
+{
+ number.to_f64().ok_or(Fmt(fmt::Error))
+}
+
+#[cfg(feature = "num-traits")]
+/// Casts number to isize
+pub fn into_isize<T>(number: T) -> Result<isize>
+where
+ T: NumCast,
+{
+ number.to_isize().ok_or(Fmt(fmt::Error))
+}
+
+/// Joins iterable into a string separated by provided argument
+pub fn join<T, I, S>(input: I, separator: S) -> Result<String>
+where
+    T: fmt::Display,
+    I: Iterator<Item = T>,
+    S: AsRef<str>,
+{
+ let separator: &str = separator.as_ref();
+
+ let mut rv = String::new();
+
+ for (num, item) in input.enumerate() {
+ if num > 0 {
+ rv.push_str(separator);
+ }
+
+ write!(rv, "{item}")?;
+ }
+
+ Ok(rv)
+}
+
+#[cfg(feature = "num-traits")]
+/// Absolute value
+pub fn abs<T>(number: T) -> Result<T>
+where
+ T: Signed,
+{
+ Ok(number.abs())
+}
+
+/// Capitalize a value. The first character will be uppercase, all others lowercase.
+pub fn capitalize<T: fmt::Display>(s: T) -> Result<String> {
+ let s = s.to_string();
+ match s.chars().next() {
+ Some(c) => {
+ let mut replacement: String = c.to_uppercase().collect();
+ replacement.push_str(&s[c.len_utf8()..].to_lowercase());
+ Ok(replacement)
+ }
+ _ => Ok(s),
+ }
+}
+
+/// Centers the value in a field of a given width
+pub fn center(src: &dyn fmt::Display, dst_len: usize) -> Result<String> {
+ let src = src.to_string();
+ let len = src.len();
+
+ if dst_len <= len {
+ Ok(src)
+ } else {
+ let diff = dst_len - len;
+ let mid = diff / 2;
+ let r = diff % 2;
+ let mut buf = String::with_capacity(dst_len);
+
+ for _ in 0..mid {
+ buf.push(' ');
+ }
+
+ buf.push_str(&src);
+
+ for _ in 0..mid + r {
+ buf.push(' ');
+ }
+
+ Ok(buf)
+ }
+}
+
+/// Count the words in that string
+pub fn wordcount<T: fmt::Display>(s: T) -> Result<usize> {
+ let s = s.to_string();
+
+ Ok(s.split_whitespace().count())
+}
+
+#[cfg(feature = "markdown")]
+pub fn markdown<E, S>(
+    e: E,
+    s: S,
+    options: Option<&comrak::ComrakOptions>,
+) -> Result<MarkupDisplay<E, String>>
+where
+    E: Escaper,
+    S: AsRef<str>,
+{
+ use comrak::{
+ markdown_to_html, ComrakExtensionOptions, ComrakOptions, ComrakParseOptions,
+ ComrakRenderOptions, ListStyleType,
+ };
+
+ const DEFAULT_OPTIONS: ComrakOptions = ComrakOptions {
+ extension: ComrakExtensionOptions {
+ strikethrough: true,
+ tagfilter: true,
+ table: true,
+ autolink: true,
+ // default:
+ tasklist: false,
+ superscript: false,
+ header_ids: None,
+ footnotes: false,
+ description_lists: false,
+ front_matter_delimiter: None,
+ },
+ parse: ComrakParseOptions {
+ // default:
+ smart: false,
+ default_info_string: None,
+ relaxed_tasklist_matching: false,
+ },
+ render: ComrakRenderOptions {
+ unsafe_: false,
+ escape: true,
+ // default:
+ hardbreaks: false,
+ github_pre_lang: false,
+ width: 0,
+ list_style: ListStyleType::Dash,
+ },
+ };
+
+ let s = markdown_to_html(s.as_ref(), options.unwrap_or(&DEFAULT_OPTIONS));
+ Ok(MarkupDisplay::new_safe(s, e))
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ #[cfg(feature = "num-traits")]
+ use std::f64::INFINITY;
+
+ #[cfg(feature = "humansize")]
+ #[test]
+ fn test_filesizeformat() {
+ assert_eq!(filesizeformat(&0).unwrap(), "0 B");
+ assert_eq!(filesizeformat(&999u64).unwrap(), "999 B");
+ assert_eq!(filesizeformat(&1000i32).unwrap(), "1 kB");
+ assert_eq!(filesizeformat(&1023).unwrap(), "1.02 kB");
+ assert_eq!(filesizeformat(&1024usize).unwrap(), "1.02 kB");
+ }
+
+ #[cfg(feature = "percent-encoding")]
+ #[test]
+ fn test_urlencoding() {
+ // Unreserved (https://tools.ietf.org/html/rfc3986.html#section-2.3)
+ // alpha / digit
+ assert_eq!(urlencode("AZaz09").unwrap(), "AZaz09");
+ assert_eq!(urlencode_strict("AZaz09").unwrap(), "AZaz09");
+ // other
+ assert_eq!(urlencode("_.-~").unwrap(), "_.-~");
+ assert_eq!(urlencode_strict("_.-~").unwrap(), "_.-~");
+
+ // Reserved (https://tools.ietf.org/html/rfc3986.html#section-2.2)
+ // gen-delims
+ assert_eq!(urlencode(":/?#[]@").unwrap(), "%3A/%3F%23%5B%5D%40");
+ assert_eq!(
+ urlencode_strict(":/?#[]@").unwrap(),
+ "%3A%2F%3F%23%5B%5D%40"
+ );
+ // sub-delims
+ assert_eq!(
+ urlencode("!$&'()*+,;=").unwrap(),
+ "%21%24%26%27%28%29%2A%2B%2C%3B%3D"
+ );
+ assert_eq!(
+ urlencode_strict("!$&'()*+,;=").unwrap(),
+ "%21%24%26%27%28%29%2A%2B%2C%3B%3D"
+ );
+
+ // Other
+ assert_eq!(
+ urlencode("žŠďŤňĚáÉóŮ").unwrap(),
+ "%C5%BE%C5%A0%C4%8F%C5%A4%C5%88%C4%9A%C3%A1%C3%89%C3%B3%C5%AE"
+ );
+ assert_eq!(
+ urlencode_strict("žŠďŤňĚáÉóŮ").unwrap(),
+ "%C5%BE%C5%A0%C4%8F%C5%A4%C5%88%C4%9A%C3%A1%C3%89%C3%B3%C5%AE"
+ );
+
+ // Ferris
+ assert_eq!(urlencode("🦀").unwrap(), "%F0%9F%A6%80");
+ assert_eq!(urlencode_strict("🦀").unwrap(), "%F0%9F%A6%80");
+ }
+
+ #[test]
+ fn test_linebreaks() {
+        assert_eq!(
+            linebreaks("Foo\nBar Baz").unwrap(),
+            "<p>Foo<br/>Bar Baz</p>"
+        );
+        assert_eq!(
+            linebreaks("Foo\nBar\n\nBaz").unwrap(),
+            "<p>Foo<br/>Bar</p><p>Baz</p>"
+        );
+ }
+
+ #[test]
+ fn test_linebreaksbr() {
+        assert_eq!(linebreaksbr("Foo\nBar").unwrap(), "Foo<br/>Bar");
+        assert_eq!(
+            linebreaksbr("Foo\nBar\n\nBaz").unwrap(),
+            "Foo<br/>Bar<br/><br/>Baz"
+ );
+ }
+
+ #[test]
+ fn test_paragraphbreaks() {
+        assert_eq!(
+            paragraphbreaks("Foo\nBar Baz").unwrap(),
+            "<p>Foo\nBar Baz</p>"
+        );
+        assert_eq!(
+            paragraphbreaks("Foo\nBar\n\nBaz").unwrap(),
+            "<p>Foo\nBar</p><p>Baz</p>"
+        );
+        assert_eq!(
+            paragraphbreaks("Foo\n\n\n\n\nBar\n\nBaz").unwrap(),
+            "<p>Foo</p><p>\nBar</p><p>Baz</p>"
+        );
+ }
+
+ #[test]
+ fn test_lower() {
+ assert_eq!(lower("Foo").unwrap(), "foo");
+ assert_eq!(lower("FOO").unwrap(), "foo");
+ assert_eq!(lower("FooBar").unwrap(), "foobar");
+ assert_eq!(lower("foo").unwrap(), "foo");
+ }
+
+ #[test]
+ fn test_upper() {
+ assert_eq!(upper("Foo").unwrap(), "FOO");
+ assert_eq!(upper("FOO").unwrap(), "FOO");
+ assert_eq!(upper("FooBar").unwrap(), "FOOBAR");
+ assert_eq!(upper("foo").unwrap(), "FOO");
+ }
+
+ #[test]
+ fn test_trim() {
+ assert_eq!(trim(" Hello\tworld\t").unwrap(), "Hello\tworld");
+ }
+
+ #[test]
+ fn test_truncate() {
+ assert_eq!(truncate("hello", 2).unwrap(), "he...");
+ let a = String::from("您好");
+ assert_eq!(a.len(), 6);
+ assert_eq!(String::from("您").len(), 3);
+ assert_eq!(truncate("您好", 1).unwrap(), "您...");
+ assert_eq!(truncate("您好", 2).unwrap(), "您...");
+ assert_eq!(truncate("您好", 3).unwrap(), "您...");
+ assert_eq!(truncate("您好", 4).unwrap(), "您好...");
+ assert_eq!(truncate("您好", 6).unwrap(), "您好");
+ assert_eq!(truncate("您好", 7).unwrap(), "您好");
+ let s = String::from("🤚a🤚");
+ assert_eq!(s.len(), 9);
+ assert_eq!(String::from("🤚").len(), 4);
+ assert_eq!(truncate("🤚a🤚", 1).unwrap(), "🤚...");
+ assert_eq!(truncate("🤚a🤚", 2).unwrap(), "🤚...");
+ assert_eq!(truncate("🤚a🤚", 3).unwrap(), "🤚...");
+ assert_eq!(truncate("🤚a🤚", 4).unwrap(), "🤚...");
+ assert_eq!(truncate("🤚a🤚", 5).unwrap(), "🤚a...");
+ assert_eq!(truncate("🤚a🤚", 6).unwrap(), "🤚a🤚...");
+ assert_eq!(truncate("🤚a🤚", 9).unwrap(), "🤚a🤚");
+ assert_eq!(truncate("🤚a🤚", 10).unwrap(), "🤚a🤚");
+ }
+
+ #[test]
+ fn test_indent() {
+ assert_eq!(indent("hello", 2).unwrap(), "hello");
+ assert_eq!(indent("hello\n", 2).unwrap(), "hello\n");
+ assert_eq!(indent("hello\nfoo", 2).unwrap(), "hello\n foo");
+ assert_eq!(
+ indent("hello\nfoo\n bar", 4).unwrap(),
+ "hello\n foo\n bar"
+ );
+ }
+
+ #[cfg(feature = "num-traits")]
+ #[test]
+ #[allow(clippy::float_cmp)]
+ fn test_into_f64() {
+ assert_eq!(into_f64(1).unwrap(), 1.0_f64);
+ assert_eq!(into_f64(1.9).unwrap(), 1.9_f64);
+ assert_eq!(into_f64(-1.9).unwrap(), -1.9_f64);
+ assert_eq!(into_f64(INFINITY as f32).unwrap(), INFINITY);
+ assert_eq!(into_f64(-INFINITY as f32).unwrap(), -INFINITY);
+ }
+
+ #[cfg(feature = "num-traits")]
+ #[test]
+ fn test_into_isize() {
+ assert_eq!(into_isize(1).unwrap(), 1_isize);
+ assert_eq!(into_isize(1.9).unwrap(), 1_isize);
+ assert_eq!(into_isize(-1.9).unwrap(), -1_isize);
+ assert_eq!(into_isize(1.5_f64).unwrap(), 1_isize);
+ assert_eq!(into_isize(-1.5_f64).unwrap(), -1_isize);
+ match into_isize(INFINITY) {
+ Err(Fmt(fmt::Error)) => {}
+ _ => panic!("Should return error of type Err(Fmt(fmt::Error))"),
+ };
+ }
+
+ #[allow(clippy::needless_borrow)]
+ #[test]
+ fn test_join() {
+ assert_eq!(
+ join((&["hello", "world"]).iter(), ", ").unwrap(),
+ "hello, world"
+ );
+ assert_eq!(join((&["hello"]).iter(), ", ").unwrap(), "hello");
+
+ let empty: &[&str] = &[];
+ assert_eq!(join(empty.iter(), ", ").unwrap(), "");
+
+ let input: Vec = vec!["foo".into(), "bar".into(), "bazz".into()];
+ assert_eq!(join(input.iter(), ":").unwrap(), "foo:bar:bazz");
+
+ let input: &[String] = &["foo".into(), "bar".into()];
+ assert_eq!(join(input.iter(), ":").unwrap(), "foo:bar");
+
+ let real: String = "blah".into();
+ let input: Vec<&str> = vec![&real];
+ assert_eq!(join(input.iter(), ";").unwrap(), "blah");
+
+ assert_eq!(
+ join((&&&&&["foo", "bar"]).iter(), ", ").unwrap(),
+ "foo, bar"
+ );
+ }
+
+ #[cfg(feature = "num-traits")]
+ #[test]
+ #[allow(clippy::float_cmp)]
+ fn test_abs() {
+ assert_eq!(abs(1).unwrap(), 1);
+ assert_eq!(abs(-1).unwrap(), 1);
+ assert_eq!(abs(1.0).unwrap(), 1.0);
+ assert_eq!(abs(-1.0).unwrap(), 1.0);
+ assert_eq!(abs(1.0_f64).unwrap(), 1.0_f64);
+ assert_eq!(abs(-1.0_f64).unwrap(), 1.0_f64);
+ }
+
+ #[test]
+ fn test_capitalize() {
+ assert_eq!(capitalize("foo").unwrap(), "Foo".to_string());
+ assert_eq!(capitalize("f").unwrap(), "F".to_string());
+ assert_eq!(capitalize("fO").unwrap(), "Fo".to_string());
+ assert_eq!(capitalize("").unwrap(), "".to_string());
+ assert_eq!(capitalize("FoO").unwrap(), "Foo".to_string());
+ assert_eq!(capitalize("foO BAR").unwrap(), "Foo bar".to_string());
+ assert_eq!(capitalize("äØÄÅÖ").unwrap(), "Äøäåö".to_string());
+ assert_eq!(capitalize("ß").unwrap(), "SS".to_string());
+ assert_eq!(capitalize("ßß").unwrap(), "SSß".to_string());
+ }
+
+ #[test]
+ fn test_center() {
+ assert_eq!(center(&"f", 3).unwrap(), " f ".to_string());
+ assert_eq!(center(&"f", 4).unwrap(), " f ".to_string());
+ assert_eq!(center(&"foo", 1).unwrap(), "foo".to_string());
+ assert_eq!(center(&"foo bar", 8).unwrap(), "foo bar ".to_string());
+ }
+
+ #[test]
+ fn test_wordcount() {
+ assert_eq!(wordcount("").unwrap(), 0);
+ assert_eq!(wordcount(" \n\t").unwrap(), 0);
+ assert_eq!(wordcount("foo").unwrap(), 1);
+ assert_eq!(wordcount("foo bar").unwrap(), 2);
+ }
+}
diff --git a/third_party/rust/askama/src/filters/yaml.rs b/third_party/rust/askama/src/filters/yaml.rs
new file mode 100644
index 000000000000..9f4c8021ce7c
--- /dev/null
+++ b/third_party/rust/askama/src/filters/yaml.rs
@@ -0,0 +1,34 @@
+use crate::error::{Error, Result};
+use askama_escape::{Escaper, MarkupDisplay};
+use serde::Serialize;
+
+/// Serialize to YAML (requires `serde_yaml` feature)
+///
+/// ## Errors
+///
+/// This will panic if `S`'s implementation of `Serialize` decides to fail,
+/// or if `T` contains a map with non-string keys.
+pub fn yaml(e: E, s: S) -> Result> {
+ match serde_yaml::to_string(&s) {
+ Ok(s) => Ok(MarkupDisplay::new_safe(s, e)),
+ Err(e) => Err(Error::from(e)),
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use askama_escape::Html;
+
+ #[test]
+ fn test_yaml() {
+ assert_eq!(yaml(Html, true).unwrap().to_string(), "true\n");
+ assert_eq!(yaml(Html, "foo").unwrap().to_string(), "foo\n");
+ assert_eq!(yaml(Html, true).unwrap().to_string(), "true\n");
+ assert_eq!(yaml(Html, "foo").unwrap().to_string(), "foo\n");
+ assert_eq!(
+ yaml(Html, &vec!["foo", "bar"]).unwrap().to_string(),
+ "- foo\n- bar\n"
+ );
+ }
+}
diff --git a/third_party/rust/askama/src/helpers.rs b/third_party/rust/askama/src/helpers.rs
new file mode 100644
index 000000000000..79a1ada20691
--- /dev/null
+++ b/third_party/rust/askama/src/helpers.rs
@@ -0,0 +1,48 @@
+use std::iter::{Enumerate, Peekable};
+
+pub struct TemplateLoop
+where
+ I: Iterator,
+{
+ iter: Peekable>,
+}
+
+impl TemplateLoop
+where
+ I: Iterator,
+{
+ #[inline]
+ pub fn new(iter: I) -> Self {
+ TemplateLoop {
+ iter: iter.enumerate().peekable(),
+ }
+ }
+}
+
+impl Iterator for TemplateLoop
+where
+ I: Iterator,
+{
+ type Item = (::Item, LoopItem);
+
+ #[inline]
+ fn next(&mut self) -> Option<(::Item, LoopItem)> {
+ self.iter.next().map(|(index, item)| {
+ (
+ item,
+ LoopItem {
+ index,
+ first: index == 0,
+ last: self.iter.peek().is_none(),
+ },
+ )
+ })
+ }
+}
+
+#[derive(Copy, Clone)]
+pub struct LoopItem {
+ pub index: usize,
+ pub first: bool,
+ pub last: bool,
+}
diff --git a/third_party/rust/askama/src/lib.rs b/third_party/rust/askama/src/lib.rs
new file mode 100644
index 000000000000..17085b5d6322
--- /dev/null
+++ b/third_party/rust/askama/src/lib.rs
@@ -0,0 +1,219 @@
+//! Askama implements a type-safe compiler for Jinja-like templates.
+//! It lets you write templates in a Jinja-like syntax,
+//! which are linked to a `struct` defining the template context.
+//! This is done using a custom derive implementation (implemented
+//! in [`askama_derive`](https://crates.io/crates/askama_derive)).
+//!
+//! For feature highlights and a quick start, please review the
+//! [README](https://github.com/djc/askama/blob/main/README.md).
+//!
+//! The primary documentation for this crate now lives in
+//! [the book](https://djc.github.io/askama/).
+//!
+//! # Creating Askama templates
+//!
+//! An Askama template is a `struct` definition which provides the template
+//! context combined with a UTF-8 encoded text file (or inline source, see
+//! below). Askama can be used to generate any kind of text-based format.
+//! The template file's extension may be used to provide content type hints.
+//!
+//! A template consists of **text contents**, which are passed through as-is,
+//! **expressions**, which get replaced with content while being rendered, and
+//! **tags**, which control the template's logic.
+//! The template syntax is very similar to [Jinja](http://jinja.pocoo.org/),
+//! as well as Jinja-derivatives like [Twig](http://twig.sensiolabs.org/) or
+//! [Tera](https://github.com/Keats/tera).
+//!
+//! ## The `template()` attribute
+//!
+//! Askama works by generating one or more trait implementations for any
+//! `struct` type decorated with the `#[derive(Template)]` attribute. The
+//! code generation process takes some options that can be specified through
+//! the `template()` attribute. The following sub-attributes are currently
+//! recognized:
+//!
+//! * `path` (as `path = "foo.html"`): sets the path to the template file. The
+//! path is interpreted as relative to the configured template directories
+//! (by default, this is a `templates` directory next to your `Cargo.toml`).
+//! The file name extension is used to infer an escape mode (see below). In
+//! web framework integrations, the path's extension may also be used to
+//! infer the content type of the resulting response.
+//! Cannot be used together with `source`.
+//! * `source` (as `source = "{{ foo }}"`): directly sets the template source.
+//! This can be useful for test cases or short templates. The generated path
+//! is undefined, which generally makes it impossible to refer to this
+//! template from other templates. If `source` is specified, `ext` must also
+//! be specified (see below). Cannot be used together with `path`.
+//! * `ext` (as `ext = "txt"`): lets you specify the content type as a file
+//! extension. This is used to infer an escape mode (see below), and some
+//! web framework integrations use it to determine the content type.
+//! Cannot be used together with `path`.
+//! * `print` (as `print = "code"`): enable debugging by printing nothing
+//! (`none`), the parsed syntax tree (`ast`), the generated code (`code`)
+//! or `all` for both. The requested data will be printed to stdout at
+//! compile time.
+//! * `escape` (as `escape = "none"`): override the template's extension used for
+//! the purpose of determining the escaper for this template. See the section
+//! on configuring custom escapers for more information.
+//! * `syntax` (as `syntax = "foo"`): set the syntax name for a parser defined
+//! in the configuration file. The default syntax , "default", is the one
+//! provided by Askama.
+
+#![forbid(unsafe_code)]
+#![deny(elided_lifetimes_in_paths)]
+#![deny(unreachable_pub)]
+
+mod error;
+pub mod filters;
+pub mod helpers;
+
+use std::fmt;
+
+pub use askama_derive::Template;
+pub use askama_escape::{Html, MarkupDisplay, Text};
+
+#[doc(hidden)]
+pub use crate as shared;
+pub use crate::error::{Error, Result};
+
+/// Main `Template` trait; implementations are generally derived
+///
+/// If you need an object-safe template, use [`DynTemplate`].
+pub trait Template: fmt::Display {
+ /// Helper method which allocates a new `String` and renders into it
+ fn render(&self) -> Result {
+ let mut buf = String::with_capacity(Self::SIZE_HINT);
+ self.render_into(&mut buf)?;
+ Ok(buf)
+ }
+
+ /// Renders the template to the given `writer` fmt buffer
+ fn render_into(&self, writer: &mut (impl std::fmt::Write + ?Sized)) -> Result<()>;
+
+ /// Renders the template to the given `writer` io buffer
+ #[inline]
+ fn write_into(&self, writer: &mut (impl std::io::Write + ?Sized)) -> std::io::Result<()> {
+ writer.write_fmt(format_args!("{self}"))
+ }
+
+ /// The template's extension, if provided
+ const EXTENSION: Option<&'static str>;
+
+ /// Provides a conservative estimate of the expanded length of the rendered template
+ const SIZE_HINT: usize;
+
+ /// The MIME type (Content-Type) of the data that gets rendered by this Template
+ const MIME_TYPE: &'static str;
+}
+
+/// Object-safe wrapper trait around [`Template`] implementers
+///
+/// This trades reduced performance (mostly due to writing into `dyn Write`) for object safety.
+pub trait DynTemplate {
+ /// Helper method which allocates a new `String` and renders into it
+ fn dyn_render(&self) -> Result;
+
+ /// Renders the template to the given `writer` fmt buffer
+ fn dyn_render_into(&self, writer: &mut dyn std::fmt::Write) -> Result<()>;
+
+ /// Renders the template to the given `writer` io buffer
+ fn dyn_write_into(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()>;
+
+ /// Helper function to inspect the template's extension
+ fn extension(&self) -> Option<&'static str>;
+
+ /// Provides a conservative estimate of the expanded length of the rendered template
+ fn size_hint(&self) -> usize;
+
+ /// The MIME type (Content-Type) of the data that gets rendered by this Template
+ fn mime_type(&self) -> &'static str;
+}
+
+impl DynTemplate for T {
+ fn dyn_render(&self) -> Result {
+ ::render(self)
+ }
+
+ fn dyn_render_into(&self, writer: &mut dyn std::fmt::Write) -> Result<()> {
+ ::render_into(self, writer)
+ }
+
+ #[inline]
+ fn dyn_write_into(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> {
+ writer.write_fmt(format_args!("{self}"))
+ }
+
+ fn extension(&self) -> Option<&'static str> {
+ Self::EXTENSION
+ }
+
+ fn size_hint(&self) -> usize {
+ Self::SIZE_HINT
+ }
+
+ fn mime_type(&self) -> &'static str {
+ Self::MIME_TYPE
+ }
+}
+
+impl fmt::Display for dyn DynTemplate {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.dyn_render_into(f).map_err(|_| ::std::fmt::Error {})
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use std::fmt;
+
+ use super::*;
+ use crate::{DynTemplate, Template};
+
+ #[test]
+ fn dyn_template() {
+ struct Test;
+ impl Template for Test {
+ fn render_into(&self, writer: &mut (impl std::fmt::Write + ?Sized)) -> Result<()> {
+ Ok(writer.write_str("test")?)
+ }
+
+ const EXTENSION: Option<&'static str> = Some("txt");
+
+ const SIZE_HINT: usize = 4;
+
+ const MIME_TYPE: &'static str = "text/plain; charset=utf-8";
+ }
+
+ impl fmt::Display for Test {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.render_into(f).map_err(|_| fmt::Error {})
+ }
+ }
+
+ fn render(t: &dyn DynTemplate) -> String {
+ t.dyn_render().unwrap()
+ }
+
+ let test = &Test as &dyn DynTemplate;
+
+ assert_eq!(render(test), "test");
+
+ assert_eq!(test.to_string(), "test");
+
+ assert_eq!(format!("{test}"), "test");
+
+ let mut vec = Vec::new();
+ test.dyn_write_into(&mut vec).unwrap();
+ assert_eq!(vec, vec![b't', b'e', b's', b't']);
+ }
+}
+
+/// Old build script helper to rebuild crates if contained templates have changed
+///
+/// This function is now deprecated and does nothing.
+#[deprecated(
+ since = "0.8.1",
+ note = "file-level dependency tracking is handled automatically without build script"
+)]
+pub fn rerun_if_templates_changed() {}
diff --git a/third_party/rust/askama_derive/.cargo-checksum.json b/third_party/rust/askama_derive/.cargo-checksum.json
new file mode 100644
index 000000000000..7c28fca6fef0
--- /dev/null
+++ b/third_party/rust/askama_derive/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"f293fbc41371fb46f5b68775b158d8da37c09453dc9356ee8e97fce3d1021b2d","LICENSE-APACHE":"87cb0d734c723c083e51c825930ff42bce28596b52dee15567f6b28f19c195e3","LICENSE-MIT":"df20e0180764bf5bd76f74d47bc9e8c0069a666401629c390003a1d5eba99c92","README.md":"dd3e4e203eeca91219fd57c0ca1f92b413176f406df19568d0fe33d7905123e4","src/config.rs":"de4202804d32cc4da044ed41140ef987056f44116b1bbfac53001e07133e52b9","src/generator.rs":"4fec224dd261bc96a63b831f0692a62d9f8d19566377b39dd69bc0f3de4ab033","src/heritage.rs":"fceb0ac86034b8eb902212f9a78a6fb7d19688c3ccdb117099f15933073bf7bb","src/input.rs":"53afae3f73e2b52d83d73c1b38893677992a5ee04927e8b905198b742b1546ae","src/lib.rs":"003e91569575b72a9587796c82c9f9c0e5e9f3dc8db6b659735cf58f68504b76","src/parser/expr.rs":"3b8178398a293910df161ddd769d2efc7ae8dff03e7313f033149a38a6d81983","src/parser/mod.rs":"3afc065cdc69dc1498ddf9a04a77f56d807ed14653828918d36529a441fb6c48","src/parser/node.rs":"c5437e2525e245b6fcd358696f3607c50ef82cf649a66b6bef7816232c3220fa","src/parser/tests.rs":"81fb02f8cab87c93575fdb6b7d6e9cae6fa3b69173f5f5a76d214f5316ca66ca","templates/a.html":"b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c","templates/b.html":"7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730","templates/sub/b.html":"7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730","templates/sub/c.html":"bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c","templates/sub/sub1/d.html":"86b0c5a1e2b73b08fd54c727f4458649ed9fe3ad1b6e8ac9460c070113509a1e"},"package":"c22fbe0413545c098358e56966ff22cdd039e10215ae213cfbd65032b119fc94"}
\ No newline at end of file
diff --git a/third_party/rust/askama_derive/Cargo.toml b/third_party/rust/askama_derive/Cargo.toml
new file mode 100644
index 000000000000..d780c24c6a6d
--- /dev/null
+++ b/third_party/rust/askama_derive/Cargo.toml
@@ -0,0 +1,72 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+rust-version = "1.58"
+name = "askama_derive"
+version = "0.12.1"
+description = "Procedural macro package for Askama"
+homepage = "https://github.com/djc/askama"
+readme = "README.md"
+license = "MIT/Apache-2.0"
+repository = "https://github.com/djc/askama"
+resolver = "1"
+
+[lib]
+proc-macro = true
+
+[dependencies.basic-toml]
+version = "0.1.1"
+optional = true
+
+[dependencies.mime]
+version = "0.3"
+
+[dependencies.mime_guess]
+version = "2"
+
+[dependencies.nom]
+version = "7"
+
+[dependencies.proc-macro2]
+version = "1"
+
+[dependencies.quote]
+version = "1"
+
+[dependencies.serde]
+version = "1.0"
+features = ["derive"]
+optional = true
+
+[dependencies.syn]
+version = "2"
+
+[features]
+config = [
+ "serde",
+ "basic-toml",
+]
+humansize = []
+markdown = []
+num-traits = []
+serde-json = []
+serde-yaml = []
+urlencode = []
+with-actix-web = []
+with-axum = []
+with-gotham = []
+with-hyper = []
+with-mendes = []
+with-rocket = []
+with-tide = []
+with-warp = []
diff --git a/third_party/rust/rinja_derive/LICENSE-APACHE b/third_party/rust/askama_derive/LICENSE-APACHE
similarity index 100%
rename from third_party/rust/rinja_derive/LICENSE-APACHE
rename to third_party/rust/askama_derive/LICENSE-APACHE
diff --git a/third_party/rust/rinja_derive/LICENSE-MIT b/third_party/rust/askama_derive/LICENSE-MIT
similarity index 100%
rename from third_party/rust/rinja_derive/LICENSE-MIT
rename to third_party/rust/askama_derive/LICENSE-MIT
diff --git a/third_party/rust/askama_derive/README.md b/third_party/rust/askama_derive/README.md
new file mode 100644
index 000000000000..e27f1107d1f9
--- /dev/null
+++ b/third_party/rust/askama_derive/README.md
@@ -0,0 +1,9 @@
+# askama_derive: procedural macros for the Askama templating engine
+
+[](https://docs.rs/askama_derive/)
+[](https://crates.io/crates/askama_derive)
+[](https://github.com/djc/askama/actions?query=workflow%3ACI)
+[](https://gitter.im/djc/askama)
+
+This crate contains the procedural macros used by the
+[Askama](https://github.com/djc/askama) templating engine.
diff --git a/third_party/rust/askama_derive/src/config.rs b/third_party/rust/askama_derive/src/config.rs
new file mode 100644
index 000000000000..cf22a720f0ea
--- /dev/null
+++ b/third_party/rust/askama_derive/src/config.rs
@@ -0,0 +1,582 @@
+use std::collections::{BTreeMap, HashSet};
+use std::convert::TryFrom;
+use std::path::{Path, PathBuf};
+use std::{env, fs};
+
+#[cfg(feature = "serde")]
+use serde::Deserialize;
+
+use crate::CompileError;
+
+#[derive(Debug)]
+pub(crate) struct Config<'a> {
+ pub(crate) dirs: Vec,
+ pub(crate) syntaxes: BTreeMap>,
+ pub(crate) default_syntax: &'a str,
+ pub(crate) escapers: Vec<(HashSet, String)>,
+ pub(crate) whitespace: WhitespaceHandling,
+}
+
+impl<'a> Config<'a> {
+ pub(crate) fn new(
+ s: &'a str,
+ template_whitespace: Option<&String>,
+ ) -> std::result::Result, CompileError> {
+ let root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
+ let default_dirs = vec![root.join("templates")];
+
+ let mut syntaxes = BTreeMap::new();
+ syntaxes.insert(DEFAULT_SYNTAX_NAME.to_string(), Syntax::default());
+
+ let raw = if s.is_empty() {
+ RawConfig::default()
+ } else {
+ RawConfig::from_toml_str(s)?
+ };
+
+ let (dirs, default_syntax, mut whitespace) = match raw.general {
+ Some(General {
+ dirs,
+ default_syntax,
+ whitespace,
+ }) => (
+ dirs.map_or(default_dirs, |v| {
+ v.into_iter().map(|dir| root.join(dir)).collect()
+ }),
+ default_syntax.unwrap_or(DEFAULT_SYNTAX_NAME),
+ whitespace,
+ ),
+ None => (
+ default_dirs,
+ DEFAULT_SYNTAX_NAME,
+ WhitespaceHandling::default(),
+ ),
+ };
+ if let Some(template_whitespace) = template_whitespace {
+ whitespace = match template_whitespace.as_str() {
+ "suppress" => WhitespaceHandling::Suppress,
+ "minimize" => WhitespaceHandling::Minimize,
+ "preserve" => WhitespaceHandling::Preserve,
+ s => return Err(format!("invalid value for `whitespace`: \"{s}\"").into()),
+ };
+ }
+
+ if let Some(raw_syntaxes) = raw.syntax {
+ for raw_s in raw_syntaxes {
+ let name = raw_s.name;
+
+ if syntaxes
+ .insert(name.to_string(), Syntax::try_from(raw_s)?)
+ .is_some()
+ {
+ return Err(format!("syntax \"{name}\" is already defined").into());
+ }
+ }
+ }
+
+ if !syntaxes.contains_key(default_syntax) {
+ return Err(format!("default syntax \"{default_syntax}\" not found").into());
+ }
+
+ let mut escapers = Vec::new();
+ if let Some(configured) = raw.escaper {
+ for escaper in configured {
+ escapers.push((
+ escaper
+ .extensions
+ .iter()
+ .map(|ext| (*ext).to_string())
+ .collect(),
+ escaper.path.to_string(),
+ ));
+ }
+ }
+ for (extensions, path) in DEFAULT_ESCAPERS {
+ escapers.push((str_set(extensions), (*path).to_string()));
+ }
+
+ Ok(Config {
+ dirs,
+ syntaxes,
+ default_syntax,
+ escapers,
+ whitespace,
+ })
+ }
+
+ pub(crate) fn find_template(
+ &self,
+ path: &str,
+ start_at: Option<&Path>,
+ ) -> std::result::Result {
+ if let Some(root) = start_at {
+ let relative = root.with_file_name(path);
+ if relative.exists() {
+ return Ok(relative);
+ }
+ }
+
+ for dir in &self.dirs {
+ let rooted = dir.join(path);
+ if rooted.exists() {
+ return Ok(rooted);
+ }
+ }
+
+ Err(format!(
+ "template {:?} not found in directories {:?}",
+ path, self.dirs
+ )
+ .into())
+ }
+}
+
+#[derive(Debug)]
+pub(crate) struct Syntax<'a> {
+ pub(crate) block_start: &'a str,
+ pub(crate) block_end: &'a str,
+ pub(crate) expr_start: &'a str,
+ pub(crate) expr_end: &'a str,
+ pub(crate) comment_start: &'a str,
+ pub(crate) comment_end: &'a str,
+}
+
+impl Default for Syntax<'static> {
+ fn default() -> Self {
+ Self {
+ block_start: "{%",
+ block_end: "%}",
+ expr_start: "{{",
+ expr_end: "}}",
+ comment_start: "{#",
+ comment_end: "#}",
+ }
+ }
+}
+
+impl<'a> TryFrom> for Syntax<'a> {
+ type Error = CompileError;
+
+ fn try_from(raw: RawSyntax<'a>) -> std::result::Result {
+ let default = Syntax::default();
+ let syntax = Self {
+ block_start: raw.block_start.unwrap_or(default.block_start),
+ block_end: raw.block_end.unwrap_or(default.block_end),
+ expr_start: raw.expr_start.unwrap_or(default.expr_start),
+ expr_end: raw.expr_end.unwrap_or(default.expr_end),
+ comment_start: raw.comment_start.unwrap_or(default.comment_start),
+ comment_end: raw.comment_end.unwrap_or(default.comment_end),
+ };
+
+ if syntax.block_start.len() != 2
+ || syntax.block_end.len() != 2
+ || syntax.expr_start.len() != 2
+ || syntax.expr_end.len() != 2
+ || syntax.comment_start.len() != 2
+ || syntax.comment_end.len() != 2
+ {
+ return Err("length of delimiters must be two".into());
+ }
+
+ let bs = syntax.block_start.as_bytes()[0];
+ let be = syntax.block_start.as_bytes()[1];
+ let cs = syntax.comment_start.as_bytes()[0];
+ let ce = syntax.comment_start.as_bytes()[1];
+ let es = syntax.expr_start.as_bytes()[0];
+ let ee = syntax.expr_start.as_bytes()[1];
+ if !((bs == cs && bs == es) || (be == ce && be == ee)) {
+ return Err(format!("bad delimiters block_start: {}, comment_start: {}, expr_start: {}, needs one of the two characters in common", syntax.block_start, syntax.comment_start, syntax.expr_start).into());
+ }
+
+ Ok(syntax)
+ }
+}
+
+#[cfg_attr(feature = "serde", derive(Deserialize))]
+#[derive(Default)]
+struct RawConfig<'a> {
+ #[cfg_attr(feature = "serde", serde(borrow))]
+ general: Option>,
+ syntax: Option>>,
+ escaper: Option>>,
+}
+
+impl RawConfig<'_> {
+ #[cfg(feature = "config")]
+ fn from_toml_str(s: &str) -> std::result::Result, CompileError> {
+ basic_toml::from_str(s)
+ .map_err(|e| format!("invalid TOML in {CONFIG_FILE_NAME}: {e}").into())
+ }
+
+ #[cfg(not(feature = "config"))]
+ fn from_toml_str(_: &str) -> std::result::Result, CompileError> {
+ Err("TOML support not available".into())
+ }
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Debug)]
+#[cfg_attr(feature = "serde", derive(Deserialize))]
+#[cfg_attr(feature = "serde", serde(field_identifier, rename_all = "lowercase"))]
+pub(crate) enum WhitespaceHandling {
+ /// The default behaviour. It will leave the whitespace characters "as is".
+ Preserve,
+ /// It'll remove all the whitespace characters before and after the jinja block.
+ Suppress,
+ /// It'll remove all the whitespace characters except one before and after the jinja blocks.
+ /// If there is a newline character, the preserved character in the trimmed characters, it will
+ /// the one preserved.
+ Minimize,
+}
+
+impl Default for WhitespaceHandling {
+ fn default() -> Self {
+ WhitespaceHandling::Preserve
+ }
+}
+
+#[cfg_attr(feature = "serde", derive(Deserialize))]
+struct General<'a> {
+ #[cfg_attr(feature = "serde", serde(borrow))]
+ dirs: Option>,
+ default_syntax: Option<&'a str>,
+ #[cfg_attr(feature = "serde", serde(default))]
+ whitespace: WhitespaceHandling,
+}
+
+#[cfg_attr(feature = "serde", derive(Deserialize))]
+struct RawSyntax<'a> {
+ name: &'a str,
+ block_start: Option<&'a str>,
+ block_end: Option<&'a str>,
+ expr_start: Option<&'a str>,
+ expr_end: Option<&'a str>,
+ comment_start: Option<&'a str>,
+ comment_end: Option<&'a str>,
+}
+
+#[cfg_attr(feature = "serde", derive(Deserialize))]
+struct RawEscaper<'a> {
+ path: &'a str,
+ extensions: Vec<&'a str>,
+}
+
+pub(crate) fn read_config_file(
+ config_path: Option<&str>,
+) -> std::result::Result {
+ let root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
+ let filename = match config_path {
+ Some(config_path) => root.join(config_path),
+ None => root.join(CONFIG_FILE_NAME),
+ };
+
+ if filename.exists() {
+ fs::read_to_string(&filename)
+ .map_err(|_| format!("unable to read {:?}", filename.to_str().unwrap()).into())
+ } else if config_path.is_some() {
+ Err(format!("`{}` does not exist", root.display()).into())
+ } else {
+ Ok("".to_string())
+ }
+}
+
+fn str_set(vals: &[T]) -> HashSet
+where
+ T: ToString,
+{
+ vals.iter().map(|s| s.to_string()).collect()
+}
+
+#[allow(clippy::match_wild_err_arm)]
+pub(crate) fn get_template_source(tpl_path: &Path) -> std::result::Result {
+ match fs::read_to_string(tpl_path) {
+ Err(_) => Err(format!(
+ "unable to open template file '{}'",
+ tpl_path.to_str().unwrap()
+ )
+ .into()),
+ Ok(mut source) => {
+ if source.ends_with('\n') {
+ let _ = source.pop();
+ }
+ Ok(source)
+ }
+ }
+}
+
+static CONFIG_FILE_NAME: &str = "askama.toml";
+static DEFAULT_SYNTAX_NAME: &str = "default";
+static DEFAULT_ESCAPERS: &[(&[&str], &str)] = &[
+ (&["html", "htm", "xml"], "::askama::Html"),
+ (&["md", "none", "txt", "yml", ""], "::askama::Text"),
+ (&["j2", "jinja", "jinja2"], "::askama::Html"),
+];
+
+#[cfg(test)]
+mod tests {
+ use std::env;
+ use std::path::{Path, PathBuf};
+
+ use super::*;
+
+ #[test]
+ fn get_source() {
+ let path = Config::new("", None)
+ .and_then(|config| config.find_template("b.html", None))
+ .unwrap();
+ assert_eq!(get_template_source(&path).unwrap(), "bar");
+ }
+
+ #[test]
+ fn test_default_config() {
+ let mut root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
+ root.push("templates");
+ let config = Config::new("", None).unwrap();
+ assert_eq!(config.dirs, vec![root]);
+ }
+
+ #[cfg(feature = "config")]
+ #[test]
+ fn test_config_dirs() {
+ let mut root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
+ root.push("tpl");
+ let config = Config::new("[general]\ndirs = [\"tpl\"]", None).unwrap();
+ assert_eq!(config.dirs, vec![root]);
+ }
+
+ fn assert_eq_rooted(actual: &Path, expected: &str) {
+ let mut root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
+ root.push("templates");
+ let mut inner = PathBuf::new();
+ inner.push(expected);
+ assert_eq!(actual.strip_prefix(root).unwrap(), inner);
+ }
+
+ #[test]
+ fn find_absolute() {
+ let config = Config::new("", None).unwrap();
+ let root = config.find_template("a.html", None).unwrap();
+ let path = config.find_template("sub/b.html", Some(&root)).unwrap();
+ assert_eq_rooted(&path, "sub/b.html");
+ }
+
+ #[test]
+ #[should_panic]
+ fn find_relative_nonexistent() {
+ let config = Config::new("", None).unwrap();
+ let root = config.find_template("a.html", None).unwrap();
+ config.find_template("c.html", Some(&root)).unwrap();
+ }
+
+ #[test]
+ fn find_relative() {
+ let config = Config::new("", None).unwrap();
+ let root = config.find_template("sub/b.html", None).unwrap();
+ let path = config.find_template("c.html", Some(&root)).unwrap();
+ assert_eq_rooted(&path, "sub/c.html");
+ }
+
+ #[test]
+ fn find_relative_sub() {
+ let config = Config::new("", None).unwrap();
+ let root = config.find_template("sub/b.html", None).unwrap();
+ let path = config.find_template("sub1/d.html", Some(&root)).unwrap();
+ assert_eq_rooted(&path, "sub/sub1/d.html");
+ }
+
+ #[cfg(feature = "config")]
+ #[test]
+ fn add_syntax() {
+ let raw_config = r#"
+ [general]
+ default_syntax = "foo"
+
+ [[syntax]]
+ name = "foo"
+ block_start = "{<"
+
+ [[syntax]]
+ name = "bar"
+ expr_start = "{!"
+ "#;
+
+ let default_syntax = Syntax::default();
+ let config = Config::new(raw_config, None).unwrap();
+ assert_eq!(config.default_syntax, "foo");
+
+ let foo = config.syntaxes.get("foo").unwrap();
+ assert_eq!(foo.block_start, "{<");
+ assert_eq!(foo.block_end, default_syntax.block_end);
+ assert_eq!(foo.expr_start, default_syntax.expr_start);
+ assert_eq!(foo.expr_end, default_syntax.expr_end);
+ assert_eq!(foo.comment_start, default_syntax.comment_start);
+ assert_eq!(foo.comment_end, default_syntax.comment_end);
+
+ let bar = config.syntaxes.get("bar").unwrap();
+ assert_eq!(bar.block_start, default_syntax.block_start);
+ assert_eq!(bar.block_end, default_syntax.block_end);
+ assert_eq!(bar.expr_start, "{!");
+ assert_eq!(bar.expr_end, default_syntax.expr_end);
+ assert_eq!(bar.comment_start, default_syntax.comment_start);
+ assert_eq!(bar.comment_end, default_syntax.comment_end);
+ }
+
+ #[cfg(feature = "config")]
+ #[test]
+ fn add_syntax_two() {
+ let raw_config = r#"
+ syntax = [{ name = "foo", block_start = "{<" },
+ { name = "bar", expr_start = "{!" } ]
+
+ [general]
+ default_syntax = "foo"
+ "#;
+
+ let default_syntax = Syntax::default();
+ let config = Config::new(raw_config, None).unwrap();
+ assert_eq!(config.default_syntax, "foo");
+
+ let foo = config.syntaxes.get("foo").unwrap();
+ assert_eq!(foo.block_start, "{<");
+ assert_eq!(foo.block_end, default_syntax.block_end);
+ assert_eq!(foo.expr_start, default_syntax.expr_start);
+ assert_eq!(foo.expr_end, default_syntax.expr_end);
+ assert_eq!(foo.comment_start, default_syntax.comment_start);
+ assert_eq!(foo.comment_end, default_syntax.comment_end);
+
+ let bar = config.syntaxes.get("bar").unwrap();
+ assert_eq!(bar.block_start, default_syntax.block_start);
+ assert_eq!(bar.block_end, default_syntax.block_end);
+ assert_eq!(bar.expr_start, "{!");
+ assert_eq!(bar.expr_end, default_syntax.expr_end);
+ assert_eq!(bar.comment_start, default_syntax.comment_start);
+ assert_eq!(bar.comment_end, default_syntax.comment_end);
+ }
+
+ #[cfg(feature = "toml")]
+ #[should_panic]
+ #[test]
+ fn use_default_at_syntax_name() {
+ let raw_config = r#"
+ syntax = [{ name = "default" }]
+ "#;
+
+ let _config = Config::new(raw_config, None).unwrap();
+ }
+
+ #[cfg(feature = "toml")]
+ #[should_panic]
+ #[test]
+ fn duplicated_syntax_name_on_list() {
+ let raw_config = r#"
+ syntax = [{ name = "foo", block_start = "~<" },
+ { name = "foo", block_start = "%%" } ]
+ "#;
+
+ let _config = Config::new(raw_config, None).unwrap();
+ }
+
+ #[cfg(feature = "toml")]
+ #[should_panic]
+ #[test]
+ fn is_not_exist_default_syntax() {
+ let raw_config = r#"
+ [general]
+ default_syntax = "foo"
+ "#;
+
+ let _config = Config::new(raw_config, None).unwrap();
+ }
+
+ #[cfg(feature = "config")]
+ #[test]
+ fn escape_modes() {
+ let config = Config::new(
+ r#"
+ [[escaper]]
+ path = "::askama::Js"
+ extensions = ["js"]
+ "#,
+ None,
+ )
+ .unwrap();
+ assert_eq!(
+ config.escapers,
+ vec![
+ (str_set(&["js"]), "::askama::Js".into()),
+ (str_set(&["html", "htm", "xml"]), "::askama::Html".into()),
+ (
+ str_set(&["md", "none", "txt", "yml", ""]),
+ "::askama::Text".into()
+ ),
+ (str_set(&["j2", "jinja", "jinja2"]), "::askama::Html".into()),
+ ]
+ );
+ }
+
+ #[cfg(feature = "config")]
+ #[test]
+ fn test_whitespace_parsing() {
+ let config = Config::new(
+ r#"
+ [general]
+ whitespace = "suppress"
+ "#,
+ None,
+ )
+ .unwrap();
+ assert_eq!(config.whitespace, WhitespaceHandling::Suppress);
+
+ let config = Config::new(r#""#, None).unwrap();
+ assert_eq!(config.whitespace, WhitespaceHandling::Preserve);
+
+ let config = Config::new(
+ r#"
+ [general]
+ whitespace = "preserve"
+ "#,
+ None,
+ )
+ .unwrap();
+ assert_eq!(config.whitespace, WhitespaceHandling::Preserve);
+
+ let config = Config::new(
+ r#"
+ [general]
+ whitespace = "minimize"
+ "#,
+ None,
+ )
+ .unwrap();
+ assert_eq!(config.whitespace, WhitespaceHandling::Minimize);
+ }
+
+ #[cfg(feature = "toml")]
+ #[test]
+ fn test_whitespace_in_template() {
+ // Checking that template arguments have precedence over general configuration.
+ // So in here, in the template arguments, there is `whitespace = "minimize"` so
+ // the `WhitespaceHandling` should be `Minimize` as well.
+ let config = Config::new(
+ r#"
+ [general]
+ whitespace = "suppress"
+ "#,
+ Some(&"minimize".to_owned()),
+ )
+ .unwrap();
+ assert_eq!(config.whitespace, WhitespaceHandling::Minimize);
+
+ let config = Config::new(r#""#, Some(&"minimize".to_owned())).unwrap();
+ assert_eq!(config.whitespace, WhitespaceHandling::Minimize);
+ }
+
+ #[test]
+ fn test_config_whitespace_error() {
+ let config = Config::new(r#""#, Some(&"trim".to_owned()));
+ if let Err(err) = config {
+ assert_eq!(err.msg, "invalid value for `whitespace`: \"trim\"");
+ } else {
+ panic!("Config::new should have return an error");
+ }
+ }
+}
diff --git a/third_party/rust/askama_derive/src/generator.rs b/third_party/rust/askama_derive/src/generator.rs
new file mode 100644
index 000000000000..05b3fc32454f
--- /dev/null
+++ b/third_party/rust/askama_derive/src/generator.rs
@@ -0,0 +1,2171 @@
+use crate::config::{get_template_source, read_config_file, Config, WhitespaceHandling};
+use crate::heritage::{Context, Heritage};
+use crate::input::{Print, Source, TemplateInput};
+use crate::parser::{parse, Cond, CondTest, Expr, Loop, Node, Target, When, Whitespace, Ws};
+use crate::CompileError;
+
+use proc_macro::TokenStream;
+use quote::{quote, ToTokens};
+use syn::punctuated::Punctuated;
+
+use std::collections::hash_map::{Entry, HashMap};
+use std::path::{Path, PathBuf};
+use std::{cmp, hash, mem, str};
+
+/// The actual implementation for askama_derive::Template
+pub(crate) fn derive_template(input: TokenStream) -> TokenStream {
+ let ast: syn::DeriveInput = syn::parse(input).unwrap();
+ match build_template(&ast) {
+ Ok(source) => source.parse().unwrap(),
+ Err(e) => e.into_compile_error(),
+ }
+}
+
+/// Takes a `syn::DeriveInput` and generates source code for it
+///
+/// Reads the metadata from the `template()` attribute to get the template
+/// metadata, then fetches the source from the filesystem. The source is
+/// parsed, and the parse tree is fed to the code generator. Will print
+/// the parse tree and/or generated source according to the `print` key's
+/// value as passed to the `template()` attribute.
+fn build_template(ast: &syn::DeriveInput) -> Result<String, CompileError> {
+ let template_args = TemplateArgs::new(ast)?;
+ let config_toml = read_config_file(template_args.config_path.as_deref())?;
+ let config = Config::new(&config_toml, template_args.whitespace.as_ref())?;
+ let input = TemplateInput::new(ast, &config, template_args)?;
+ let source: String = match input.source {
+ Source::Source(ref s) => s.clone(),
+ Source::Path(_) => get_template_source(&input.path)?,
+ };
+
+ let mut sources = HashMap::new();
+ find_used_templates(&input, &mut sources, source)?;
+
+ let mut parsed = HashMap::new();
+ for (path, src) in &sources {
+ parsed.insert(path.as_path(), parse(src, input.syntax)?);
+ }
+
+ let mut contexts = HashMap::new();
+ for (path, nodes) in &parsed {
+ contexts.insert(*path, Context::new(input.config, path, nodes)?);
+ }
+
+ let ctx = &contexts[input.path.as_path()];
+ let heritage = if !ctx.blocks.is_empty() || ctx.extends.is_some() {
+ Some(Heritage::new(ctx, &contexts))
+ } else {
+ None
+ };
+
+ if input.print == Print::Ast || input.print == Print::All {
+ eprintln!("{:?}", parsed[input.path.as_path()]);
+ }
+
+ let code = Generator::new(
+ &input,
+ &contexts,
+ heritage.as_ref(),
+ MapChain::new(),
+ config.whitespace,
+ )
+ .build(&contexts[input.path.as_path()])?;
+ if input.print == Print::Code || input.print == Print::All {
+ eprintln!("{code}");
+ }
+ Ok(code)
+}
+
+#[derive(Default)]
+pub(crate) struct TemplateArgs {
+ pub(crate) source: Option<Source>,
+ pub(crate) print: Print,
+ pub(crate) escaping: Option<String>,
+ pub(crate) ext: Option<String>,
+ pub(crate) syntax: Option<String>,
+ pub(crate) config_path: Option<String>,
+ pub(crate) whitespace: Option<String>,
+}
+
+impl TemplateArgs {
+ fn new(ast: &'_ syn::DeriveInput) -> Result<Self, CompileError> {
+ // Check that an attribute called `template()` exists once and that it is
+ // the proper type (list).
+ let mut template_args = None;
+ for attr in &ast.attrs {
+ if !attr.path().is_ident("template") {
+ continue;
+ }
+
+ match attr.parse_args_with(Punctuated::<syn::Meta, syn::Token![,]>::parse_terminated) {
+ Ok(args) if template_args.is_none() => template_args = Some(args),
+ Ok(_) => return Err("duplicated 'template' attribute".into()),
+ Err(e) => return Err(format!("unable to parse template arguments: {e}").into()),
+ };
+ }
+
+ let template_args =
+ template_args.ok_or_else(|| CompileError::from("no attribute 'template' found"))?;
+
+ let mut args = Self::default();
+ // Loop over the meta attributes and find everything that we
+ // understand. Return a CompileError if something is not right.
+ // `source` contains an enum that can represent `path` or `source`.
+ for item in template_args {
+ let pair = match item {
+ syn::Meta::NameValue(pair) => pair,
+ _ => {
+ return Err(format!(
+ "unsupported attribute argument {:?}",
+ item.to_token_stream()
+ )
+ .into())
+ }
+ };
+
+ let ident = match pair.path.get_ident() {
+ Some(ident) => ident,
+ None => unreachable!("not possible in syn::Meta::NameValue(…)"),
+ };
+
+ let value = match pair.value {
+ syn::Expr::Lit(lit) => lit,
+ syn::Expr::Group(group) => match *group.expr {
+ syn::Expr::Lit(lit) => lit,
+ _ => {
+ return Err(format!("unsupported argument value type for {ident:?}").into())
+ }
+ },
+ _ => return Err(format!("unsupported argument value type for {ident:?}").into()),
+ };
+
+ if ident == "path" {
+ if let syn::Lit::Str(s) = value.lit {
+ if args.source.is_some() {
+ return Err("must specify 'source' or 'path', not both".into());
+ }
+ args.source = Some(Source::Path(s.value()));
+ } else {
+ return Err("template path must be string literal".into());
+ }
+ } else if ident == "source" {
+ if let syn::Lit::Str(s) = value.lit {
+ if args.source.is_some() {
+ return Err("must specify 'source' or 'path', not both".into());
+ }
+ args.source = Some(Source::Source(s.value()));
+ } else {
+ return Err("template source must be string literal".into());
+ }
+ } else if ident == "print" {
+ if let syn::Lit::Str(s) = value.lit {
+ args.print = s.value().parse()?;
+ } else {
+ return Err("print value must be string literal".into());
+ }
+ } else if ident == "escape" {
+ if let syn::Lit::Str(s) = value.lit {
+ args.escaping = Some(s.value());
+ } else {
+ return Err("escape value must be string literal".into());
+ }
+ } else if ident == "ext" {
+ if let syn::Lit::Str(s) = value.lit {
+ args.ext = Some(s.value());
+ } else {
+ return Err("ext value must be string literal".into());
+ }
+ } else if ident == "syntax" {
+ if let syn::Lit::Str(s) = value.lit {
+ args.syntax = Some(s.value())
+ } else {
+ return Err("syntax value must be string literal".into());
+ }
+ } else if ident == "config" {
+ if let syn::Lit::Str(s) = value.lit {
+ args.config_path = Some(s.value())
+ } else {
+ return Err("config value must be string literal".into());
+ }
+ } else if ident == "whitespace" {
+ if let syn::Lit::Str(s) = value.lit {
+ args.whitespace = Some(s.value())
+ } else {
+ return Err("whitespace value must be string literal".into());
+ }
+ } else {
+ return Err(format!("unsupported attribute key {ident:?} found").into());
+ }
+ }
+
+ Ok(args)
+ }
+}
+
+fn find_used_templates(
+ input: &TemplateInput<'_>,
+ map: &mut HashMap<PathBuf, String>,
+ source: String,
+) -> Result<(), CompileError> {
+ let mut dependency_graph = Vec::new();
+ let mut check = vec![(input.path.clone(), source)];
+ while let Some((path, source)) = check.pop() {
+ for n in parse(&source, input.syntax)? {
+ match n {
+ Node::Extends(extends) => {
+ let extends = input.config.find_template(extends, Some(&path))?;
+ let dependency_path = (path.clone(), extends.clone());
+ if dependency_graph.contains(&dependency_path) {
+ return Err(format!(
+ "cyclic dependency in graph {:#?}",
+ dependency_graph
+ .iter()
+ .map(|e| format!("{:#?} --> {:#?}", e.0, e.1))
+ .collect::<Vec<String>>()
+ )
+ .into());
+ }
+ dependency_graph.push(dependency_path);
+ let source = get_template_source(&extends)?;
+ check.push((extends, source));
+ }
+ Node::Import(_, import, _) => {
+ let import = input.config.find_template(import, Some(&path))?;
+ let source = get_template_source(&import)?;
+ check.push((import, source));
+ }
+ _ => {}
+ }
+ }
+ map.insert(path, source);
+ }
+ Ok(())
+}
+
+struct Generator<'a> {
+ // The template input state: original struct AST and attributes
+ input: &'a TemplateInput<'a>,
+ // All contexts, keyed by the package-relative template path
+ contexts: &'a HashMap<&'a Path, Context<'a>>,
+ // The heritage contains references to blocks and their ancestry
+ heritage: Option<&'a Heritage<'a>>,
+ // Variables accessible directly from the current scope (not redirected to context)
+ locals: MapChain<'a, &'a str, LocalMeta>,
+ // Suffix whitespace from the previous literal. Will be flushed to the
+ // output buffer unless suppressed by whitespace suppression on the next
+ // non-literal.
+ next_ws: Option<&'a str>,
+ // Whitespace suppression from the previous non-literal. Will be used to
+ // determine whether to flush prefix whitespace from the next literal.
+ skip_ws: WhitespaceHandling,
+ // If currently in a block, this will contain the name of a potential parent block
+ super_block: Option<(&'a str, usize)>,
+ // buffer for writable
+ buf_writable: Vec<Writable<'a>>,
+ // Counter for write! hash named arguments
+ named: usize,
+ // If set to `suppress`, the whitespace characters will be removed by default unless `+` is
+ // used.
+ whitespace: WhitespaceHandling,
+}
+
+impl<'a> Generator<'a> {
+ fn new<'n>(
+ input: &'n TemplateInput<'_>,
+ contexts: &'n HashMap<&'n Path, Context<'n>>,
+ heritage: Option<&'n Heritage<'_>>,
+ locals: MapChain<'n, &'n str, LocalMeta>,
+ whitespace: WhitespaceHandling,
+ ) -> Generator<'n> {
+ Generator {
+ input,
+ contexts,
+ heritage,
+ locals,
+ next_ws: None,
+ skip_ws: WhitespaceHandling::Preserve,
+ super_block: None,
+ buf_writable: vec![],
+ named: 0,
+ whitespace,
+ }
+ }
+
+ fn child(&mut self) -> Generator<'_> {
+ let locals = MapChain::with_parent(&self.locals);
+ Self::new(
+ self.input,
+ self.contexts,
+ self.heritage,
+ locals,
+ self.whitespace,
+ )
+ }
+
+ // Takes a Context and generates the relevant implementations.
+ fn build(mut self, ctx: &'a Context<'_>) -> Result<String, CompileError> {
+ let mut buf = Buffer::new(0);
+
+ self.impl_template(ctx, &mut buf)?;
+ self.impl_display(&mut buf)?;
+
+ #[cfg(feature = "with-actix-web")]
+ self.impl_actix_web_responder(&mut buf)?;
+ #[cfg(feature = "with-axum")]
+ self.impl_axum_into_response(&mut buf)?;
+ #[cfg(feature = "with-gotham")]
+ self.impl_gotham_into_response(&mut buf)?;
+ #[cfg(feature = "with-hyper")]
+ self.impl_hyper_into_response(&mut buf)?;
+ #[cfg(feature = "with-mendes")]
+ self.impl_mendes_responder(&mut buf)?;
+ #[cfg(feature = "with-rocket")]
+ self.impl_rocket_responder(&mut buf)?;
+ #[cfg(feature = "with-tide")]
+ self.impl_tide_integrations(&mut buf)?;
+ #[cfg(feature = "with-warp")]
+ self.impl_warp_reply(&mut buf)?;
+
+ Ok(buf.buf)
+ }
+
+ // Implement `Template` for the given context struct.
+ fn impl_template(
+ &mut self,
+ ctx: &'a Context<'_>,
+ buf: &mut Buffer,
+ ) -> Result<(), CompileError> {
+ self.write_header(buf, "::askama::Template", None)?;
+ buf.writeln(
+ "fn render_into(&self, writer: &mut (impl ::std::fmt::Write + ?Sized)) -> \
+ ::askama::Result<()> {",
+ )?;
+
+ // Make sure the compiler understands that the generated code depends on the template files.
+ for path in self.contexts.keys() {
+ // Skip the fake path of templates defined in rust source.
+ let path_is_valid = match self.input.source {
+ Source::Path(_) => true,
+ Source::Source(_) => path != &self.input.path,
+ };
+ if path_is_valid {
+ let path = path.to_str().unwrap();
+ buf.writeln(
+ &quote! {
+ include_bytes!(#path);
+ }
+ .to_string(),
+ )?;
+ }
+ }
+
+ let size_hint = if let Some(heritage) = self.heritage {
+ self.handle(heritage.root, heritage.root.nodes, buf, AstLevel::Top)
+ } else {
+ self.handle(ctx, ctx.nodes, buf, AstLevel::Top)
+ }?;
+
+ self.flush_ws(Ws(None, None));
+ buf.writeln("::askama::Result::Ok(())")?;
+ buf.writeln("}")?;
+
+ buf.writeln("const EXTENSION: ::std::option::Option<&'static ::std::primitive::str> = ")?;
+ buf.writeln(&format!("{:?}", self.input.extension()))?;
+ buf.writeln(";")?;
+
+ buf.writeln("const SIZE_HINT: ::std::primitive::usize = ")?;
+ buf.writeln(&format!("{size_hint}"))?;
+ buf.writeln(";")?;
+
+ buf.writeln("const MIME_TYPE: &'static ::std::primitive::str = ")?;
+ buf.writeln(&format!("{:?}", &self.input.mime_type))?;
+ buf.writeln(";")?;
+
+ buf.writeln("}")?;
+ Ok(())
+ }
+
+ // Implement `Display` for the given context struct.
+ fn impl_display(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
+ self.write_header(buf, "::std::fmt::Display", None)?;
+ buf.writeln("#[inline]")?;
+ buf.writeln("fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {")?;
+ buf.writeln("::askama::Template::render_into(self, f).map_err(|_| ::std::fmt::Error {})")?;
+ buf.writeln("}")?;
+ buf.writeln("}")
+ }
+
+ // Implement Actix-web's `Responder`.
+ #[cfg(feature = "with-actix-web")]
+ fn impl_actix_web_responder(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
+ self.write_header(buf, "::askama_actix::actix_web::Responder", None)?;
+ buf.writeln("type Body = ::askama_actix::actix_web::body::BoxBody;")?;
+ buf.writeln("#[inline]")?;
+ buf.writeln(
+ "fn respond_to(self, _req: &::askama_actix::actix_web::HttpRequest) \
+ -> ::askama_actix::actix_web::HttpResponse {",
+ )?;
+ buf.writeln("<Self as ::askama_actix::TemplateToResponse>::to_response(&self)")?;
+ buf.writeln("}")?;
+ buf.writeln("}")
+ }
+
+ // Implement Axum's `IntoResponse`.
+ #[cfg(feature = "with-axum")]
+ fn impl_axum_into_response(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
+ self.write_header(buf, "::askama_axum::IntoResponse", None)?;
+ buf.writeln("#[inline]")?;
+ buf.writeln(
+ "fn into_response(self)\
+ -> ::askama_axum::Response {",
+ )?;
+ buf.writeln("::askama_axum::into_response(&self)")?;
+ buf.writeln("}")?;
+ buf.writeln("}")
+ }
+
+ // Implement gotham's `IntoResponse`.
+ #[cfg(feature = "with-gotham")]
+ fn impl_gotham_into_response(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
+ self.write_header(buf, "::askama_gotham::IntoResponse", None)?;
+ buf.writeln("#[inline]")?;
+ buf.writeln(
+ "fn into_response(self, _state: &::askama_gotham::State)\
+ -> ::askama_gotham::Response<::askama_gotham::Body> {",
+ )?;
+ buf.writeln("::askama_gotham::respond(&self)")?;
+ buf.writeln("}")?;
+ buf.writeln("}")
+ }
+
+ // Implement `From for hyper::Response` and `From for hyper::Body.
+ #[cfg(feature = "with-hyper")]
+ fn impl_hyper_into_response(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
+ let (impl_generics, orig_ty_generics, where_clause) =
+ self.input.ast.generics.split_for_impl();
+ let ident = &self.input.ast.ident;
+ // From for hyper::Response
+ buf.writeln(&format!(
+ "{} {{",
+ quote!(
+ impl #impl_generics ::core::convert::From<#ident #orig_ty_generics>
+ for ::askama_hyper::hyper::Response<::askama_hyper::hyper::Body>
+ #where_clause
+ )
+ ))?;
+ buf.writeln("#[inline]")?;
+ buf.writeln(&format!(
+ "{} {{",
+ quote!(fn from(value: #ident #orig_ty_generics) -> Self)
+ ))?;
+ buf.writeln("::askama_hyper::respond(value)")?;
+ buf.writeln("}")?;
+ buf.writeln("}")?;
+
+ // TryFrom for hyper::Body
+ buf.writeln(&format!(
+ "{} {{",
+ quote!(
+ impl #impl_generics ::core::convert::TryFrom<#ident #orig_ty_generics>
+ for ::askama_hyper::hyper::Body
+ #where_clause
+ )
+ ))?;
+ buf.writeln("type Error = ::askama::Error;")?;
+ buf.writeln("#[inline]")?;
+ buf.writeln(&format!(
+ "{} {{",
+ quote!(fn try_from(value: #ident #orig_ty_generics) -> Result<Self, Self::Error>)
+ ))?;
+ buf.writeln("::askama::Template::render(value).map(Into::into)")?;
+ buf.writeln("}")?;
+ buf.writeln("}")
+ }
+
+ // Implement mendes' `Responder`.
+ #[cfg(feature = "with-mendes")]
+ fn impl_mendes_responder(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
+ let param = syn::parse_str("A: ::mendes::Application").unwrap();
+
+ let mut generics = self.input.ast.generics.clone();
+ generics.params.push(param);
+ let (_, orig_ty_generics, _) = self.input.ast.generics.split_for_impl();
+ let (impl_generics, _, where_clause) = generics.split_for_impl();
+
+ let mut where_clause = match where_clause {
+ Some(clause) => clause.clone(),
+ None => syn::WhereClause {
+ where_token: syn::Token![where](proc_macro2::Span::call_site()),
+ predicates: syn::punctuated::Punctuated::new(),
+ },
+ };
+
+ where_clause
+ .predicates
+ .push(syn::parse_str("A::ResponseBody: From<String>").unwrap());
+ where_clause
+ .predicates
+ .push(syn::parse_str("A::Error: From<::askama_mendes::Error>").unwrap());
+
+ buf.writeln(
+ format!(
+ "{} {} for {} {} {{",
+ quote!(impl #impl_generics),
+ "::mendes::application::IntoResponse",
+ self.input.ast.ident,
+ quote!(#orig_ty_generics #where_clause),
+ )
+ .as_ref(),
+ )?;
+
+ buf.writeln(
+ "fn into_response(self, app: &A, req: &::mendes::http::request::Parts) \
+ -> ::mendes::http::Response {",
+ )?;
+
+ buf.writeln("::askama_mendes::into_response(app, req, &self)")?;
+ buf.writeln("}")?;
+ buf.writeln("}")?;
+ Ok(())
+ }
+
+ // Implement Rocket's `Responder`.
+ #[cfg(feature = "with-rocket")]
+ fn impl_rocket_responder(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
+ let lifetime = syn::Lifetime::new("'askama", proc_macro2::Span::call_site());
+ let param = syn::GenericParam::Lifetime(syn::LifetimeParam::new(lifetime));
+ self.write_header(
+ buf,
+ "::askama_rocket::Responder<'askama, 'askama>",
+ Some(vec![param]),
+ )?;
+
+ buf.writeln("#[inline]")?;
+ buf.writeln(
+ "fn respond_to(self, _: &::askama_rocket::Request) \
+ -> ::askama_rocket::Result<'askama> {",
+ )?;
+ buf.writeln("::askama_rocket::respond(&self)")?;
+
+ buf.writeln("}")?;
+ buf.writeln("}")?;
+ Ok(())
+ }
+
+ #[cfg(feature = "with-tide")]
+ fn impl_tide_integrations(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
+ self.write_header(
+ buf,
+ "::std::convert::TryInto<::askama_tide::tide::Body>",
+ None,
+ )?;
+ buf.writeln(
+ "type Error = ::askama_tide::askama::Error;\n\
+ #[inline]\n\
+ fn try_into(self) -> ::askama_tide::askama::Result<::askama_tide::tide::Body> {",
+ )?;
+ buf.writeln("::askama_tide::try_into_body(&self)")?;
+ buf.writeln("}")?;
+ buf.writeln("}")?;
+
+ buf.writeln("#[allow(clippy::from_over_into)]")?;
+ self.write_header(buf, "Into<::askama_tide::tide::Response>", None)?;
+ buf.writeln("#[inline]")?;
+ buf.writeln("fn into(self) -> ::askama_tide::tide::Response {")?;
+ buf.writeln("::askama_tide::into_response(&self)")?;
+ buf.writeln("}\n}")
+ }
+
+ #[cfg(feature = "with-warp")]
+ fn impl_warp_reply(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
+ self.write_header(buf, "::askama_warp::warp::reply::Reply", None)?;
+ buf.writeln("#[inline]")?;
+ buf.writeln("fn into_response(self) -> ::askama_warp::warp::reply::Response {")?;
+ buf.writeln("::askama_warp::reply(&self)")?;
+ buf.writeln("}")?;
+ buf.writeln("}")
+ }
+
+ // Writes header for the `impl` for `TraitFromPathName` or `Template`
+ // for the given context struct.
+ fn write_header(
+ &mut self,
+ buf: &mut Buffer,
+ target: &str,
+ params: Option<Vec<syn::GenericParam>>,
+ ) -> Result<(), CompileError> {
+ let mut generics = self.input.ast.generics.clone();
+ if let Some(params) = params {
+ for param in params {
+ generics.params.push(param);
+ }
+ }
+ let (_, orig_ty_generics, _) = self.input.ast.generics.split_for_impl();
+ let (impl_generics, _, where_clause) = generics.split_for_impl();
+ buf.writeln(
+ format!(
+ "{} {} for {}{} {{",
+ quote!(impl #impl_generics),
+ target,
+ self.input.ast.ident,
+ quote!(#orig_ty_generics #where_clause),
+ )
+ .as_ref(),
+ )
+ }
+
+ /* Helper methods for handling node types */
+
+ fn handle(
+ &mut self,
+ ctx: &'a Context<'_>,
+ nodes: &'a [Node<'_>],
+ buf: &mut Buffer,
+ level: AstLevel,
+ ) -> Result<usize, CompileError> {
+ let mut size_hint = 0;
+ for n in nodes {
+ match *n {
+ Node::Lit(lws, val, rws) => {
+ self.visit_lit(lws, val, rws);
+ }
+ Node::Comment(ws) => {
+ self.write_comment(ws);
+ }
+ Node::Expr(ws, ref val) => {
+ self.write_expr(ws, val);
+ }
+ Node::LetDecl(ws, ref var) => {
+ self.write_let_decl(buf, ws, var)?;
+ }
+ Node::Let(ws, ref var, ref val) => {
+ self.write_let(buf, ws, var, val)?;
+ }
+ Node::Cond(ref conds, ws) => {
+ size_hint += self.write_cond(ctx, buf, conds, ws)?;
+ }
+ Node::Match(ws1, ref expr, ref arms, ws2) => {
+ size_hint += self.write_match(ctx, buf, ws1, expr, arms, ws2)?;
+ }
+ Node::Loop(ref loop_block) => {
+ size_hint += self.write_loop(ctx, buf, loop_block)?;
+ }
+ Node::BlockDef(ws1, name, _, ws2) => {
+ size_hint += self.write_block(buf, Some(name), Ws(ws1.0, ws2.1))?;
+ }
+ Node::Include(ws, path) => {
+ size_hint += self.handle_include(ctx, buf, ws, path)?;
+ }
+ Node::Call(ws, scope, name, ref args) => {
+ size_hint += self.write_call(ctx, buf, ws, scope, name, args)?;
+ }
+ Node::Macro(_, ref m) => {
+ if level != AstLevel::Top {
+ return Err("macro blocks only allowed at the top level".into());
+ }
+ self.flush_ws(m.ws1);
+ self.prepare_ws(m.ws2);
+ }
+ Node::Raw(ws1, lws, val, rws, ws2) => {
+ self.handle_ws(ws1);
+ self.visit_lit(lws, val, rws);
+ self.handle_ws(ws2);
+ }
+ Node::Import(ws, _, _) => {
+ if level != AstLevel::Top {
+ return Err("import blocks only allowed at the top level".into());
+ }
+ self.handle_ws(ws);
+ }
+ Node::Extends(_) => {
+ if level != AstLevel::Top {
+ return Err("extend blocks only allowed at the top level".into());
+ }
+ // No whitespace handling: child template top-level is not used,
+ // except for the blocks defined in it.
+ }
+ Node::Break(ws) => {
+ self.handle_ws(ws);
+ self.write_buf_writable(buf)?;
+ buf.writeln("break;")?;
+ }
+ Node::Continue(ws) => {
+ self.handle_ws(ws);
+ self.write_buf_writable(buf)?;
+ buf.writeln("continue;")?;
+ }
+ }
+ }
+
+ if AstLevel::Top == level {
+ // Handle any pending whitespace.
+ if self.next_ws.is_some() {
+ self.flush_ws(Ws(Some(self.skip_ws.into()), None));
+ }
+
+ size_hint += self.write_buf_writable(buf)?;
+ }
+ Ok(size_hint)
+ }
+
+ fn write_cond(
+ &mut self,
+ ctx: &'a Context<'_>,
+ buf: &mut Buffer,
+ conds: &'a [Cond<'_>],
+ ws: Ws,
+ ) -> Result<usize, CompileError> {
+ let mut flushed = 0;
+ let mut arm_sizes = Vec::new();
+ let mut has_else = false;
+ for (i, &(cws, ref cond, ref nodes)) in conds.iter().enumerate() {
+ self.handle_ws(cws);
+ flushed += self.write_buf_writable(buf)?;
+ if i > 0 {
+ self.locals.pop();
+ }
+
+ self.locals.push();
+ let mut arm_size = 0;
+ if let Some(CondTest { target, expr }) = cond {
+ if i == 0 {
+ buf.write("if ");
+ } else {
+ buf.dedent()?;
+ buf.write("} else if ");
+ }
+
+ if let Some(target) = target {
+ let mut expr_buf = Buffer::new(0);
+ self.visit_expr(&mut expr_buf, expr)?;
+ buf.write("let ");
+ self.visit_target(buf, true, true, target);
+ buf.write(" = &(");
+ buf.write(&expr_buf.buf);
+ buf.write(")");
+ } else {
+ // The following syntax `*(&(...) as &bool)` is used to
+ // trigger Rust's automatic dereferencing, to coerce
+ // e.g. `&&&&&bool` to `bool`. First `&(...) as &bool`
+ // coerces e.g. `&&&bool` to `&bool`. Then `*(&bool)`
+ // finally dereferences it to `bool`.
+ buf.write("*(&(");
+ let expr_code = self.visit_expr_root(expr)?;
+ buf.write(&expr_code);
+ buf.write(") as &bool)");
+ }
+ } else {
+ buf.dedent()?;
+ buf.write("} else");
+ has_else = true;
+ }
+
+ buf.writeln(" {")?;
+
+ arm_size += self.handle(ctx, nodes, buf, AstLevel::Nested)?;
+ arm_sizes.push(arm_size);
+ }
+ self.handle_ws(ws);
+ flushed += self.write_buf_writable(buf)?;
+ buf.writeln("}")?;
+
+ self.locals.pop();
+
+ if !has_else {
+ arm_sizes.push(0);
+ }
+ Ok(flushed + median(&mut arm_sizes))
+ }
+
+ #[allow(clippy::too_many_arguments)]
+ fn write_match(
+ &mut self,
+ ctx: &'a Context<'_>,
+ buf: &mut Buffer,
+ ws1: Ws,
+ expr: &Expr<'_>,
+ arms: &'a [When<'_>],
+ ws2: Ws,
+ ) -> Result<usize, CompileError> {
+ self.flush_ws(ws1);
+ let flushed = self.write_buf_writable(buf)?;
+ let mut arm_sizes = Vec::new();
+
+ let expr_code = self.visit_expr_root(expr)?;
+ buf.writeln(&format!("match &{expr_code} {{"))?;
+
+ let mut arm_size = 0;
+ for (i, arm) in arms.iter().enumerate() {
+ let &(ws, ref target, ref body) = arm;
+ self.handle_ws(ws);
+
+ if i > 0 {
+ arm_sizes.push(arm_size + self.write_buf_writable(buf)?);
+
+ buf.writeln("}")?;
+ self.locals.pop();
+ }
+
+ self.locals.push();
+ self.visit_target(buf, true, true, target);
+ buf.writeln(" => {")?;
+
+ arm_size = self.handle(ctx, body, buf, AstLevel::Nested)?;
+ }
+
+ self.handle_ws(ws2);
+ arm_sizes.push(arm_size + self.write_buf_writable(buf)?);
+ buf.writeln("}")?;
+ self.locals.pop();
+
+ buf.writeln("}")?;
+
+ Ok(flushed + median(&mut arm_sizes))
+ }
+
+ #[allow(clippy::too_many_arguments)]
+ fn write_loop(
+ &mut self,
+ ctx: &'a Context<'_>,
+ buf: &mut Buffer,
+ loop_block: &'a Loop<'_>,
+ ) -> Result<usize, CompileError> {
+ self.handle_ws(loop_block.ws1);
+ self.locals.push();
+
+ let expr_code = self.visit_expr_root(&loop_block.iter)?;
+
+ let flushed = self.write_buf_writable(buf)?;
+ buf.writeln("{")?;
+ buf.writeln("let mut _did_loop = false;")?;
+ match loop_block.iter {
+ Expr::Range(_, _, _) => buf.writeln(&format!("let _iter = {expr_code};")),
+ Expr::Array(..) => buf.writeln(&format!("let _iter = {expr_code}.iter();")),
+ // If `iter` is a call then we assume it's something that returns
+ // an iterator. If not then the user can explicitly add the needed
+ // call without issues.
+ Expr::Call(..) | Expr::Index(..) => {
+ buf.writeln(&format!("let _iter = ({expr_code}).into_iter();"))
+ }
+ // If accessing `self` then it most likely needs to be
+ // borrowed, to prevent an attempt of moving.
+ _ if expr_code.starts_with("self.") => {
+ buf.writeln(&format!("let _iter = (&{expr_code}).into_iter();"))
+ }
+ // If accessing a field then it most likely needs to be
+ // borrowed, to prevent an attempt of moving.
+ Expr::Attr(..) => buf.writeln(&format!("let _iter = (&{expr_code}).into_iter();")),
+ // Otherwise, we borrow `iter` assuming that it implements `IntoIterator`.
+ _ => buf.writeln(&format!("let _iter = ({expr_code}).into_iter();")),
+ }?;
+ if let Some(cond) = &loop_block.cond {
+ self.locals.push();
+ buf.write("let _iter = _iter.filter(|");
+ self.visit_target(buf, true, true, &loop_block.var);
+ buf.write("| -> bool {");
+ self.visit_expr(buf, cond)?;
+ buf.writeln("});")?;
+ self.locals.pop();
+ }
+
+ self.locals.push();
+ buf.write("for (");
+ self.visit_target(buf, true, true, &loop_block.var);
+ buf.writeln(", _loop_item) in ::askama::helpers::TemplateLoop::new(_iter) {")?;
+
+ buf.writeln("_did_loop = true;")?;
+ let mut size_hint1 = self.handle(ctx, &loop_block.body, buf, AstLevel::Nested)?;
+ self.handle_ws(loop_block.ws2);
+ size_hint1 += self.write_buf_writable(buf)?;
+ self.locals.pop();
+ buf.writeln("}")?;
+
+ buf.writeln("if !_did_loop {")?;
+ self.locals.push();
+ let mut size_hint2 = self.handle(ctx, &loop_block.else_block, buf, AstLevel::Nested)?;
+ self.handle_ws(loop_block.ws3);
+ size_hint2 += self.write_buf_writable(buf)?;
+ self.locals.pop();
+ buf.writeln("}")?;
+
+ buf.writeln("}")?;
+
+ Ok(flushed + ((size_hint1 * 3) + size_hint2) / 2)
+ }
+
+ fn write_call(
+ &mut self,
+ ctx: &'a Context<'_>,
+ buf: &mut Buffer,
+ ws: Ws,
+ scope: Option<&str>,
+ name: &str,
+ args: &[Expr<'_>],
+ ) -> Result<usize, CompileError> {
+ if name == "super" {
+ return self.write_block(buf, None, ws);
+ }
+
+ let (def, own_ctx) = match scope {
+ Some(s) => {
+ let path = ctx.imports.get(s).ok_or_else(|| {
+ CompileError::from(format!("no import found for scope {s:?}"))
+ })?;
+ let mctx = self
+ .contexts
+ .get(path.as_path())
+ .ok_or_else(|| CompileError::from(format!("context for {path:?} not found")))?;
+ let def = mctx.macros.get(name).ok_or_else(|| {
+ CompileError::from(format!("macro {name:?} not found in scope {s:?}"))
+ })?;
+ (def, mctx)
+ }
+ None => {
+ let def = ctx
+ .macros
+ .get(name)
+ .ok_or_else(|| CompileError::from(format!("macro {name:?} not found")))?;
+ (def, ctx)
+ }
+ };
+
+ self.flush_ws(ws); // Cannot handle_ws() here: whitespace from macro definition comes first
+ self.locals.push();
+ self.write_buf_writable(buf)?;
+ buf.writeln("{")?;
+ self.prepare_ws(def.ws1);
+
+ let mut names = Buffer::new(0);
+ let mut values = Buffer::new(0);
+ let mut is_first_variable = true;
+ for (i, arg) in def.args.iter().enumerate() {
+ let expr = args.get(i).ok_or_else(|| {
+ CompileError::from(format!("macro {name:?} takes more than {i} arguments"))
+ })?;
+
+ match expr {
+ // If `expr` is already a form of variable then
+ // don't reintroduce a new variable. This is
+ // to avoid moving non-copyable values.
+ Expr::Var(name) => {
+ let var = self.locals.resolve_or_self(name);
+ self.locals.insert(arg, LocalMeta::with_ref(var));
+ }
+ Expr::Attr(obj, attr) => {
+ let mut attr_buf = Buffer::new(0);
+ self.visit_attr(&mut attr_buf, obj, attr)?;
+
+ let var = self.locals.resolve(&attr_buf.buf).unwrap_or(attr_buf.buf);
+ self.locals.insert(arg, LocalMeta::with_ref(var));
+ }
+ // Everything else still needs to become variables,
+ // to avoid having the same logic be executed
+ // multiple times, e.g. in the case of macro
+ // parameters being used multiple times.
+ _ => {
+ if is_first_variable {
+ is_first_variable = false
+ } else {
+ names.write(", ");
+ values.write(", ");
+ }
+ names.write(arg);
+
+ values.write("(");
+ values.write(&self.visit_expr_root(expr)?);
+ values.write(")");
+ self.locals.insert_with_default(arg);
+ }
+ }
+ }
+
+ debug_assert_eq!(names.buf.is_empty(), values.buf.is_empty());
+ if !names.buf.is_empty() {
+ buf.writeln(&format!("let ({}) = ({});", names.buf, values.buf))?;
+ }
+
+ let mut size_hint = self.handle(own_ctx, &def.nodes, buf, AstLevel::Nested)?;
+
+ self.flush_ws(def.ws2);
+ size_hint += self.write_buf_writable(buf)?;
+ buf.writeln("}")?;
+ self.locals.pop();
+ self.prepare_ws(ws);
+ Ok(size_hint)
+ }
+
+ fn handle_include(
+ &mut self,
+ ctx: &'a Context<'_>,
+ buf: &mut Buffer,
+ ws: Ws,
+ path: &str,
+ ) -> Result<usize, CompileError> {
+ self.flush_ws(ws);
+ self.write_buf_writable(buf)?;
+ let path = self
+ .input
+ .config
+ .find_template(path, Some(&self.input.path))?;
+ let src = get_template_source(&path)?;
+ let nodes = parse(&src, self.input.syntax)?;
+
+ // Make sure the compiler understands that the generated code depends on the template file.
+ {
+ let path = path.to_str().unwrap();
+ buf.writeln(
+ &quote! {
+ include_bytes!(#path);
+ }
+ .to_string(),
+ )?;
+ }
+
+ let size_hint = {
+ // Since nodes must not outlive the Generator, we instantiate
+ // a nested Generator here to handle the include's nodes.
+ let mut gen = self.child();
+ let mut size_hint = gen.handle(ctx, &nodes, buf, AstLevel::Nested)?;
+ size_hint += gen.write_buf_writable(buf)?;
+ size_hint
+ };
+ self.prepare_ws(ws);
+ Ok(size_hint)
+ }
+
+ fn write_let_decl(
+ &mut self,
+ buf: &mut Buffer,
+ ws: Ws,
+ var: &'a Target<'_>,
+ ) -> Result<(), CompileError> {
+ self.handle_ws(ws);
+ self.write_buf_writable(buf)?;
+ buf.write("let ");
+ self.visit_target(buf, false, true, var);
+ buf.writeln(";")
+ }
+
+ fn is_shadowing_variable(&self, var: &Target<'a>) -> Result<bool, CompileError> {
+ match var {
+ Target::Name(name) => {
+ let name = normalize_identifier(name);
+ match self.locals.get(&name) {
+ // declares a new variable
+ None => Ok(false),
+ // an initialized variable gets shadowed
+ Some(meta) if meta.initialized => Ok(true),
+ // initializes a variable that was introduced in a LetDecl before
+ _ => Ok(false),
+ }
+ }
+ Target::Tuple(_, targets) => {
+ for target in targets {
+ match self.is_shadowing_variable(target) {
+ Ok(false) => continue,
+ outcome => return outcome,
+ }
+ }
+ Ok(false)
+ }
+ Target::Struct(_, named_targets) => {
+ for (_, target) in named_targets {
+ match self.is_shadowing_variable(target) {
+ Ok(false) => continue,
+ outcome => return outcome,
+ }
+ }
+ Ok(false)
+ }
+ _ => Err("literals are not allowed on the left-hand side of an assignment".into()),
+ }
+ }
+
+ fn write_let(
+ &mut self,
+ buf: &mut Buffer,
+ ws: Ws,
+ var: &'a Target<'_>,
+ val: &Expr<'_>,
+ ) -> Result<(), CompileError> {
+ self.handle_ws(ws);
+ let mut expr_buf = Buffer::new(0);
+ self.visit_expr(&mut expr_buf, val)?;
+
+ let shadowed = self.is_shadowing_variable(var)?;
+ if shadowed {
+ // Need to flush the buffer if the variable is being shadowed,
+ // to ensure the old variable is used.
+ self.write_buf_writable(buf)?;
+ }
+ if shadowed
+ || !matches!(var, &Target::Name(_))
+ || matches!(var, Target::Name(name) if self.locals.get(name).is_none())
+ {
+ buf.write("let ");
+ }
+
+ self.visit_target(buf, true, true, var);
+ buf.writeln(&format!(" = {};", &expr_buf.buf))
+ }
+
+ // If `name` is `Some`, this is a call to a block definition, and we have to find
+ // the first block for that name from the ancestry chain. If name is `None`, this
+ // is from a `super()` call, and we can get the name from `self.super_block`.
+ fn write_block(
+ &mut self,
+ buf: &mut Buffer,
+ name: Option<&'a str>,
+ outer: Ws,
+ ) -> Result {
+ // Flush preceding whitespace according to the outer WS spec
+ self.flush_ws(outer);
+
+ let prev_block = self.super_block;
+ let cur = match (name, prev_block) {
+ // The top-level context contains a block definition
+ (Some(cur_name), None) => (cur_name, 0),
+ // A block definition contains a block definition of the same name
+ (Some(cur_name), Some((prev_name, _))) if cur_name == prev_name => {
+ return Err(format!("cannot define recursive blocks ({cur_name})").into());
+ }
+ // A block definition contains a definition of another block
+ (Some(cur_name), Some((_, _))) => (cur_name, 0),
+ // `super()` was called inside a block
+ (None, Some((prev_name, gen))) => (prev_name, gen + 1),
+ // `super()` is called from outside a block
+ (None, None) => return Err("cannot call 'super()' outside block".into()),
+ };
+ self.super_block = Some(cur);
+
+ // Get the block definition from the heritage chain
+ let heritage = self
+ .heritage
+ .as_ref()
+ .ok_or_else(|| CompileError::from("no block ancestors available"))?;
+ let (ctx, def) = heritage.blocks[cur.0].get(cur.1).ok_or_else(|| {
+ CompileError::from(match name {
+ None => format!("no super() block found for block '{}'", cur.0),
+ Some(name) => format!("no block found for name '{name}'"),
+ })
+ })?;
+
+ // Get the nodes and whitespace suppression data from the block definition
+ let (ws1, nodes, ws2) = if let Node::BlockDef(ws1, _, nodes, ws2) = def {
+ (ws1, nodes, ws2)
+ } else {
+ unreachable!()
+ };
+
+ // Handle inner whitespace suppression spec and process block nodes
+ self.prepare_ws(*ws1);
+ self.locals.push();
+ let size_hint = self.handle(ctx, nodes, buf, AstLevel::Block)?;
+
+ if !self.locals.is_current_empty() {
+ // Need to flush the buffer before popping the variable stack
+ self.write_buf_writable(buf)?;
+ }
+
+ self.locals.pop();
+ self.flush_ws(*ws2);
+
+ // Restore original block context and set whitespace suppression for
+ // succeeding whitespace according to the outer WS spec
+ self.super_block = prev_block;
+ self.prepare_ws(outer);
+ Ok(size_hint)
+ }
+
/// Queues an output expression `{{ expr }}`; actual code is emitted later by
/// `write_buf_writable`, so adjacent writes can be batched into one `write!`.
fn write_expr(&mut self, ws: Ws, s: &'a Expr<'a>) {
    self.handle_ws(ws);
    self.buf_writable.push(Writable::Expr(s));
}
+
+ // Write expression buffer and empty
+ fn write_buf_writable(&mut self, buf: &mut Buffer) -> Result {
+ if self.buf_writable.is_empty() {
+ return Ok(0);
+ }
+
+ if self
+ .buf_writable
+ .iter()
+ .all(|w| matches!(w, Writable::Lit(_)))
+ {
+ let mut buf_lit = Buffer::new(0);
+ for s in mem::take(&mut self.buf_writable) {
+ if let Writable::Lit(s) = s {
+ buf_lit.write(s);
+ };
+ }
+ buf.writeln(&format!("writer.write_str({:#?})?;", &buf_lit.buf))?;
+ return Ok(buf_lit.buf.len());
+ }
+
+ let mut size_hint = 0;
+ let mut buf_format = Buffer::new(0);
+ let mut buf_expr = Buffer::new(buf.indent + 1);
+ let mut expr_cache = HashMap::with_capacity(self.buf_writable.len());
+ for s in mem::take(&mut self.buf_writable) {
+ match s {
+ Writable::Lit(s) => {
+ buf_format.write(&s.replace('{', "{{").replace('}', "}}"));
+ size_hint += s.len();
+ }
+ Writable::Expr(s) => {
+ use self::DisplayWrap::*;
+ let mut expr_buf = Buffer::new(0);
+ let wrapped = self.visit_expr(&mut expr_buf, s)?;
+ let expression = match wrapped {
+ Wrapped => expr_buf.buf,
+ Unwrapped => format!(
+ "::askama::MarkupDisplay::new_unsafe(&({}), {})",
+ expr_buf.buf, self.input.escaper
+ ),
+ };
+
+ let id = match expr_cache.entry(expression.clone()) {
+ Entry::Occupied(e) if s.is_cacheable() => *e.get(),
+ e => {
+ let id = self.named;
+ self.named += 1;
+
+ buf_expr.write(&format!("expr{id} = "));
+ buf_expr.write("&");
+ buf_expr.write(&expression);
+ buf_expr.writeln(",")?;
+
+ if let Entry::Vacant(e) = e {
+ e.insert(id);
+ }
+
+ id
+ }
+ };
+
+ buf_format.write(&format!("{{expr{id}}}"));
+ size_hint += 3;
+ }
+ }
+ }
+
+ buf.writeln("::std::write!(")?;
+ buf.indent();
+ buf.writeln("writer,")?;
+ buf.writeln(&format!("{:#?},", &buf_format.buf))?;
+ buf.writeln(buf_expr.buf.trim())?;
+ buf.dedent()?;
+ buf.writeln(")?;")?;
+ Ok(size_hint)
+ }
+
/// Queues a literal template chunk, applying the current whitespace-handling
/// policy to its leading whitespace (`lws`) and deferring its trailing
/// whitespace (`rws`) until the next tag decides how to treat it.
fn visit_lit(&mut self, lws: &'a str, val: &'a str, rws: &'a str) {
    assert!(self.next_ws.is_none());
    if !lws.is_empty() {
        match self.skip_ws {
            WhitespaceHandling::Suppress => {}
            _ if val.is_empty() => {
                // Whitespace-only literal: defer it; the following tag's
                // prefix suppressor decides its fate in `flush_ws`.
                assert!(rws.is_empty());
                self.next_ws = Some(lws);
            }
            WhitespaceHandling::Preserve => self.buf_writable.push(Writable::Lit(lws)),
            WhitespaceHandling::Minimize => {
                // Collapse the run to a single newline or space.
                self.buf_writable
                    .push(Writable::Lit(match lws.contains('\n') {
                        true => "\n",
                        false => " ",
                    }));
            }
        }
    }

    if !val.is_empty() {
        self.skip_ws = WhitespaceHandling::Preserve;
        self.buf_writable.push(Writable::Lit(val));
    }

    if !rws.is_empty() {
        self.next_ws = Some(rws);
    }
}

/// `{# ... #}` emits nothing; only its whitespace suppressors matter.
fn write_comment(&mut self, ws: Ws) {
    self.handle_ws(ws);
}
+
+ /* Visitor methods for expression types */
+
+ fn visit_expr_root(&mut self, expr: &Expr<'_>) -> Result {
+ let mut buf = Buffer::new(0);
+ self.visit_expr(&mut buf, expr)?;
+ Ok(buf.buf)
+ }
+
+ fn visit_expr(
+ &mut self,
+ buf: &mut Buffer,
+ expr: &Expr<'_>,
+ ) -> Result {
+ Ok(match *expr {
+ Expr::BoolLit(s) => self.visit_bool_lit(buf, s),
+ Expr::NumLit(s) => self.visit_num_lit(buf, s),
+ Expr::StrLit(s) => self.visit_str_lit(buf, s),
+ Expr::CharLit(s) => self.visit_char_lit(buf, s),
+ Expr::Var(s) => self.visit_var(buf, s),
+ Expr::Path(ref path) => self.visit_path(buf, path),
+ Expr::Array(ref elements) => self.visit_array(buf, elements)?,
+ Expr::Attr(ref obj, name) => self.visit_attr(buf, obj, name)?,
+ Expr::Index(ref obj, ref key) => self.visit_index(buf, obj, key)?,
+ Expr::Filter(name, ref args) => self.visit_filter(buf, name, args)?,
+ Expr::Unary(op, ref inner) => self.visit_unary(buf, op, inner)?,
+ Expr::BinOp(op, ref left, ref right) => self.visit_binop(buf, op, left, right)?,
+ Expr::Range(op, ref left, ref right) => {
+ self.visit_range(buf, op, left.as_deref(), right.as_deref())?
+ }
+ Expr::Group(ref inner) => self.visit_group(buf, inner)?,
+ Expr::Call(ref obj, ref args) => self.visit_call(buf, obj, args)?,
+ Expr::RustMacro(name, args) => self.visit_rust_macro(buf, name, args),
+ Expr::Try(ref expr) => self.visit_try(buf, expr.as_ref())?,
+ Expr::Tuple(ref exprs) => self.visit_tuple(buf, exprs)?,
+ })
+ }
+
+ fn visit_try(
+ &mut self,
+ buf: &mut Buffer,
+ expr: &Expr<'_>,
+ ) -> Result {
+ buf.write("::core::result::Result::map_err(");
+ self.visit_expr(buf, expr)?;
+ buf.write(", |err| ::askama::shared::Error::Custom(::core::convert::Into::into(err)))?");
+ Ok(DisplayWrap::Unwrapped)
+ }
+
/// Emits a Rust macro invocation `name!(args)` verbatim; its output is not
/// escaped by askama.
fn visit_rust_macro(&mut self, buf: &mut Buffer, name: &str, args: &str) -> DisplayWrap {
    buf.write(name);
    buf.write("!(");
    buf.write(args);
    buf.write(")");

    DisplayWrap::Unwrapped
}
+
+ #[cfg(not(feature = "markdown"))]
+ fn _visit_markdown_filter(
+ &mut self,
+ _buf: &mut Buffer,
+ _args: &[Expr<'_>],
+ ) -> Result {
+ Err("the `markdown` filter requires the `markdown` feature to be enabled".into())
+ }
+
+ #[cfg(feature = "markdown")]
+ fn _visit_markdown_filter(
+ &mut self,
+ buf: &mut Buffer,
+ args: &[Expr<'_>],
+ ) -> Result {
+ let (md, options) = match args {
+ [md] => (md, None),
+ [md, options] => (md, Some(options)),
+ _ => return Err("markdown filter expects no more than one option argument".into()),
+ };
+
+ buf.write(&format!(
+ "::askama::filters::markdown({}, ",
+ self.input.escaper
+ ));
+ self.visit_expr(buf, md)?;
+ match options {
+ Some(options) => {
+ buf.write(", ::core::option::Option::Some(");
+ self.visit_expr(buf, options)?;
+ buf.write(")");
+ }
+ None => buf.write(", ::core::option::Option::None"),
+ }
+ buf.write(")?");
+
+ Ok(DisplayWrap::Wrapped)
+ }
+
+ fn visit_filter(
+ &mut self,
+ buf: &mut Buffer,
+ mut name: &str,
+ args: &[Expr<'_>],
+ ) -> Result {
+ if matches!(name, "escape" | "e") {
+ self._visit_escape_filter(buf, args)?;
+ return Ok(DisplayWrap::Wrapped);
+ } else if name == "format" {
+ self._visit_format_filter(buf, args)?;
+ return Ok(DisplayWrap::Unwrapped);
+ } else if name == "fmt" {
+ self._visit_fmt_filter(buf, args)?;
+ return Ok(DisplayWrap::Unwrapped);
+ } else if name == "join" {
+ self._visit_join_filter(buf, args)?;
+ return Ok(DisplayWrap::Unwrapped);
+ } else if name == "markdown" {
+ return self._visit_markdown_filter(buf, args);
+ }
+
+ if name == "tojson" {
+ name = "json";
+ }
+
+ #[cfg(not(feature = "serde-json"))]
+ if name == "json" {
+ return Err("the `json` filter requires the `serde-json` feature to be enabled".into());
+ }
+ #[cfg(not(feature = "serde-yaml"))]
+ if name == "yaml" {
+ return Err("the `yaml` filter requires the `serde-yaml` feature to be enabled".into());
+ }
+
+ const FILTERS: [&str; 2] = ["safe", "yaml"];
+ if FILTERS.contains(&name) {
+ buf.write(&format!(
+ "::askama::filters::{}({}, ",
+ name, self.input.escaper
+ ));
+ } else if crate::BUILT_IN_FILTERS.contains(&name) {
+ buf.write(&format!("::askama::filters::{name}("));
+ } else {
+ buf.write(&format!("filters::{name}("));
+ }
+
+ self._visit_args(buf, args)?;
+ buf.write(")?");
+ Ok(match FILTERS.contains(&name) {
+ true => DisplayWrap::Wrapped,
+ false => DisplayWrap::Unwrapped,
+ })
+ }
+
/// `|escape`/`|e` filter: applies a named escaper (second argument, a string
/// literal) or the input's default escaper to the first argument.
fn _visit_escape_filter(
    &mut self,
    buf: &mut Buffer,
    args: &[Expr<'_>],
) -> Result<(), CompileError> {
    if args.len() > 2 {
        return Err("only two arguments allowed to escape filter".into());
    }
    let opt_escaper = match args.get(1) {
        Some(Expr::StrLit(name)) => Some(*name),
        Some(_) => return Err("invalid escaper type for escape filter".into()),
        None => None,
    };
    let escaper = match opt_escaper {
        // Look the named escaper up in the configured extension lists.
        Some(name) => self
            .input
            .config
            .escapers
            .iter()
            .find_map(|(escapers, escaper)| escapers.contains(name).then_some(escaper))
            .ok_or_else(|| CompileError::from("invalid escaper for escape filter"))?,
        None => self.input.escaper,
    };
    buf.write("::askama::filters::escape(");
    buf.write(escaper);
    buf.write(", ");
    self._visit_args(buf, &args[..1])?;
    buf.write(")?");
    Ok(())
}

/// `|format` filter: first argument is the format string literal, remaining
/// arguments feed `format!`.
fn _visit_format_filter(
    &mut self,
    buf: &mut Buffer,
    args: &[Expr<'_>],
) -> Result<(), CompileError> {
    buf.write("format!(");
    if let Some(Expr::StrLit(v)) = args.first() {
        self.visit_str_lit(buf, v);
        if args.len() > 1 {
            buf.write(", ");
        }
    } else {
        return Err("invalid expression type for format filter".into());
    }
    self._visit_args(buf, &args[1..])?;
    buf.write(")");
    Ok(())
}

/// `|fmt` filter: like `format`, but the format string is the *second*
/// argument and the filtered value is the only interpolated argument.
fn _visit_fmt_filter(
    &mut self,
    buf: &mut Buffer,
    args: &[Expr<'_>],
) -> Result<(), CompileError> {
    buf.write("format!(");
    if let Some(Expr::StrLit(v)) = args.get(1) {
        self.visit_str_lit(buf, v);
        buf.write(", ");
    } else {
        return Err("invalid expression type for fmt filter".into());
    }
    self._visit_args(buf, &args[0..1])?;
    if args.len() > 2 {
        return Err("only two arguments allowed to fmt filter".into());
    }
    buf.write(")");
    Ok(())
}
+
// Force type coercion on first argument to `join` filter (see #39).
fn _visit_join_filter(
    &mut self,
    buf: &mut Buffer,
    args: &[Expr<'_>],
) -> Result<(), CompileError> {
    buf.write("::askama::filters::join((&");
    for (i, arg) in args.iter().enumerate() {
        if i > 0 {
            buf.write(", &");
        }
        self.visit_expr(buf, arg)?;
        if i == 0 {
            // `.into_iter()` on a reference forces IntoIterator coercion.
            buf.write(").into_iter()");
        }
    }
    buf.write(")?");
    Ok(())
}

/// Renders a comma-separated argument list. Non-`Copy` arguments are
/// borrowed via `&(..)`; non-path callees are wrapped in a block so their
/// statements stay self-contained.
fn _visit_args(&mut self, buf: &mut Buffer, args: &[Expr<'_>]) -> Result<(), CompileError> {
    if args.is_empty() {
        return Ok(());
    }

    for (i, arg) in args.iter().enumerate() {
        if i > 0 {
            buf.write(", ");
        }

        let borrow = !arg.is_copyable();
        if borrow {
            buf.write("&(");
        }

        match arg {
            Expr::Call(left, _) if !matches!(left.as_ref(), Expr::Path(_)) => {
                buf.writeln("{")?;
                self.visit_expr(buf, arg)?;
                buf.writeln("}")?;
            }
            _ => {
                self.visit_expr(buf, arg)?;
            }
        }

        if borrow {
            buf.write(")");
        }
    }
    Ok(())
}
+
+ fn visit_attr(
+ &mut self,
+ buf: &mut Buffer,
+ obj: &Expr<'_>,
+ attr: &str,
+ ) -> Result {
+ if let Expr::Var(name) = *obj {
+ if name == "loop" {
+ if attr == "index" {
+ buf.write("(_loop_item.index + 1)");
+ return Ok(DisplayWrap::Unwrapped);
+ } else if attr == "index0" {
+ buf.write("_loop_item.index");
+ return Ok(DisplayWrap::Unwrapped);
+ } else if attr == "first" {
+ buf.write("_loop_item.first");
+ return Ok(DisplayWrap::Unwrapped);
+ } else if attr == "last" {
+ buf.write("_loop_item.last");
+ return Ok(DisplayWrap::Unwrapped);
+ } else {
+ return Err("unknown loop variable".into());
+ }
+ }
+ }
+ self.visit_expr(buf, obj)?;
+ buf.write(&format!(".{}", normalize_identifier(attr)));
+ Ok(DisplayWrap::Unwrapped)
+ }
+
+ fn visit_index(
+ &mut self,
+ buf: &mut Buffer,
+ obj: &Expr<'_>,
+ key: &Expr<'_>,
+ ) -> Result {
+ buf.write("&");
+ self.visit_expr(buf, obj)?;
+ buf.write("[");
+ self.visit_expr(buf, key)?;
+ buf.write("]");
+ Ok(DisplayWrap::Unwrapped)
+ }
+
+ fn visit_call(
+ &mut self,
+ buf: &mut Buffer,
+ left: &Expr<'_>,
+ args: &[Expr<'_>],
+ ) -> Result {
+ match left {
+ Expr::Attr(left, method) if **left == Expr::Var("loop") => match *method {
+ "cycle" => match args {
+ [arg] => {
+ if matches!(arg, Expr::Array(arr) if arr.is_empty()) {
+ return Err("loop.cycle(…) cannot use an empty array".into());
+ }
+ buf.write("({");
+ buf.write("let _cycle = &(");
+ self.visit_expr(buf, arg)?;
+ buf.writeln(");")?;
+ buf.writeln("let _len = _cycle.len();")?;
+ buf.writeln("if _len == 0 {")?;
+ buf.writeln("return ::core::result::Result::Err(::askama::Error::Fmt(::core::fmt::Error));")?;
+ buf.writeln("}")?;
+ buf.writeln("_cycle[_loop_item.index % _len]")?;
+ buf.writeln("})")?;
+ }
+ _ => return Err("loop.cycle(…) expects exactly one argument".into()),
+ },
+ s => return Err(format!("unknown loop method: {s:?}").into()),
+ },
+ left => {
+ match left {
+ Expr::Var(name) => match self.locals.resolve(name) {
+ Some(resolved) => buf.write(&resolved),
+ None => buf.write(&format!("(&self.{})", normalize_identifier(name))),
+ },
+ left => {
+ self.visit_expr(buf, left)?;
+ }
+ }
+
+ buf.write("(");
+ self._visit_args(buf, args)?;
+ buf.write(")");
+ }
+ }
+ Ok(DisplayWrap::Unwrapped)
+ }
+
+ fn visit_unary(
+ &mut self,
+ buf: &mut Buffer,
+ op: &str,
+ inner: &Expr<'_>,
+ ) -> Result {
+ buf.write(op);
+ self.visit_expr(buf, inner)?;
+ Ok(DisplayWrap::Unwrapped)
+ }
+
+ fn visit_range(
+ &mut self,
+ buf: &mut Buffer,
+ op: &str,
+ left: Option<&Expr<'_>>,
+ right: Option<&Expr<'_>>,
+ ) -> Result {
+ if let Some(left) = left {
+ self.visit_expr(buf, left)?;
+ }
+ buf.write(op);
+ if let Some(right) = right {
+ self.visit_expr(buf, right)?;
+ }
+ Ok(DisplayWrap::Unwrapped)
+ }
+
+ fn visit_binop(
+ &mut self,
+ buf: &mut Buffer,
+ op: &str,
+ left: &Expr<'_>,
+ right: &Expr<'_>,
+ ) -> Result {
+ self.visit_expr(buf, left)?;
+ buf.write(&format!(" {op} "));
+ self.visit_expr(buf, right)?;
+ Ok(DisplayWrap::Unwrapped)
+ }
+
+ fn visit_group(
+ &mut self,
+ buf: &mut Buffer,
+ inner: &Expr<'_>,
+ ) -> Result {
+ buf.write("(");
+ self.visit_expr(buf, inner)?;
+ buf.write(")");
+ Ok(DisplayWrap::Unwrapped)
+ }
+
+ fn visit_tuple(
+ &mut self,
+ buf: &mut Buffer,
+ exprs: &[Expr<'_>],
+ ) -> Result {
+ buf.write("(");
+ for (index, expr) in exprs.iter().enumerate() {
+ if index > 0 {
+ buf.write(" ");
+ }
+ self.visit_expr(buf, expr)?;
+ buf.write(",");
+ }
+ buf.write(")");
+ Ok(DisplayWrap::Unwrapped)
+ }
+
+ fn visit_array(
+ &mut self,
+ buf: &mut Buffer,
+ elements: &[Expr<'_>],
+ ) -> Result {
+ buf.write("[");
+ for (i, el) in elements.iter().enumerate() {
+ if i > 0 {
+ buf.write(", ");
+ }
+ self.visit_expr(buf, el)?;
+ }
+ buf.write("]");
+ Ok(DisplayWrap::Unwrapped)
+ }
+
/// Path expression `a::b::c`.
fn visit_path(&mut self, buf: &mut Buffer, path: &[&str]) -> DisplayWrap {
    for (i, part) in path.iter().enumerate() {
        if i > 0 {
            buf.write("::");
        }
        buf.write(part);
    }
    DisplayWrap::Unwrapped
}

/// Variable reference: `self` passes through; other names resolve via the
/// local-variable chain, falling back to a `self.<name>` field access.
fn visit_var(&mut self, buf: &mut Buffer, s: &str) -> DisplayWrap {
    if s == "self" {
        buf.write(s);
        return DisplayWrap::Unwrapped;
    }

    buf.write(normalize_identifier(&self.locals.resolve_or_self(s)));
    DisplayWrap::Unwrapped
}

/// Boolean literal, emitted verbatim.
fn visit_bool_lit(&mut self, buf: &mut Buffer, s: &str) -> DisplayWrap {
    buf.write(s);
    DisplayWrap::Unwrapped
}

/// String literal, re-quoted (source slice excludes the quotes).
fn visit_str_lit(&mut self, buf: &mut Buffer, s: &str) -> DisplayWrap {
    buf.write(&format!("\"{s}\""));
    DisplayWrap::Unwrapped
}

/// Char literal, re-quoted.
fn visit_char_lit(&mut self, buf: &mut Buffer, s: &str) -> DisplayWrap {
    buf.write(&format!("'{s}'"));
    DisplayWrap::Unwrapped
}

/// Numeric literal, emitted verbatim.
fn visit_num_lit(&mut self, buf: &mut Buffer, s: &str) -> DisplayWrap {
    buf.write(s);
    DisplayWrap::Unwrapped
}
+
/// Renders a binding target (left-hand side of `let`, or a `match` pattern)
/// and registers any bound names as locals.
///
/// `initialized` records whether the binding receives a value here;
/// `first_level` is true only for the outermost target and adds a `&` in
/// front of literal patterns (matched values are borrowed).
fn visit_target(
    &mut self,
    buf: &mut Buffer,
    initialized: bool,
    first_level: bool,
    target: &Target<'a>,
) {
    match target {
        Target::Name("_") => {
            // Wildcard binds nothing; don't register a local.
            buf.write("_");
        }
        Target::Name(name) => {
            let name = normalize_identifier(name);
            match initialized {
                true => self.locals.insert(name, LocalMeta::initialized()),
                false => self.locals.insert_with_default(name),
            }
            buf.write(name);
        }
        Target::Tuple(path, targets) => {
            buf.write(&path.join("::"));
            buf.write("(");
            for target in targets {
                self.visit_target(buf, initialized, false, target);
                buf.write(",");
            }
            buf.write(")");
        }
        Target::Struct(path, targets) => {
            buf.write(&path.join("::"));
            buf.write(" { ");
            for (name, target) in targets {
                buf.write(normalize_identifier(name));
                buf.write(": ");
                self.visit_target(buf, initialized, false, target);
                buf.write(",");
            }
            buf.write(" }");
        }
        Target::Path(path) => {
            self.visit_path(buf, path);
        }
        Target::StrLit(s) => {
            if first_level {
                buf.write("&");
            }
            self.visit_str_lit(buf, s);
        }
        Target::NumLit(s) => {
            if first_level {
                buf.write("&");
            }
            self.visit_num_lit(buf, s);
        }
        Target::CharLit(s) => {
            if first_level {
                buf.write("&");
            }
            self.visit_char_lit(buf, s);
        }
        Target::BoolLit(s) => {
            if first_level {
                buf.write("&");
            }
            buf.write(s);
        }
    }
}
+
+ /* Helper methods for dealing with whitespace nodes */
+
+ // Combines `flush_ws()` and `prepare_ws()` to handle both trailing whitespace from the
+ // preceding literal and leading whitespace from the succeeding literal.
// Combines `flush_ws()` and `prepare_ws()` to handle both trailing whitespace from the
// preceding literal and leading whitespace from the succeeding literal.
fn handle_ws(&mut self, ws: Ws) {
    self.flush_ws(ws);
    self.prepare_ws(ws);
}
+
+ fn should_trim_ws(&self, ws: Option) -> WhitespaceHandling {
+ match ws {
+ Some(Whitespace::Suppress) => WhitespaceHandling::Suppress,
+ Some(Whitespace::Preserve) => WhitespaceHandling::Preserve,
+ Some(Whitespace::Minimize) => WhitespaceHandling::Minimize,
+ None => self.whitespace,
+ }
+ }
+
+ // If the previous literal left some trailing whitespace in `next_ws` and the
+ // prefix whitespace suppressor from the given argument, flush that whitespace.
+ // In either case, `next_ws` is reset to `None` (no trailing whitespace).
// If the previous literal left some trailing whitespace in `next_ws` and the
// prefix whitespace suppressor from the given argument, flush that whitespace.
// In either case, `next_ws` is reset to `None` (no trailing whitespace).
fn flush_ws(&mut self, ws: Ws) {
    if self.next_ws.is_none() {
        return;
    }

    // If `whitespace` is set to `suppress`, we keep the whitespace characters only if there is
    // a `+` character.
    match self.should_trim_ws(ws.0) {
        WhitespaceHandling::Preserve => {
            let val = self.next_ws.unwrap();
            if !val.is_empty() {
                self.buf_writable.push(Writable::Lit(val));
            }
        }
        WhitespaceHandling::Minimize => {
            // Collapse the deferred run to a single newline or space.
            let val = self.next_ws.unwrap();
            if !val.is_empty() {
                self.buf_writable
                    .push(Writable::Lit(match val.contains('\n') {
                        true => "\n",
                        false => " ",
                    }));
            }
        }
        WhitespaceHandling::Suppress => {}
    }
    self.next_ws = None;
}
+
+ // Sets `skip_ws` to match the suffix whitespace suppressor from the given
+ // argument, to determine whether to suppress leading whitespace from the
+ // next literal.
// Sets `skip_ws` to match the suffix whitespace suppressor from the given
// argument, to determine whether to suppress leading whitespace from the
// next literal.
fn prepare_ws(&mut self, ws: Ws) {
    self.skip_ws = self.should_trim_ws(ws.1);
}
+}
+
/// An indentation-aware string builder for the generated Rust code.
struct Buffer {
    // The buffer to generate the code into
    buf: String,
    // The current level of indentation (in spaces)
    indent: u8,
    // Whether the output buffer is currently at the start of a line
    start: bool,
}

impl Buffer {
    /// Creates an empty buffer starting at the given indentation level.
    fn new(indent: u8) -> Self {
        Self {
            buf: String::new(),
            indent,
            start: true,
        }
    }

    /// Writes `s` and a newline. A lone `"}"` dedents *before* writing, and a
    /// line ending in `{` indents *after* — so emitted braces self-indent.
    fn writeln(&mut self, s: &str) -> Result<(), CompileError> {
        if s == "}" {
            self.dedent()?;
        }
        if !s.is_empty() {
            self.write(s);
        }
        self.buf.push('\n');
        if s.ends_with('{') {
            self.indent();
        }
        self.start = true;
        Ok(())
    }

    /// Appends `s`, emitting the pending indentation (4 spaces per level) if
    /// this is the first write on the current line.
    fn write(&mut self, s: &str) {
        if self.start {
            for _ in 0..(self.indent * 4) {
                self.buf.push(' ');
            }
            self.start = false;
        }
        self.buf.push_str(s);
    }

    /// Increases the indentation level by one.
    fn indent(&mut self) {
        self.indent += 1;
    }

    /// Decreases the indentation level; errors on underflow (unbalanced
    /// braces in the generator itself).
    fn dedent(&mut self) -> Result<(), CompileError> {
        if self.indent == 0 {
            return Err("dedent() called while indentation == 0".into());
        }
        self.indent -= 1;
        Ok(())
    }
}
+
/// Metadata tracked per local template variable.
///
/// NOTE(review): the extraction stripped the generic argument from `refs`;
/// restored to `Option<String>` (the replacement expression, if any).
#[derive(Clone, Default)]
struct LocalMeta {
    // Replacement expression to emit instead of the bare name, if set.
    refs: Option<String>,
    // Whether the variable has been assigned a value (vs. only declared).
    initialized: bool,
}
+
impl LocalMeta {
    /// Metadata for a variable bound directly to a value.
    fn initialized() -> Self {
        Self {
            refs: None,
            initialized: true,
        }
    }

    /// Metadata for a variable that should be emitted as `refs` instead of
    /// its own name (e.g. macro arguments bound to caller expressions).
    fn with_ref(refs: String) -> Self {
        Self {
            refs: Some(refs),
            initialized: true,
        }
    }
}
+
+// type SetChain<'a, T> = MapChain<'a, T, ()>;
+
/// A chain of scoped hash maps: lookups walk the local scope stack from
/// innermost to outermost, then recurse into the parent chain.
///
/// NOTE(review): the extraction stripped the generic arguments from the
/// `scopes` field; restored to `Vec<HashMap<K, V>>`.
#[derive(Debug)]
struct MapChain<'a, K, V>
where
    K: cmp::Eq + hash::Hash,
{
    // Enclosing chain (e.g. the caller's scope when expanding a macro).
    parent: Option<&'a MapChain<'a, K, V>>,
    // Scope stack for this chain; always holds at least one map.
    scopes: Vec<HashMap<K, V>>,
}
+
impl<'a, K: 'a, V: 'a> MapChain<'a, K, V>
where
    K: cmp::Eq + hash::Hash,
{
    /// A fresh chain with no parent and a single empty scope.
    fn new() -> MapChain<'a, K, V> {
        MapChain {
            parent: None,
            scopes: vec![HashMap::new()],
        }
    }

    /// A chain layered on top of `parent`, starting with one empty scope.
    fn with_parent<'p>(parent: &'p MapChain<'_, K, V>) -> MapChain<'p, K, V> {
        MapChain {
            parent: Some(parent),
            scopes: vec![HashMap::new()],
        }
    }

    /// Iterates the scopes in reverse (innermost first) and returns the first
    /// value found for `key`, falling back to the parent chain.
    fn get(&self, key: &K) -> Option<&V> {
        let mut scopes = self.scopes.iter().rev();
        scopes
            .find_map(|set| set.get(key))
            .or_else(|| self.parent.and_then(|set| set.get(key)))
    }

    /// Whether the innermost scope holds no entries.
    fn is_current_empty(&self) -> bool {
        self.scopes.last().unwrap().is_empty()
    }

    /// Inserts into the innermost scope.
    fn insert(&mut self, key: K, val: V) {
        self.scopes.last_mut().unwrap().insert(key, val);

        // Note that if `insert` returns `Some` then it implies
        // an identifier is reused. For e.g. `{% macro f(a, a) %}`
        // and `{% let (a, a) = ... %}` then this results in a
        // generated template, which when compiled fails with the
        // compile error "identifier `a` used more than once".
    }

    /// Inserts `key` with `V::default()` into the innermost scope.
    fn insert_with_default(&mut self, key: K)
    where
        V: Default,
    {
        self.insert(key, V::default());
    }

    /// Opens a new innermost scope.
    fn push(&mut self) {
        self.scopes.push(HashMap::new());
    }

    /// Closes the innermost scope; the base scope must never be popped.
    fn pop(&mut self) {
        self.scopes.pop().unwrap();
        assert!(!self.scopes.is_empty());
    }
}
+
+impl MapChain<'_, &str, LocalMeta> {
+ fn resolve(&self, name: &str) -> Option {
+ let name = normalize_identifier(name);
+ self.get(&name).map(|meta| match &meta.refs {
+ Some(expr) => expr.clone(),
+ None => name.to_string(),
+ })
+ }
+
+ fn resolve_or_self(&self, name: &str) -> String {
+ let name = normalize_identifier(name);
+ self.resolve(name).unwrap_or_else(|| format!("self.{name}"))
+ }
+}
+
/// Returns the median of `sizes`, sorting the slice in place as a side
/// effect. For an even count, the two middle values are averaged with
/// integer division.
fn median(sizes: &mut [usize]) -> usize {
    sizes.sort_unstable();
    let mid = sizes.len() / 2;
    match sizes.len() % 2 {
        1 => sizes[mid],
        _ => (sizes[mid - 1] + sizes[mid]) / 2,
    }
}
+
// Where in the template AST the generator currently is: the template root,
// the body of a `{% block %}`, or nested inside another node.
#[derive(Clone, Copy, PartialEq)]
enum AstLevel {
    Top,
    Block,
    Nested,
}

// Whether `visit_expr` produced code that is already wrapped for safe
// display (`Wrapped`) or still needs a `MarkupDisplay` wrapper (`Unwrapped`).
#[derive(Clone, Copy)]
enum DisplayWrap {
    Wrapped,
    Unwrapped,
}

// A queued output item: a literal template chunk or an expression to render.
#[derive(Debug)]
enum Writable<'a> {
    Lit(&'a str),
    Expr(&'a Expr<'a>),
}
+
+// Identifiers to be replaced with raw identifiers, so as to avoid
+// collisions between template syntax and Rust's syntax. In particular
+// [Rust keywords](https://doc.rust-lang.org/reference/keywords.html)
+// should be replaced, since they're not reserved words in Askama
+// syntax but have a high probability of causing problems in the
+// generated code.
+//
+// This list excludes the Rust keywords *self*, *Self*, and *super*
+// because they are not allowed to be raw identifiers, and *loop*
+// because it's used something like a keyword in the template
+// language.
// Keep the array length in sync with the entry count; `normalize_identifier`
// does a linear scan over this table.
static USE_RAW: [(&str, &str); 47] = [
    ("as", "r#as"),
    ("break", "r#break"),
    ("const", "r#const"),
    ("continue", "r#continue"),
    ("crate", "r#crate"),
    ("else", "r#else"),
    ("enum", "r#enum"),
    ("extern", "r#extern"),
    ("false", "r#false"),
    ("fn", "r#fn"),
    ("for", "r#for"),
    ("if", "r#if"),
    ("impl", "r#impl"),
    ("in", "r#in"),
    ("let", "r#let"),
    ("match", "r#match"),
    ("mod", "r#mod"),
    ("move", "r#move"),
    ("mut", "r#mut"),
    ("pub", "r#pub"),
    ("ref", "r#ref"),
    ("return", "r#return"),
    ("static", "r#static"),
    ("struct", "r#struct"),
    ("trait", "r#trait"),
    ("true", "r#true"),
    ("type", "r#type"),
    ("unsafe", "r#unsafe"),
    ("use", "r#use"),
    ("where", "r#where"),
    ("while", "r#while"),
    ("async", "r#async"),
    ("await", "r#await"),
    ("dyn", "r#dyn"),
    ("abstract", "r#abstract"),
    ("become", "r#become"),
    ("box", "r#box"),
    ("do", "r#do"),
    ("final", "r#final"),
    ("macro", "r#macro"),
    ("override", "r#override"),
    ("priv", "r#priv"),
    ("typeof", "r#typeof"),
    ("unsized", "r#unsized"),
    ("virtual", "r#virtual"),
    ("yield", "r#yield"),
    ("try", "r#try"),
];
+
+fn normalize_identifier(ident: &str) -> &str {
+ if let Some(word) = USE_RAW.iter().find(|x| x.0 == ident) {
+ word.1
+ } else {
+ ident
+ }
+}
diff --git a/third_party/rust/askama_derive/src/heritage.rs b/third_party/rust/askama_derive/src/heritage.rs
new file mode 100644
index 000000000000..dbb2b1fa0091
--- /dev/null
+++ b/third_party/rust/askama_derive/src/heritage.rs
@@ -0,0 +1,126 @@
+use std::collections::HashMap;
+use std::path::{Path, PathBuf};
+
+use crate::config::Config;
+use crate::parser::{Loop, Macro, Node};
+use crate::CompileError;
+
/// The resolved inheritance (`{% extends %}`) chain for one template.
pub(crate) struct Heritage<'a> {
    // The root ancestor (the template with no `extends` of its own).
    pub(crate) root: &'a Context<'a>,
    // For each block name, its definitions from the most-derived template
    // outward; `super()` walks along each vector.
    pub(crate) blocks: BlockAncestry<'a>,
}

impl Heritage<'_> {
    /// Walks the `extends` chain starting at `ctx`, collecting every block
    /// definition per name; `contexts` must contain all ancestor templates.
    pub(crate) fn new<'n>(
        mut ctx: &'n Context<'n>,
        contexts: &'n HashMap<&'n Path, Context<'n>>,
    ) -> Heritage<'n> {
        let mut blocks: BlockAncestry<'n> = ctx
            .blocks
            .iter()
            .map(|(name, def)| (*name, vec![(ctx, *def)]))
            .collect();

        while let Some(ref path) = ctx.extends {
            ctx = &contexts[path.as_path()];
            for (name, def) in &ctx.blocks {
                blocks.entry(name).or_insert_with(Vec::new).push((ctx, def));
            }
        }

        // After the loop, `ctx` is the chain's root template.
        Heritage { root: ctx, blocks }
    }
}
+
// Block name -> (defining context, `Node::BlockDef`) pairs, most-derived first.
type BlockAncestry<'a> = HashMap<&'a str, Vec<(&'a Context<'a>, &'a Node<'a>)>>;
+
+pub(crate) struct Context<'a> {
+ pub(crate) nodes: &'a [Node<'a>],
+ pub(crate) extends: Option,
+ pub(crate) blocks: HashMap<&'a str, &'a Node<'a>>,
+ pub(crate) macros: HashMap<&'a str, &'a Macro<'a>>,
+ pub(crate) imports: HashMap<&'a str, PathBuf>,
+}
+
+impl Context<'_> {
+ pub(crate) fn new<'n>(
+ config: &Config<'_>,
+ path: &Path,
+ nodes: &'n [Node<'n>],
+ ) -> Result, CompileError> {
+ let mut extends = None;
+ let mut blocks = Vec::new();
+ let mut macros = HashMap::new();
+ let mut imports = HashMap::new();
+ let mut nested = vec![nodes];
+ let mut top = true;
+
+ while let Some(nodes) = nested.pop() {
+ for n in nodes {
+ match n {
+ Node::Extends(extends_path) if top => match extends {
+ Some(_) => return Err("multiple extend blocks found".into()),
+ None => {
+ extends = Some(config.find_template(extends_path, Some(path))?);
+ }
+ },
+ Node::Macro(name, m) if top => {
+ macros.insert(*name, m);
+ }
+ Node::Import(_, import_path, scope) if top => {
+ let path = config.find_template(import_path, Some(path))?;
+ imports.insert(*scope, path);
+ }
+ Node::Extends(_) | Node::Macro(_, _) | Node::Import(_, _, _) if !top => {
+ return Err(
+ "extends, macro or import blocks not allowed below top level".into(),
+ );
+ }
+ def @ Node::BlockDef(_, _, _, _) => {
+ blocks.push(def);
+ if let Node::BlockDef(_, _, nodes, _) = def {
+ nested.push(nodes);
+ }
+ }
+ Node::Cond(branches, _) => {
+ for (_, _, nodes) in branches {
+ nested.push(nodes);
+ }
+ }
+ Node::Loop(Loop {
+ body, else_block, ..
+ }) => {
+ nested.push(body);
+ nested.push(else_block);
+ }
+ Node::Match(_, _, arms, _) => {
+ for (_, _, arm) in arms {
+ nested.push(arm);
+ }
+ }
+ _ => {}
+ }
+ }
+ top = false;
+ }
+
+ let blocks: HashMap<_, _> = blocks
+ .iter()
+ .map(|def| {
+ if let Node::BlockDef(_, name, _, _) = def {
+ (*name, *def)
+ } else {
+ unreachable!()
+ }
+ })
+ .collect();
+
+ Ok(Context {
+ nodes,
+ extends,
+ blocks,
+ macros,
+ imports,
+ })
+ }
+}
diff --git a/third_party/rust/askama_derive/src/input.rs b/third_party/rust/askama_derive/src/input.rs
new file mode 100644
index 000000000000..47d51bd891dc
--- /dev/null
+++ b/third_party/rust/askama_derive/src/input.rs
@@ -0,0 +1,231 @@
+use crate::config::{Config, Syntax};
+use crate::generator::TemplateArgs;
+use crate::CompileError;
+
+use std::path::{Path, PathBuf};
+use std::str::FromStr;
+
+use mime::Mime;
+
+pub(crate) struct TemplateInput<'a> {
+ pub(crate) ast: &'a syn::DeriveInput,
+ pub(crate) config: &'a Config<'a>,
+ pub(crate) syntax: &'a Syntax<'a>,
+ pub(crate) source: Source,
+ pub(crate) print: Print,
+ pub(crate) escaper: &'a str,
+ pub(crate) ext: Option,
+ pub(crate) mime_type: String,
+ pub(crate) path: PathBuf,
+}
+
+impl TemplateInput<'_> {
+ /// Extract the template metadata from the `DeriveInput` structure. This
+ /// mostly recovers the data for the `TemplateInput` fields from the
+ /// `template()` attribute list fields.
+ pub(crate) fn new<'n>(
+ ast: &'n syn::DeriveInput,
+ config: &'n Config<'_>,
+ args: TemplateArgs,
+ ) -> Result, CompileError> {
+ let TemplateArgs {
+ source,
+ print,
+ escaping,
+ ext,
+ syntax,
+ ..
+ } = args;
+
+ // Validate the `source` and `ext` value together, since they are
+ // related. In case `source` was used instead of `path`, the value
+ // of `ext` is merged into a synthetic `path` value here.
+ let source = source.expect("template path or source not found in attributes");
+ let path = match (&source, &ext) {
+ (Source::Path(path), _) => config.find_template(path, None)?,
+ (&Source::Source(_), Some(ext)) => PathBuf::from(format!("{}.{}", ast.ident, ext)),
+ (&Source::Source(_), None) => {
+ return Err("must include 'ext' attribute when using 'source' attribute".into())
+ }
+ };
+
+ // Validate syntax
+ let syntax = syntax.map_or_else(
+ || Ok(config.syntaxes.get(config.default_syntax).unwrap()),
+ |s| {
+ config
+ .syntaxes
+ .get(&s)
+ .ok_or_else(|| CompileError::from(format!("attribute syntax {s} not exist")))
+ },
+ )?;
+
+ // Match extension against defined output formats
+
+ let escaping = escaping.unwrap_or_else(|| {
+ path.extension()
+ .map(|s| s.to_str().unwrap())
+ .unwrap_or("")
+ .to_string()
+ });
+
+ let mut escaper = None;
+ for (extensions, path) in &config.escapers {
+ if extensions.contains(&escaping) {
+ escaper = Some(path);
+ break;
+ }
+ }
+
+ let escaper = escaper.ok_or_else(|| {
+ CompileError::from(format!("no escaper defined for extension '{escaping}'"))
+ })?;
+
+ let mime_type =
+ extension_to_mime_type(ext_default_to_path(ext.as_deref(), &path).unwrap_or("txt"))
+ .to_string();
+
+ Ok(TemplateInput {
+ ast,
+ config,
+ syntax,
+ source,
+ print,
+ escaper,
+ ext,
+ mime_type,
+ path,
+ })
+ }
+
+ #[inline]
+ pub(crate) fn extension(&self) -> Option<&str> {
+ ext_default_to_path(self.ext.as_deref(), &self.path)
+ }
+}
+
// Prefers an explicit `ext` attribute, falling back to the extension derived
// from the template path.
#[inline]
fn ext_default_to_path<'a>(ext: Option<&'a str>, path: &'a Path) -> Option<&'a str> {
    ext.or_else(|| extension(path))
}
+
/// Returns the effective template extension of `path`.
///
/// Jinja wrapper extensions (`.j2`, `.jinja`, `.jinja2`) are stripped in
/// favor of the inner extension when one exists: `foo.html.j2` -> `html`,
/// while a bare `foo.j2` keeps `j2`.
fn extension(path: &Path) -> Option<&str> {
    const JINJA_EXTENSIONS: [&str; 3] = ["j2", "jinja", "jinja2"];

    let ext = path.extension().map(|s| s.to_str().unwrap())?;
    if !JINJA_EXTENSIONS.contains(&ext) {
        return Some(ext);
    }
    // Drop the Jinja wrapper and look at the stem's own extension, if any.
    match Path::new(path.file_stem().unwrap()).extension() {
        Some(inner) => Some(inner.to_str().unwrap()),
        None => Some(ext),
    }
}
+
// Where the template text comes from: a `path = "..."` attribute (relative
// to the configured template dirs) or an inline `source = "..."` string.
pub(crate) enum Source {
    Path(String),
    Source(String),
}

// Debug-print selection from the `print` attribute: dump the AST, the
// generated code, both, or nothing.
#[derive(PartialEq)]
pub(crate) enum Print {
    All,
    Ast,
    Code,
    None,
}
+
+impl FromStr for Print {
+ type Err = CompileError;
+
+ fn from_str(s: &str) -> Result {
+ use self::Print::*;
+ Ok(match s {
+ "all" => All,
+ "ast" => Ast,
+ "code" => Code,
+ "none" => None,
+ v => return Err(format!("invalid value for print option: {v}",).into()),
+ })
+ }
+}
+
// Absent `print` attribute means: print nothing.
impl Default for Print {
    fn default() -> Self {
        Self::None
    }
}
+
/// Guesses the MIME type for a file extension, upgrading known text types to
/// their `; charset=utf-8` variants (generated output is always UTF-8).
pub(crate) fn extension_to_mime_type(ext: &str) -> Mime {
    let basic_type = mime_guess::from_ext(ext).first_or_octet_stream();
    for (simple, utf_8) in &TEXT_TYPES {
        if &basic_type == simple {
            return utf_8.clone();
        }
    }
    basic_type
}

// (plain, utf-8) MIME pairs used by `extension_to_mime_type` above.
const TEXT_TYPES: [(Mime, Mime); 6] = [
    (mime::TEXT_PLAIN, mime::TEXT_PLAIN_UTF_8),
    (mime::TEXT_HTML, mime::TEXT_HTML_UTF_8),
    (mime::TEXT_CSS, mime::TEXT_CSS_UTF_8),
    (mime::TEXT_CSV, mime::TEXT_CSV_UTF_8),
    (
        mime::TEXT_TAB_SEPARATED_VALUES,
        mime::TEXT_TAB_SEPARATED_VALUES_UTF_8,
    ),
    (
        mime::APPLICATION_JAVASCRIPT,
        mime::APPLICATION_JAVASCRIPT_UTF_8,
    ),
];
+
// Unit tests for `extension` (plain, double, and Jinja-wrapper extensions).
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_ext() {
        assert_eq!(extension(Path::new("foo-bar.txt")), Some("txt"));
        assert_eq!(extension(Path::new("foo-bar.html")), Some("html"));
        assert_eq!(extension(Path::new("foo-bar.unknown")), Some("unknown"));

        assert_eq!(extension(Path::new("foo/bar/baz.txt")), Some("txt"));
        assert_eq!(extension(Path::new("foo/bar/baz.html")), Some("html"));
        assert_eq!(extension(Path::new("foo/bar/baz.unknown")), Some("unknown"));
    }

    #[test]
    fn test_double_ext() {
        assert_eq!(extension(Path::new("foo-bar.html.txt")), Some("txt"));
        assert_eq!(extension(Path::new("foo-bar.txt.html")), Some("html"));
        assert_eq!(extension(Path::new("foo-bar.txt.unknown")), Some("unknown"));

        assert_eq!(extension(Path::new("foo/bar/baz.html.txt")), Some("txt"));
        assert_eq!(extension(Path::new("foo/bar/baz.txt.html")), Some("html"));
        assert_eq!(
            extension(Path::new("foo/bar/baz.txt.unknown")),
            Some("unknown")
        );
    }

    #[test]
    fn test_skip_jinja_ext() {
        assert_eq!(extension(Path::new("foo-bar.html.j2")), Some("html"));
        assert_eq!(extension(Path::new("foo-bar.html.jinja")), Some("html"));
        assert_eq!(extension(Path::new("foo-bar.html.jinja2")), Some("html"));

        assert_eq!(extension(Path::new("foo/bar/baz.txt.j2")), Some("txt"));
        assert_eq!(extension(Path::new("foo/bar/baz.txt.jinja")), Some("txt"));
        assert_eq!(extension(Path::new("foo/bar/baz.txt.jinja2")), Some("txt"));
    }

    #[test]
    fn test_only_jinja_ext() {
        assert_eq!(extension(Path::new("foo-bar.j2")), Some("j2"));
        assert_eq!(extension(Path::new("foo-bar.jinja")), Some("jinja"));
        assert_eq!(extension(Path::new("foo-bar.jinja2")), Some("jinja2"));
    }
}
diff --git a/third_party/rust/askama_derive/src/lib.rs b/third_party/rust/askama_derive/src/lib.rs
new file mode 100644
index 000000000000..2acf58380b04
--- /dev/null
+++ b/third_party/rust/askama_derive/src/lib.rs
@@ -0,0 +1,100 @@
+#![forbid(unsafe_code)]
+#![deny(elided_lifetimes_in_paths)]
+#![deny(unreachable_pub)]
+
+use std::borrow::Cow;
+use std::fmt;
+
+use proc_macro::TokenStream;
+use proc_macro2::Span;
+
+mod config;
+mod generator;
+mod heritage;
+mod input;
+mod parser;
+
+#[proc_macro_derive(Template, attributes(template))]
+pub fn derive_template(input: TokenStream) -> TokenStream {
+ generator::derive_template(input)
+}
+
+#[derive(Debug, Clone)]
+struct CompileError {
+ msg: Cow<'static, str>,
+ span: Span,
+}
+
+impl CompileError {
+    fn new<S: Into<Cow<'static, str>>>(s: S, span: Span) -> Self {
+ Self {
+ msg: s.into(),
+ span,
+ }
+ }
+
+ fn into_compile_error(self) -> TokenStream {
+ syn::Error::new(self.span, self.msg)
+ .to_compile_error()
+ .into()
+ }
+}
+
+impl std::error::Error for CompileError {}
+
+impl fmt::Display for CompileError {
+ #[inline]
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt.write_str(&self.msg)
+ }
+}
+
+impl From<&'static str> for CompileError {
+ #[inline]
+ fn from(s: &'static str) -> Self {
+ Self::new(s, Span::call_site())
+ }
+}
+
+impl From<String> for CompileError {
+ #[inline]
+ fn from(s: String) -> Self {
+ Self::new(s, Span::call_site())
+ }
+}
+
+// This is used by the code generator to decide whether a named filter is part of
+// Askama or should refer to a local `filters` module. It should contain all the
+// filters shipped with Askama, even the optional ones (since optional inclusion
+// in the const vector based on features seems impossible right now).
+const BUILT_IN_FILTERS: &[&str] = &[
+ "abs",
+ "capitalize",
+ "center",
+ "e",
+ "escape",
+ "filesizeformat",
+ "fmt",
+ "format",
+ "indent",
+ "into_f64",
+ "into_isize",
+ "join",
+ "linebreaks",
+ "linebreaksbr",
+ "paragraphbreaks",
+ "lower",
+ "lowercase",
+ "safe",
+ "trim",
+ "truncate",
+ "upper",
+ "uppercase",
+ "urlencode",
+ "urlencode_strict",
+ "wordcount",
+ // optional features, reserve the names anyway:
+ "json",
+ "markdown",
+ "yaml",
+];
diff --git a/third_party/rust/askama_derive/src/parser/expr.rs b/third_party/rust/askama_derive/src/parser/expr.rs
new file mode 100644
index 000000000000..fabaa34cc44e
--- /dev/null
+++ b/third_party/rust/askama_derive/src/parser/expr.rs
@@ -0,0 +1,346 @@
+use std::str;
+
+use nom::branch::alt;
+use nom::bytes::complete::{tag, take_till};
+use nom::character::complete::char;
+use nom::combinator::{cut, map, not, opt, peek, recognize};
+use nom::multi::{fold_many0, many0, separated_list0, separated_list1};
+use nom::sequence::{delimited, pair, preceded, terminated, tuple};
+use nom::IResult;
+
+use super::{
+ bool_lit, char_lit, identifier, nested_parenthesis, not_ws, num_lit, path, str_lit, ws,
+};
+
+#[derive(Debug, PartialEq)]
+pub(crate) enum Expr<'a> {
+ BoolLit(&'a str),
+ NumLit(&'a str),
+ StrLit(&'a str),
+ CharLit(&'a str),
+ Var(&'a str),
+ Path(Vec<&'a str>),
+    Array(Vec<Expr<'a>>),
+    Attr(Box<Expr<'a>>, &'a str),
+    Index(Box<Expr<'a>>, Box<Expr<'a>>),
+    Filter(&'a str, Vec<Expr<'a>>),
+    Unary(&'a str, Box<Expr<'a>>),
+    BinOp(&'a str, Box<Expr<'a>>, Box<Expr<'a>>),
+    Range(&'a str, Option<Box<Expr<'a>>>, Option<Box<Expr<'a>>>),
+    Group(Box<Expr<'a>>),
+    Tuple(Vec<Expr<'a>>),
+    Call(Box<Expr<'a>>, Vec<Expr<'a>>),
+    RustMacro(&'a str, &'a str),
+    Try(Box<Expr<'a>>),
+}
+
+impl Expr<'_> {
+ pub(super) fn parse(i: &str) -> IResult<&str, Expr<'_>> {
+ expr_any(i)
+ }
+
+    pub(super) fn parse_arguments(i: &str) -> IResult<&str, Vec<Expr<'_>>> {
+ arguments(i)
+ }
+
+ /// Returns `true` if enough assumptions can be made,
+ /// to determine that `self` is copyable.
+ pub(crate) fn is_copyable(&self) -> bool {
+ self.is_copyable_within_op(false)
+ }
+
+ fn is_copyable_within_op(&self, within_op: bool) -> bool {
+ use Expr::*;
+ match self {
+ BoolLit(_) | NumLit(_) | StrLit(_) | CharLit(_) => true,
+ Unary(.., expr) => expr.is_copyable_within_op(true),
+ BinOp(_, lhs, rhs) => {
+ lhs.is_copyable_within_op(true) && rhs.is_copyable_within_op(true)
+ }
+ Range(..) => true,
+ // The result of a call likely doesn't need to be borrowed,
+ // as in that case the call is more likely to return a
+ // reference in the first place then.
+ Call(..) | Path(..) => true,
+ // If the `expr` is within a `Unary` or `BinOp` then
+ // an assumption can be made that the operand is copy.
+ // If not, then the value is moved and adding `.clone()`
+ // will solve that issue. However, if the operand is
+ // implicitly borrowed, then it's likely not even possible
+ // to get the template to compile.
+ _ => within_op && self.is_attr_self(),
+ }
+ }
+
+ /// Returns `true` if this is an `Attr` where the `obj` is `"self"`.
+ pub(crate) fn is_attr_self(&self) -> bool {
+ match self {
+ Expr::Attr(obj, _) if matches!(obj.as_ref(), Expr::Var("self")) => true,
+ Expr::Attr(obj, _) if matches!(obj.as_ref(), Expr::Attr(..)) => obj.is_attr_self(),
+ _ => false,
+ }
+ }
+
+ /// Returns `true` if the outcome of this expression may be used multiple times in the same
+ /// `write!()` call, without evaluating the expression again, i.e. the expression should be
+ /// side-effect free.
+ pub(crate) fn is_cacheable(&self) -> bool {
+ match self {
+ // Literals are the definition of pure:
+ Expr::BoolLit(_) => true,
+ Expr::NumLit(_) => true,
+ Expr::StrLit(_) => true,
+ Expr::CharLit(_) => true,
+ // fmt::Display should have no effects:
+ Expr::Var(_) => true,
+ Expr::Path(_) => true,
+ // Check recursively:
+ Expr::Array(args) => args.iter().all(|arg| arg.is_cacheable()),
+ Expr::Attr(lhs, _) => lhs.is_cacheable(),
+ Expr::Index(lhs, rhs) => lhs.is_cacheable() && rhs.is_cacheable(),
+ Expr::Filter(_, args) => args.iter().all(|arg| arg.is_cacheable()),
+ Expr::Unary(_, arg) => arg.is_cacheable(),
+ Expr::BinOp(_, lhs, rhs) => lhs.is_cacheable() && rhs.is_cacheable(),
+ Expr::Range(_, lhs, rhs) => {
+ lhs.as_ref().map_or(true, |v| v.is_cacheable())
+ && rhs.as_ref().map_or(true, |v| v.is_cacheable())
+ }
+ Expr::Group(arg) => arg.is_cacheable(),
+ Expr::Tuple(args) => args.iter().all(|arg| arg.is_cacheable()),
+ // We have too little information to tell if the expression is pure:
+ Expr::Call(_, _) => false,
+ Expr::RustMacro(_, _) => false,
+ Expr::Try(_) => false,
+ }
+ }
+}
+
+fn expr_bool_lit(i: &str) -> IResult<&str, Expr<'_>> {
+ map(bool_lit, Expr::BoolLit)(i)
+}
+
+fn expr_num_lit(i: &str) -> IResult<&str, Expr<'_>> {
+ map(num_lit, Expr::NumLit)(i)
+}
+
+fn expr_array_lit(i: &str) -> IResult<&str, Expr<'_>> {
+ delimited(
+ ws(char('[')),
+ map(separated_list1(ws(char(',')), expr_any), Expr::Array),
+ ws(char(']')),
+ )(i)
+}
+
+fn expr_str_lit(i: &str) -> IResult<&str, Expr<'_>> {
+ map(str_lit, Expr::StrLit)(i)
+}
+
+fn expr_char_lit(i: &str) -> IResult<&str, Expr<'_>> {
+ map(char_lit, Expr::CharLit)(i)
+}
+
+fn expr_var(i: &str) -> IResult<&str, Expr<'_>> {
+ map(identifier, Expr::Var)(i)
+}
+
+fn expr_path(i: &str) -> IResult<&str, Expr<'_>> {
+ let (i, path) = path(i)?;
+ Ok((i, Expr::Path(path)))
+}
+
+fn expr_group(i: &str) -> IResult<&str, Expr<'_>> {
+ let (i, expr) = preceded(ws(char('(')), opt(expr_any))(i)?;
+ let expr = match expr {
+ Some(expr) => expr,
+ None => {
+ let (i, _) = char(')')(i)?;
+ return Ok((i, Expr::Tuple(vec![])));
+ }
+ };
+
+ let (i, comma) = ws(opt(peek(char(','))))(i)?;
+ if comma.is_none() {
+ let (i, _) = char(')')(i)?;
+ return Ok((i, Expr::Group(Box::new(expr))));
+ }
+
+ let mut exprs = vec![expr];
+ let (i, _) = fold_many0(
+ preceded(char(','), ws(expr_any)),
+ || (),
+ |_, expr| {
+ exprs.push(expr);
+ },
+ )(i)?;
+ let (i, _) = pair(ws(opt(char(','))), char(')'))(i)?;
+ Ok((i, Expr::Tuple(exprs)))
+}
+
+fn expr_single(i: &str) -> IResult<&str, Expr<'_>> {
+ alt((
+ expr_bool_lit,
+ expr_num_lit,
+ expr_str_lit,
+ expr_char_lit,
+ expr_path,
+ expr_rust_macro,
+ expr_array_lit,
+ expr_var,
+ expr_group,
+ ))(i)
+}
+
+enum Suffix<'a> {
+ Attr(&'a str),
+ Index(Expr<'a>),
+    Call(Vec<Expr<'a>>),
+ Try,
+}
+
+fn expr_attr(i: &str) -> IResult<&str, Suffix<'_>> {
+ map(
+ preceded(
+ ws(pair(char('.'), not(char('.')))),
+ cut(alt((num_lit, identifier))),
+ ),
+ Suffix::Attr,
+ )(i)
+}
+
+fn expr_index(i: &str) -> IResult<&str, Suffix<'_>> {
+ map(
+ preceded(ws(char('[')), cut(terminated(expr_any, ws(char(']'))))),
+ Suffix::Index,
+ )(i)
+}
+
+fn expr_call(i: &str) -> IResult<&str, Suffix<'_>> {
+ map(arguments, Suffix::Call)(i)
+}
+
+fn expr_try(i: &str) -> IResult<&str, Suffix<'_>> {
+ map(preceded(take_till(not_ws), char('?')), |_| Suffix::Try)(i)
+}
+
+fn filter(i: &str) -> IResult<&str, (&str, Option<Vec<Expr<'_>>>)> {
+ let (i, (_, fname, args)) = tuple((char('|'), ws(identifier), opt(arguments)))(i)?;
+ Ok((i, (fname, args)))
+}
+
+fn expr_filtered(i: &str) -> IResult<&str, Expr<'_>> {
+ let (i, (obj, filters)) = tuple((expr_prefix, many0(filter)))(i)?;
+
+ let mut res = obj;
+ for (fname, args) in filters {
+ res = Expr::Filter(fname, {
+ let mut args = match args {
+ Some(inner) => inner,
+ None => Vec::new(),
+ };
+ args.insert(0, res);
+ args
+ });
+ }
+
+ Ok((i, res))
+}
+
+fn expr_prefix(i: &str) -> IResult<&str, Expr<'_>> {
+ let (i, (ops, mut expr)) = pair(many0(ws(alt((tag("!"), tag("-"))))), expr_suffix)(i)?;
+ for op in ops.iter().rev() {
+ expr = Expr::Unary(op, Box::new(expr));
+ }
+ Ok((i, expr))
+}
+
+fn expr_suffix(i: &str) -> IResult<&str, Expr<'_>> {
+ let (mut i, mut expr) = expr_single(i)?;
+ loop {
+ let (j, suffix) = opt(alt((expr_attr, expr_index, expr_call, expr_try)))(i)?;
+ i = j;
+ match suffix {
+ Some(Suffix::Attr(attr)) => expr = Expr::Attr(expr.into(), attr),
+ Some(Suffix::Index(index)) => expr = Expr::Index(expr.into(), index.into()),
+ Some(Suffix::Call(args)) => expr = Expr::Call(expr.into(), args),
+ Some(Suffix::Try) => expr = Expr::Try(expr.into()),
+ None => break,
+ }
+ }
+ Ok((i, expr))
+}
+
+fn macro_arguments(i: &str) -> IResult<&str, &str> {
+ delimited(char('('), recognize(nested_parenthesis), char(')'))(i)
+}
+
+fn expr_rust_macro(i: &str) -> IResult<&str, Expr<'_>> {
+ let (i, (mname, _, args)) = tuple((identifier, char('!'), macro_arguments))(i)?;
+ Ok((i, Expr::RustMacro(mname, args)))
+}
+
+macro_rules! expr_prec_layer {
+ ( $name:ident, $inner:ident, $op:expr ) => {
+ fn $name(i: &str) -> IResult<&str, Expr<'_>> {
+ let (i, left) = $inner(i)?;
+ let (i, right) = many0(pair(
+ ws(tag($op)),
+ $inner,
+ ))(i)?;
+ Ok((
+ i,
+ right.into_iter().fold(left, |left, (op, right)| {
+ Expr::BinOp(op, Box::new(left), Box::new(right))
+ }),
+ ))
+ }
+ };
+ ( $name:ident, $inner:ident, $( $op:expr ),+ ) => {
+ fn $name(i: &str) -> IResult<&str, Expr<'_>> {
+ let (i, left) = $inner(i)?;
+ let (i, right) = many0(pair(
+ ws(alt(($( tag($op) ),+,))),
+ $inner,
+ ))(i)?;
+ Ok((
+ i,
+ right.into_iter().fold(left, |left, (op, right)| {
+ Expr::BinOp(op, Box::new(left), Box::new(right))
+ }),
+ ))
+ }
+ }
+}
+
+expr_prec_layer!(expr_muldivmod, expr_filtered, "*", "/", "%");
+expr_prec_layer!(expr_addsub, expr_muldivmod, "+", "-");
+expr_prec_layer!(expr_shifts, expr_addsub, ">>", "<<");
+expr_prec_layer!(expr_band, expr_shifts, "&");
+expr_prec_layer!(expr_bxor, expr_band, "^");
+expr_prec_layer!(expr_bor, expr_bxor, "|");
+expr_prec_layer!(expr_compare, expr_bor, "==", "!=", ">=", ">", "<=", "<");
+expr_prec_layer!(expr_and, expr_compare, "&&");
+expr_prec_layer!(expr_or, expr_and, "||");
+
+fn expr_any(i: &str) -> IResult<&str, Expr<'_>> {
+ let range_right = |i| pair(ws(alt((tag("..="), tag("..")))), opt(expr_or))(i);
+ alt((
+ map(range_right, |(op, right)| {
+ Expr::Range(op, None, right.map(Box::new))
+ }),
+ map(
+ pair(expr_or, opt(range_right)),
+ |(left, right)| match right {
+ Some((op, right)) => Expr::Range(op, Some(Box::new(left)), right.map(Box::new)),
+ None => left,
+ },
+ ),
+ ))(i)
+}
+
+fn arguments(i: &str) -> IResult<&str, Vec<Expr<'_>>> {
+ delimited(
+ ws(char('(')),
+ separated_list0(char(','), ws(expr_any)),
+ ws(char(')')),
+ )(i)
+}
diff --git a/third_party/rust/askama_derive/src/parser/mod.rs b/third_party/rust/askama_derive/src/parser/mod.rs
new file mode 100644
index 000000000000..79b178ef857d
--- /dev/null
+++ b/third_party/rust/askama_derive/src/parser/mod.rs
@@ -0,0 +1,317 @@
+use std::cell::Cell;
+use std::str;
+
+use nom::branch::alt;
+use nom::bytes::complete::{escaped, is_not, tag, take_till};
+use nom::character::complete::char;
+use nom::character::complete::{anychar, digit1};
+use nom::combinator::{eof, map, not, opt, recognize, value};
+use nom::error::ErrorKind;
+use nom::multi::separated_list1;
+use nom::sequence::{delimited, pair, tuple};
+use nom::{error_position, AsChar, IResult, InputTakeAtPosition};
+
+pub(crate) use self::expr::Expr;
+pub(crate) use self::node::{Cond, CondTest, Loop, Macro, Node, Target, When, Whitespace, Ws};
+use crate::config::Syntax;
+use crate::CompileError;
+
+mod expr;
+mod node;
+#[cfg(test)]
+mod tests;
+
+struct State<'a> {
+ syntax: &'a Syntax<'a>,
+    loop_depth: Cell<u8>,
+}
+
+impl<'a> State<'a> {
+ fn new(syntax: &'a Syntax<'a>) -> State<'a> {
+ State {
+ syntax,
+ loop_depth: Cell::new(0),
+ }
+ }
+
+ fn enter_loop(&self) {
+ self.loop_depth.set(self.loop_depth.get() + 1);
+ }
+
+ fn leave_loop(&self) {
+ self.loop_depth.set(self.loop_depth.get() - 1);
+ }
+
+ fn is_in_loop(&self) -> bool {
+ self.loop_depth.get() > 0
+ }
+}
+
+impl From<char> for Whitespace {
+ fn from(c: char) -> Self {
+ match c {
+ '+' => Self::Preserve,
+ '-' => Self::Suppress,
+ '~' => Self::Minimize,
+ _ => panic!("unsupported `Whitespace` conversion"),
+ }
+ }
+}
+
+pub(crate) fn parse<'a>(
+ src: &'a str,
+ syntax: &'a Syntax<'_>,
+) -> Result<Vec<Node<'a>>, CompileError> {
+ match Node::parse(src, &State::new(syntax)) {
+ Ok((left, res)) => {
+ if !left.is_empty() {
+ Err(format!("unable to parse template:\n\n{left:?}").into())
+ } else {
+ Ok(res)
+ }
+ }
+
+ Err(nom::Err::Error(err)) | Err(nom::Err::Failure(err)) => {
+ let nom::error::Error { input, .. } = err;
+ let offset = src.len() - input.len();
+ let (source_before, source_after) = src.split_at(offset);
+
+ let source_after = match source_after.char_indices().enumerate().take(41).last() {
+ Some((40, (i, _))) => format!("{:?}...", &source_after[..i]),
+ _ => format!("{source_after:?}"),
+ };
+
+ let (row, last_line) = source_before.lines().enumerate().last().unwrap();
+ let column = last_line.chars().count();
+
+ let msg = format!(
+ "problems parsing template source at row {}, column {} near:\n{}",
+ row + 1,
+ column,
+ source_after,
+ );
+ Err(msg.into())
+ }
+
+ Err(nom::Err::Incomplete(_)) => Err("parsing incomplete".into()),
+ }
+}
+
+fn is_ws(c: char) -> bool {
+ matches!(c, ' ' | '\t' | '\r' | '\n')
+}
+
+fn not_ws(c: char) -> bool {
+ !is_ws(c)
+}
+
+fn ws<'a, O>(
+ inner: impl FnMut(&'a str) -> IResult<&'a str, O>,
+) -> impl FnMut(&'a str) -> IResult<&'a str, O> {
+ delimited(take_till(not_ws), inner, take_till(not_ws))
+}
+
+fn split_ws_parts(s: &str) -> Node<'_> {
+ let trimmed_start = s.trim_start_matches(is_ws);
+ let len_start = s.len() - trimmed_start.len();
+ let trimmed = trimmed_start.trim_end_matches(is_ws);
+ Node::Lit(&s[..len_start], trimmed, &trimmed_start[trimmed.len()..])
+}
+
+/// Skips input until `end` was found, but does not consume it.
+/// Returns tuple that would be returned when parsing `end`.
+fn skip_till<'a, O>(
+ end: impl FnMut(&'a str) -> IResult<&'a str, O>,
+) -> impl FnMut(&'a str) -> IResult<&'a str, (&'a str, O)> {
+    enum Next<O> {
+ IsEnd(O),
+ NotEnd(char),
+ }
+ let mut next = alt((map(end, Next::IsEnd), map(anychar, Next::NotEnd)));
+ move |start: &'a str| {
+ let mut i = start;
+ loop {
+ let (j, is_end) = next(i)?;
+ match is_end {
+ Next::IsEnd(lookahead) => return Ok((i, (j, lookahead))),
+ Next::NotEnd(_) => i = j,
+ }
+ }
+ }
+}
+
+fn keyword<'a>(k: &'a str) -> impl FnMut(&'a str) -> IResult<&'a str, &'a str> {
+ move |i: &'a str| -> IResult<&'a str, &'a str> {
+ let (j, v) = identifier(i)?;
+ if k == v {
+ Ok((j, v))
+ } else {
+ Err(nom::Err::Error(error_position!(i, ErrorKind::Tag)))
+ }
+ }
+}
+
+fn identifier(input: &str) -> IResult<&str, &str> {
+ recognize(pair(identifier_start, opt(identifier_tail)))(input)
+}
+
+fn identifier_start(s: &str) -> IResult<&str, &str> {
+ s.split_at_position1_complete(
+ |c| !(c.is_alpha() || c == '_' || c >= '\u{0080}'),
+ nom::error::ErrorKind::Alpha,
+ )
+}
+
+fn identifier_tail(s: &str) -> IResult<&str, &str> {
+ s.split_at_position1_complete(
+ |c| !(c.is_alphanum() || c == '_' || c >= '\u{0080}'),
+ nom::error::ErrorKind::Alpha,
+ )
+}
+
+fn bool_lit(i: &str) -> IResult<&str, &str> {
+ alt((keyword("false"), keyword("true")))(i)
+}
+
+fn num_lit(i: &str) -> IResult<&str, &str> {
+ recognize(pair(digit1, opt(pair(char('.'), digit1))))(i)
+}
+
+fn str_lit(i: &str) -> IResult<&str, &str> {
+ let (i, s) = delimited(
+ char('"'),
+ opt(escaped(is_not("\\\""), '\\', anychar)),
+ char('"'),
+ )(i)?;
+ Ok((i, s.unwrap_or_default()))
+}
+
+fn char_lit(i: &str) -> IResult<&str, &str> {
+ let (i, s) = delimited(
+ char('\''),
+ opt(escaped(is_not("\\\'"), '\\', anychar)),
+ char('\''),
+ )(i)?;
+ Ok((i, s.unwrap_or_default()))
+}
+
+fn nested_parenthesis(i: &str) -> IResult<&str, ()> {
+ let mut nested = 0;
+ let mut last = 0;
+ let mut in_str = false;
+ let mut escaped = false;
+
+ for (i, b) in i.chars().enumerate() {
+ if !(b == '(' || b == ')') || !in_str {
+ match b {
+ '(' => nested += 1,
+ ')' => {
+ if nested == 0 {
+ last = i;
+ break;
+ }
+ nested -= 1;
+ }
+ '"' => {
+ if in_str {
+ if !escaped {
+ in_str = false;
+ }
+ } else {
+ in_str = true;
+ }
+ }
+ '\\' => {
+ escaped = !escaped;
+ }
+ _ => (),
+ }
+ }
+
+ if escaped && b != '\\' {
+ escaped = false;
+ }
+ }
+
+ if nested == 0 {
+ Ok((&i[last..], ()))
+ } else {
+ Err(nom::Err::Error(error_position!(
+ i,
+ ErrorKind::SeparatedNonEmptyList
+ )))
+ }
+}
+
+fn path(i: &str) -> IResult<&str, Vec<&str>> {
+ let root = opt(value("", ws(tag("::"))));
+ let tail = separated_list1(ws(tag("::")), identifier);
+
+ match tuple((root, identifier, ws(tag("::")), tail))(i) {
+ Ok((i, (root, start, _, rest))) => {
+ let mut path = Vec::new();
+ path.extend(root);
+ path.push(start);
+ path.extend(rest);
+ Ok((i, path))
+ }
+ Err(err) => {
+ if let Ok((i, name)) = identifier(i) {
+ // The returned identifier can be assumed to be path if:
+ // - Contains both a lowercase and uppercase character, i.e. a type name like `None`
+ // - Doesn't contain any lowercase characters, i.e. it's a constant
+ // In short, if it contains any uppercase characters it's a path.
+ if name.contains(char::is_uppercase) {
+ return Ok((i, vec![name]));
+ }
+ }
+
+ // If `identifier()` fails then just return the original error
+ Err(err)
+ }
+ }
+}
+
+fn take_content<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+ let p_start = alt((
+ tag(s.syntax.block_start),
+ tag(s.syntax.comment_start),
+ tag(s.syntax.expr_start),
+ ));
+
+ let (i, _) = not(eof)(i)?;
+ let (i, content) = opt(recognize(skip_till(p_start)))(i)?;
+ let (i, content) = match content {
+ Some("") => {
+ // {block,comment,expr}_start follows immediately.
+ return Err(nom::Err::Error(error_position!(i, ErrorKind::TakeUntil)));
+ }
+ Some(content) => (i, content),
+ None => ("", i), // there is no {block,comment,expr}_start: take everything
+ };
+ Ok((i, split_ws_parts(content)))
+}
+
+fn tag_block_start<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
+ tag(s.syntax.block_start)(i)
+}
+
+fn tag_block_end<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
+ tag(s.syntax.block_end)(i)
+}
+
+fn tag_comment_start<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
+ tag(s.syntax.comment_start)(i)
+}
+
+fn tag_comment_end<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
+ tag(s.syntax.comment_end)(i)
+}
+
+fn tag_expr_start<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
+ tag(s.syntax.expr_start)(i)
+}
+
+fn tag_expr_end<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
+ tag(s.syntax.expr_end)(i)
+}
diff --git a/third_party/rust/askama_derive/src/parser/node.rs b/third_party/rust/askama_derive/src/parser/node.rs
new file mode 100644
index 000000000000..fc6860ef8cf6
--- /dev/null
+++ b/third_party/rust/askama_derive/src/parser/node.rs
@@ -0,0 +1,682 @@
+use std::str;
+
+use nom::branch::alt;
+use nom::bytes::complete::{tag, take_until};
+use nom::character::complete::char;
+use nom::combinator::{complete, consumed, cut, map, opt, peek, value};
+use nom::error::{Error, ErrorKind};
+use nom::multi::{fold_many0, many0, many1, separated_list0, separated_list1};
+use nom::sequence::{delimited, pair, preceded, terminated, tuple};
+use nom::{error_position, IResult};
+
+use super::{
+ bool_lit, char_lit, identifier, keyword, num_lit, path, skip_till, split_ws_parts, str_lit,
+ tag_block_end, tag_block_start, tag_comment_end, tag_comment_start, tag_expr_end,
+ tag_expr_start, take_content, ws, Expr, State,
+};
+use crate::config::WhitespaceHandling;
+
+#[derive(Debug, PartialEq)]
+pub(crate) enum Node<'a> {
+ Lit(&'a str, &'a str, &'a str),
+ Comment(Ws),
+ Expr(Ws, Expr<'a>),
+    Call(Ws, Option<&'a str>, &'a str, Vec<Expr<'a>>),
+    LetDecl(Ws, Target<'a>),
+    Let(Ws, Target<'a>, Expr<'a>),
+    Cond(Vec<Cond<'a>>, Ws),
+    Match(Ws, Expr<'a>, Vec<When<'a>>, Ws),
+    Loop(Loop<'a>),
+    Extends(&'a str),
+    BlockDef(Ws, &'a str, Vec<Node<'a>>, Ws),
+ Include(Ws, &'a str),
+ Import(Ws, &'a str, &'a str),
+ Macro(&'a str, Macro<'a>),
+ Raw(Ws, &'a str, &'a str, &'a str, Ws),
+ Break(Ws),
+ Continue(Ws),
+}
+
+#[derive(Debug, PartialEq)]
+pub(crate) enum Target<'a> {
+ Name(&'a str),
+    Tuple(Vec<&'a str>, Vec<Target<'a>>),
+ Struct(Vec<&'a str>, Vec<(&'a str, Target<'a>)>),
+ NumLit(&'a str),
+ StrLit(&'a str),
+ CharLit(&'a str),
+ BoolLit(&'a str),
+ Path(Vec<&'a str>),
+}
+
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub(crate) enum Whitespace {
+ Preserve,
+ Suppress,
+ Minimize,
+}
+
+impl From<WhitespaceHandling> for Whitespace {
+ fn from(ws: WhitespaceHandling) -> Self {
+ match ws {
+ WhitespaceHandling::Suppress => Whitespace::Suppress,
+ WhitespaceHandling::Preserve => Whitespace::Preserve,
+ WhitespaceHandling::Minimize => Whitespace::Minimize,
+ }
+ }
+}
+
+#[derive(Debug, PartialEq)]
+pub(crate) struct Loop<'a> {
+ pub(crate) ws1: Ws,
+ pub(crate) var: Target<'a>,
+ pub(crate) iter: Expr<'a>,
+    pub(crate) cond: Option<Expr<'a>>,
+    pub(crate) body: Vec<Node<'a>>,
+    pub(crate) ws2: Ws,
+    pub(crate) else_block: Vec<Node<'a>>,
+ pub(crate) ws3: Ws,
+}
+
+pub(crate) type When<'a> = (Ws, Target<'a>, Vec<Node<'a>>);
+
+#[derive(Debug, PartialEq)]
+pub(crate) struct Macro<'a> {
+ pub(crate) ws1: Ws,
+ pub(crate) args: Vec<&'a str>,
+    pub(crate) nodes: Vec<Node<'a>>,
+ pub(crate) ws2: Ws,
+}
+
+/// First field is "minus/plus sign was used on the left part of the item".
+///
+/// Second field is "minus/plus sign was used on the right part of the item".
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub(crate) struct Ws(pub(crate) Option<Whitespace>, pub(crate) Option<Whitespace>);
+
+pub(crate) type Cond<'a> = (Ws, Option<CondTest<'a>>, Vec<Node<'a>>);
+
+#[derive(Debug, PartialEq)]
+pub(crate) struct CondTest<'a> {
+    pub(crate) target: Option<Target<'a>>,
+ pub(crate) expr: Expr<'a>,
+}
+
+impl Node<'_> {
+    pub(super) fn parse<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Vec<Node<'a>>> {
+ parse_template(i, s)
+ }
+}
+
+impl Target<'_> {
+ pub(super) fn parse(i: &str) -> IResult<&str, Target<'_>> {
+ target(i)
+ }
+}
+
+fn expr_handle_ws(i: &str) -> IResult<&str, Whitespace> {
+ alt((char('-'), char('+'), char('~')))(i).map(|(s, r)| (s, Whitespace::from(r)))
+}
+
+fn parameters(i: &str) -> IResult<&str, Vec<&str>> {
+ delimited(
+ ws(char('(')),
+ separated_list0(char(','), ws(identifier)),
+ ws(char(')')),
+ )(i)
+}
+
+fn block_call(i: &str) -> IResult<&str, Node<'_>> {
+ let mut p = tuple((
+ opt(expr_handle_ws),
+ ws(keyword("call")),
+ cut(tuple((
+ opt(tuple((ws(identifier), ws(tag("::"))))),
+ ws(identifier),
+ ws(Expr::parse_arguments),
+ opt(expr_handle_ws),
+ ))),
+ ));
+ let (i, (pws, _, (scope, name, args, nws))) = p(i)?;
+ let scope = scope.map(|(scope, _)| scope);
+ Ok((i, Node::Call(Ws(pws, nws), scope, name, args)))
+}
+
+fn cond_if(i: &str) -> IResult<&str, CondTest<'_>> {
+ let mut p = preceded(
+ ws(keyword("if")),
+ cut(tuple((
+ opt(delimited(
+ ws(alt((keyword("let"), keyword("set")))),
+ ws(Target::parse),
+ ws(char('=')),
+ )),
+ ws(Expr::parse),
+ ))),
+ );
+ let (i, (target, expr)) = p(i)?;
+ Ok((i, CondTest { target, expr }))
+}
+
+fn cond_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Cond<'a>> {
+ let mut p = tuple((
+ |i| tag_block_start(i, s),
+ opt(expr_handle_ws),
+ ws(keyword("else")),
+ cut(tuple((
+ opt(cond_if),
+ opt(expr_handle_ws),
+ |i| tag_block_end(i, s),
+ cut(|i| parse_template(i, s)),
+ ))),
+ ));
+ let (i, (_, pws, _, (cond, nws, _, block))) = p(i)?;
+ Ok((i, (Ws(pws, nws), cond, block)))
+}
+
+fn block_if<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+ let mut p = tuple((
+ opt(expr_handle_ws),
+ cond_if,
+ cut(tuple((
+ opt(expr_handle_ws),
+ |i| tag_block_end(i, s),
+ cut(tuple((
+ |i| parse_template(i, s),
+ many0(|i| cond_block(i, s)),
+ cut(tuple((
+ |i| tag_block_start(i, s),
+ opt(expr_handle_ws),
+ ws(keyword("endif")),
+ opt(expr_handle_ws),
+ ))),
+ ))),
+ ))),
+ ));
+ let (i, (pws1, cond, (nws1, _, (block, elifs, (_, pws2, _, nws2))))) = p(i)?;
+
+ let mut res = vec![(Ws(pws1, nws1), Some(cond), block)];
+ res.extend(elifs);
+ Ok((i, Node::Cond(res, Ws(pws2, nws2))))
+}
+
+fn match_else_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, When<'a>> {
+ let mut p = tuple((
+ |i| tag_block_start(i, s),
+ opt(expr_handle_ws),
+ ws(keyword("else")),
+ cut(tuple((
+ opt(expr_handle_ws),
+ |i| tag_block_end(i, s),
+ cut(|i| parse_template(i, s)),
+ ))),
+ ));
+ let (i, (_, pws, _, (nws, _, block))) = p(i)?;
+ Ok((i, (Ws(pws, nws), Target::Name("_"), block)))
+}
+
+fn when_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, When<'a>> {
+ let mut p = tuple((
+ |i| tag_block_start(i, s),
+ opt(expr_handle_ws),
+ ws(keyword("when")),
+ cut(tuple((
+ ws(Target::parse),
+ opt(expr_handle_ws),
+ |i| tag_block_end(i, s),
+ cut(|i| parse_template(i, s)),
+ ))),
+ ));
+ let (i, (_, pws, _, (target, nws, _, block))) = p(i)?;
+ Ok((i, (Ws(pws, nws), target, block)))
+}
+
+fn block_match<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+ let mut p = tuple((
+ opt(expr_handle_ws),
+ ws(keyword("match")),
+ cut(tuple((
+ ws(Expr::parse),
+ opt(expr_handle_ws),
+ |i| tag_block_end(i, s),
+ cut(tuple((
+ ws(many0(ws(value((), |i| block_comment(i, s))))),
+ many1(|i| when_block(i, s)),
+ cut(tuple((
+ opt(|i| match_else_block(i, s)),
+ cut(tuple((
+ ws(|i| tag_block_start(i, s)),
+ opt(expr_handle_ws),
+ ws(keyword("endmatch")),
+ opt(expr_handle_ws),
+ ))),
+ ))),
+ ))),
+ ))),
+ ));
+ let (i, (pws1, _, (expr, nws1, _, (_, arms, (else_arm, (_, pws2, _, nws2)))))) = p(i)?;
+
+ let mut arms = arms;
+ if let Some(arm) = else_arm {
+ arms.push(arm);
+ }
+
+ Ok((i, Node::Match(Ws(pws1, nws1), expr, arms, Ws(pws2, nws2))))
+}
+
+fn block_let(i: &str) -> IResult<&str, Node<'_>> {
+ let mut p = tuple((
+ opt(expr_handle_ws),
+ ws(alt((keyword("let"), keyword("set")))),
+ cut(tuple((
+ ws(Target::parse),
+ opt(tuple((ws(char('=')), ws(Expr::parse)))),
+ opt(expr_handle_ws),
+ ))),
+ ));
+ let (i, (pws, _, (var, val, nws))) = p(i)?;
+
+ Ok((
+ i,
+ if let Some((_, val)) = val {
+ Node::Let(Ws(pws, nws), var, val)
+ } else {
+ Node::LetDecl(Ws(pws, nws), var)
+ },
+ ))
+}
+
+fn parse_loop_content<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Vec<Node<'a>>> {
+ s.enter_loop();
+ let result = parse_template(i, s);
+ s.leave_loop();
+ result
+}
+
+fn block_for<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+ let if_cond = preceded(ws(keyword("if")), cut(ws(Expr::parse)));
+ let else_block = |i| {
+ let mut p = preceded(
+ ws(keyword("else")),
+ cut(tuple((
+ opt(expr_handle_ws),
+ delimited(
+ |i| tag_block_end(i, s),
+ |i| parse_template(i, s),
+ |i| tag_block_start(i, s),
+ ),
+ opt(expr_handle_ws),
+ ))),
+ );
+ let (i, (pws, nodes, nws)) = p(i)?;
+ Ok((i, (pws, nodes, nws)))
+ };
+ let mut p = tuple((
+ opt(expr_handle_ws),
+ ws(keyword("for")),
+ cut(tuple((
+ ws(Target::parse),
+ ws(keyword("in")),
+ cut(tuple((
+ ws(Expr::parse),
+ opt(if_cond),
+ opt(expr_handle_ws),
+ |i| tag_block_end(i, s),
+ cut(tuple((
+ |i| parse_loop_content(i, s),
+ cut(tuple((
+ |i| tag_block_start(i, s),
+ opt(expr_handle_ws),
+ opt(else_block),
+ ws(keyword("endfor")),
+ opt(expr_handle_ws),
+ ))),
+ ))),
+ ))),
+ ))),
+ ));
+ let (i, (pws1, _, (var, _, (iter, cond, nws1, _, (body, (_, pws2, else_block, _, nws2)))))) =
+ p(i)?;
+ let (nws3, else_block, pws3) = else_block.unwrap_or_default();
+ Ok((
+ i,
+ Node::Loop(Loop {
+ ws1: Ws(pws1, nws1),
+ var,
+ iter,
+ cond,
+ body,
+ ws2: Ws(pws2, nws3),
+ else_block,
+ ws3: Ws(pws3, nws2),
+ }),
+ ))
+}
+
+fn block_extends(i: &str) -> IResult<&str, Node<'_>> {
+ let (i, (_, name)) = tuple((ws(keyword("extends")), ws(str_lit)))(i)?;
+ Ok((i, Node::Extends(name)))
+}
+
+fn block_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+ let mut start = tuple((
+ opt(expr_handle_ws),
+ ws(keyword("block")),
+ cut(tuple((ws(identifier), opt(expr_handle_ws), |i| {
+ tag_block_end(i, s)
+ }))),
+ ));
+ let (i, (pws1, _, (name, nws1, _))) = start(i)?;
+
+ let mut end = cut(tuple((
+ |i| parse_template(i, s),
+ cut(tuple((
+ |i| tag_block_start(i, s),
+ opt(expr_handle_ws),
+ ws(keyword("endblock")),
+ cut(tuple((opt(ws(keyword(name))), opt(expr_handle_ws)))),
+ ))),
+ )));
+ let (i, (contents, (_, pws2, _, (_, nws2)))) = end(i)?;
+
+ Ok((
+ i,
+ Node::BlockDef(Ws(pws1, nws1), name, contents, Ws(pws2, nws2)),
+ ))
+}
+
+fn block_include(i: &str) -> IResult<&str, Node<'_>> {
+ let mut p = tuple((
+ opt(expr_handle_ws),
+ ws(keyword("include")),
+ cut(pair(ws(str_lit), opt(expr_handle_ws))),
+ ));
+ let (i, (pws, _, (name, nws))) = p(i)?;
+ Ok((i, Node::Include(Ws(pws, nws), name)))
+}
+
+fn block_import(i: &str) -> IResult<&str, Node<'_>> {
+ let mut p = tuple((
+ opt(expr_handle_ws),
+ ws(keyword("import")),
+ cut(tuple((
+ ws(str_lit),
+ ws(keyword("as")),
+ cut(pair(ws(identifier), opt(expr_handle_ws))),
+ ))),
+ ));
+ let (i, (pws, _, (name, _, (scope, nws)))) = p(i)?;
+ Ok((i, Node::Import(Ws(pws, nws), name, scope)))
+}
+
+fn block_macro<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+ let mut start = tuple((
+ opt(expr_handle_ws),
+ ws(keyword("macro")),
+ cut(tuple((
+ ws(identifier),
+ ws(parameters),
+ opt(expr_handle_ws),
+ |i| tag_block_end(i, s),
+ ))),
+ ));
+ let (i, (pws1, _, (name, params, nws1, _))) = start(i)?;
+
+ let mut end = cut(tuple((
+ |i| parse_template(i, s),
+ cut(tuple((
+ |i| tag_block_start(i, s),
+ opt(expr_handle_ws),
+ ws(keyword("endmacro")),
+ cut(tuple((opt(ws(keyword(name))), opt(expr_handle_ws)))),
+ ))),
+ )));
+ let (i, (contents, (_, pws2, _, (_, nws2)))) = end(i)?;
+
+ assert_ne!(name, "super", "invalid macro name 'super'");
+
+ Ok((
+ i,
+ Node::Macro(
+ name,
+ Macro {
+ ws1: Ws(pws1, nws1),
+ args: params,
+ nodes: contents,
+ ws2: Ws(pws2, nws2),
+ },
+ ),
+ ))
+}
+
+fn block_raw<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+ let endraw = tuple((
+ |i| tag_block_start(i, s),
+ opt(expr_handle_ws),
+ ws(keyword("endraw")),
+ opt(expr_handle_ws),
+ peek(|i| tag_block_end(i, s)),
+ ));
+
+ let mut p = tuple((
+ opt(expr_handle_ws),
+ ws(keyword("raw")),
+ cut(tuple((
+ opt(expr_handle_ws),
+ |i| tag_block_end(i, s),
+ consumed(skip_till(endraw)),
+ ))),
+ ));
+
+ let (_, (pws1, _, (nws1, _, (contents, (i, (_, pws2, _, nws2, _)))))) = p(i)?;
+ let (lws, val, rws) = match split_ws_parts(contents) {
+ Node::Lit(lws, val, rws) => (lws, val, rws),
+ _ => unreachable!(),
+ };
+ let ws1 = Ws(pws1, nws1);
+ let ws2 = Ws(pws2, nws2);
+ Ok((i, Node::Raw(ws1, lws, val, rws, ws2)))
+}
+
+fn break_statement<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+ let mut p = tuple((
+ opt(expr_handle_ws),
+ ws(keyword("break")),
+ opt(expr_handle_ws),
+ ));
+ let (j, (pws, _, nws)) = p(i)?;
+ if !s.is_in_loop() {
+ return Err(nom::Err::Failure(error_position!(i, ErrorKind::Tag)));
+ }
+ Ok((j, Node::Break(Ws(pws, nws))))
+}
+
+fn continue_statement<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+ let mut p = tuple((
+ opt(expr_handle_ws),
+ ws(keyword("continue")),
+ opt(expr_handle_ws),
+ ));
+ let (j, (pws, _, nws)) = p(i)?;
+ if !s.is_in_loop() {
+ return Err(nom::Err::Failure(error_position!(i, ErrorKind::Tag)));
+ }
+ Ok((j, Node::Continue(Ws(pws, nws))))
+}
+
+fn block_node<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+ let mut p = tuple((
+ |i| tag_block_start(i, s),
+ alt((
+ block_call,
+ block_let,
+ |i| block_if(i, s),
+ |i| block_for(i, s),
+ |i| block_match(i, s),
+ block_extends,
+ block_include,
+ block_import,
+ |i| block_block(i, s),
+ |i| block_macro(i, s),
+ |i| block_raw(i, s),
+ |i| break_statement(i, s),
+ |i| continue_statement(i, s),
+ )),
+ cut(|i| tag_block_end(i, s)),
+ ));
+ let (i, (_, contents, _)) = p(i)?;
+ Ok((i, contents))
+}
+
+fn block_comment_body<'a>(mut i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
+ let mut level = 0;
+ loop {
+ let (end, tail) = take_until(s.syntax.comment_end)(i)?;
+ match take_until::<_, _, Error<_>>(s.syntax.comment_start)(i) {
+ Ok((start, _)) if start.as_ptr() < end.as_ptr() => {
+ level += 1;
+ i = &start[2..];
+ }
+ _ if level > 0 => {
+ level -= 1;
+ i = &end[2..];
+ }
+ _ => return Ok((end, tail)),
+ }
+ }
+}
+
+fn block_comment<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+ let mut p = tuple((
+ |i| tag_comment_start(i, s),
+ cut(tuple((
+ opt(expr_handle_ws),
+ |i| block_comment_body(i, s),
+ |i| tag_comment_end(i, s),
+ ))),
+ ));
+ let (i, (_, (pws, tail, _))) = p(i)?;
+ let nws = if tail.ends_with('-') {
+ Some(Whitespace::Suppress)
+ } else if tail.ends_with('+') {
+ Some(Whitespace::Preserve)
+ } else if tail.ends_with('~') {
+ Some(Whitespace::Minimize)
+ } else {
+ None
+ };
+ Ok((i, Node::Comment(Ws(pws, nws))))
+}
+
+fn expr_node<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+ let mut p = tuple((
+ |i| tag_expr_start(i, s),
+ cut(tuple((
+ opt(expr_handle_ws),
+ ws(Expr::parse),
+ opt(expr_handle_ws),
+ |i| tag_expr_end(i, s),
+ ))),
+ ));
+ let (i, (_, (pws, expr, nws, _))) = p(i)?;
+ Ok((i, Node::Expr(Ws(pws, nws), expr)))
+}
+
+fn parse_template<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Vec<Node<'a>>> {
+ many0(alt((
+ complete(|i| take_content(i, s)),
+ complete(|i| block_comment(i, s)),
+ complete(|i| expr_node(i, s)),
+ complete(|i| block_node(i, s)),
+ )))(i)
+}
+
+fn variant_lit(i: &str) -> IResult<&str, Target<'_>> {
+ alt((
+ map(str_lit, Target::StrLit),
+ map(char_lit, Target::CharLit),
+ map(num_lit, Target::NumLit),
+ map(bool_lit, Target::BoolLit),
+ ))(i)
+}
+
+fn target(i: &str) -> IResult<&str, Target<'_>> {
+ let mut opt_opening_paren = map(opt(ws(char('('))), |o| o.is_some());
+ let mut opt_closing_paren = map(opt(ws(char(')'))), |o| o.is_some());
+ let mut opt_opening_brace = map(opt(ws(char('{'))), |o| o.is_some());
+
+ let (i, lit) = opt(variant_lit)(i)?;
+ if let Some(lit) = lit {
+ return Ok((i, lit));
+ }
+
+ // match tuples and unused parentheses
+ let (i, target_is_tuple) = opt_opening_paren(i)?;
+ if target_is_tuple {
+ let (i, is_empty_tuple) = opt_closing_paren(i)?;
+ if is_empty_tuple {
+ return Ok((i, Target::Tuple(Vec::new(), Vec::new())));
+ }
+
+ let (i, first_target) = target(i)?;
+ let (i, is_unused_paren) = opt_closing_paren(i)?;
+ if is_unused_paren {
+ return Ok((i, first_target));
+ }
+
+ let mut targets = vec![first_target];
+ let (i, _) = cut(tuple((
+ fold_many0(
+ preceded(ws(char(',')), target),
+ || (),
+ |_, target| {
+ targets.push(target);
+ },
+ ),
+ opt(ws(char(','))),
+ ws(cut(char(')'))),
+ )))(i)?;
+ return Ok((i, Target::Tuple(Vec::new(), targets)));
+ }
+
+ // match structs
+ let (i, path) = opt(path)(i)?;
+ if let Some(path) = path {
+ let i_before_matching_with = i;
+ let (i, _) = opt(ws(keyword("with")))(i)?;
+
+ let (i, is_unnamed_struct) = opt_opening_paren(i)?;
+ if is_unnamed_struct {
+ let (i, targets) = alt((
+ map(char(')'), |_| Vec::new()),
+ terminated(
+ cut(separated_list1(ws(char(',')), target)),
+ pair(opt(ws(char(','))), ws(cut(char(')')))),
+ ),
+ ))(i)?;
+ return Ok((i, Target::Tuple(path, targets)));
+ }
+
+ let (i, is_named_struct) = opt_opening_brace(i)?;
+ if is_named_struct {
+ let (i, targets) = alt((
+ map(char('}'), |_| Vec::new()),
+ terminated(
+ cut(separated_list1(ws(char(',')), named_target)),
+ pair(opt(ws(char(','))), ws(cut(char('}')))),
+ ),
+ ))(i)?;
+ return Ok((i, Target::Struct(path, targets)));
+ }
+
+ return Ok((i_before_matching_with, Target::Path(path)));
+ }
+
+ // neither literal nor struct nor path
+ map(identifier, Target::Name)(i)
+}
+
+fn named_target(i: &str) -> IResult<&str, (&str, Target<'_>)> {
+ let (i, (src, target)) = pair(identifier, opt(preceded(ws(char(':')), target)))(i)?;
+ Ok((i, (src, target.unwrap_or(Target::Name(src)))))
+}
diff --git a/third_party/rust/askama_derive/src/parser/tests.rs b/third_party/rust/askama_derive/src/parser/tests.rs
new file mode 100644
index 000000000000..91bb09ba8d2c
--- /dev/null
+++ b/third_party/rust/askama_derive/src/parser/tests.rs
@@ -0,0 +1,668 @@
+use crate::config::Syntax;
+use crate::parser::{Expr, Node, Whitespace, Ws};
+
+fn check_ws_split(s: &str, res: &(&str, &str, &str)) {
+ match super::split_ws_parts(s) {
+ Node::Lit(lws, s, rws) => {
+ assert_eq!(lws, res.0);
+ assert_eq!(s, res.1);
+ assert_eq!(rws, res.2);
+ }
+ _ => {
+ panic!("fail");
+ }
+ }
+}
+
+#[test]
+fn test_ws_splitter() {
+ check_ws_split("", &("", "", ""));
+ check_ws_split("a", &("", "a", ""));
+ check_ws_split("\ta", &("\t", "a", ""));
+ check_ws_split("b\n", &("", "b", "\n"));
+ check_ws_split(" \t\r\n", &(" \t\r\n", "", ""));
+}
+
+#[test]
+#[should_panic]
+fn test_invalid_block() {
+ super::parse("{% extend \"blah\" %}", &Syntax::default()).unwrap();
+}
+
+#[test]
+fn test_parse_filter() {
+ use Expr::*;
+ let syntax = Syntax::default();
+ assert_eq!(
+ super::parse("{{ strvar|e }}", &syntax).unwrap(),
+ vec![Node::Expr(Ws(None, None), Filter("e", vec![Var("strvar")]),)],
+ );
+ assert_eq!(
+ super::parse("{{ 2|abs }}", &syntax).unwrap(),
+ vec![Node::Expr(Ws(None, None), Filter("abs", vec![NumLit("2")]),)],
+ );
+ assert_eq!(
+ super::parse("{{ -2|abs }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ Filter("abs", vec![Unary("-", NumLit("2").into())]),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ (1 - 2)|abs }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ Filter(
+ "abs",
+ vec![Group(
+ BinOp("-", NumLit("1").into(), NumLit("2").into()).into()
+ )]
+ ),
+ )],
+ );
+}
+
+#[test]
+fn test_parse_numbers() {
+ let syntax = Syntax::default();
+ assert_eq!(
+ super::parse("{{ 2 }}", &syntax).unwrap(),
+ vec![Node::Expr(Ws(None, None), Expr::NumLit("2"),)],
+ );
+ assert_eq!(
+ super::parse("{{ 2.5 }}", &syntax).unwrap(),
+ vec![Node::Expr(Ws(None, None), Expr::NumLit("2.5"),)],
+ );
+}
+
+#[test]
+fn test_parse_var() {
+ let s = Syntax::default();
+
+ assert_eq!(
+ super::parse("{{ foo }}", &s).unwrap(),
+ vec![Node::Expr(Ws(None, None), Expr::Var("foo"))],
+ );
+ assert_eq!(
+ super::parse("{{ foo_bar }}", &s).unwrap(),
+ vec![Node::Expr(Ws(None, None), Expr::Var("foo_bar"))],
+ );
+
+ assert_eq!(
+ super::parse("{{ none }}", &s).unwrap(),
+ vec![Node::Expr(Ws(None, None), Expr::Var("none"))],
+ );
+}
+
+#[test]
+fn test_parse_const() {
+ let s = Syntax::default();
+
+ assert_eq!(
+ super::parse("{{ FOO }}", &s).unwrap(),
+ vec![Node::Expr(Ws(None, None), Expr::Path(vec!["FOO"]))],
+ );
+ assert_eq!(
+ super::parse("{{ FOO_BAR }}", &s).unwrap(),
+ vec![Node::Expr(Ws(None, None), Expr::Path(vec!["FOO_BAR"]))],
+ );
+
+ assert_eq!(
+ super::parse("{{ NONE }}", &s).unwrap(),
+ vec![Node::Expr(Ws(None, None), Expr::Path(vec!["NONE"]))],
+ );
+}
+
+#[test]
+fn test_parse_path() {
+ let s = Syntax::default();
+
+ assert_eq!(
+ super::parse("{{ None }}", &s).unwrap(),
+ vec![Node::Expr(Ws(None, None), Expr::Path(vec!["None"]))],
+ );
+ assert_eq!(
+ super::parse("{{ Some(123) }}", &s).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ Expr::Call(
+ Box::new(Expr::Path(vec!["Some"])),
+ vec![Expr::NumLit("123")]
+ ),
+ )],
+ );
+
+ assert_eq!(
+ super::parse("{{ Ok(123) }}", &s).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ Expr::Call(Box::new(Expr::Path(vec!["Ok"])), vec![Expr::NumLit("123")]),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ Err(123) }}", &s).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ Expr::Call(Box::new(Expr::Path(vec!["Err"])), vec![Expr::NumLit("123")]),
+ )],
+ );
+}
+
+#[test]
+fn test_parse_var_call() {
+ assert_eq!(
+ super::parse("{{ function(\"123\", 3) }}", &Syntax::default()).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ Expr::Call(
+ Box::new(Expr::Var("function")),
+ vec![Expr::StrLit("123"), Expr::NumLit("3")]
+ ),
+ )],
+ );
+}
+
+#[test]
+fn test_parse_path_call() {
+ let s = Syntax::default();
+
+ assert_eq!(
+ super::parse("{{ Option::None }}", &s).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ Expr::Path(vec!["Option", "None"])
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ Option::Some(123) }}", &s).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ Expr::Call(
+ Box::new(Expr::Path(vec!["Option", "Some"])),
+ vec![Expr::NumLit("123")],
+ ),
+ )],
+ );
+
+ assert_eq!(
+ super::parse("{{ self::function(\"123\", 3) }}", &s).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ Expr::Call(
+ Box::new(Expr::Path(vec!["self", "function"])),
+ vec![Expr::StrLit("123"), Expr::NumLit("3")],
+ ),
+ )],
+ );
+}
+
+#[test]
+fn test_parse_root_path() {
+ let syntax = Syntax::default();
+ assert_eq!(
+ super::parse("{{ std::string::String::new() }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ Expr::Call(
+ Box::new(Expr::Path(vec!["std", "string", "String", "new"])),
+ vec![]
+ ),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ ::std::string::String::new() }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ Expr::Call(
+ Box::new(Expr::Path(vec!["", "std", "string", "String", "new"])),
+ vec![]
+ ),
+ )],
+ );
+}
+
+#[test]
+fn change_delimiters_parse_filter() {
+ let syntax = Syntax {
+ expr_start: "{=",
+ expr_end: "=}",
+ ..Syntax::default()
+ };
+
+ super::parse("{= strvar|e =}", &syntax).unwrap();
+}
+
+#[test]
+fn test_precedence() {
+ use Expr::*;
+ let syntax = Syntax::default();
+ assert_eq!(
+ super::parse("{{ a + b == c }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ BinOp(
+ "==",
+ BinOp("+", Var("a").into(), Var("b").into()).into(),
+ Var("c").into(),
+ )
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ a + b * c - d / e }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ BinOp(
+ "-",
+ BinOp(
+ "+",
+ Var("a").into(),
+ BinOp("*", Var("b").into(), Var("c").into()).into(),
+ )
+ .into(),
+ BinOp("/", Var("d").into(), Var("e").into()).into(),
+ )
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ a * (b + c) / -d }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ BinOp(
+ "/",
+ BinOp(
+ "*",
+ Var("a").into(),
+ Group(BinOp("+", Var("b").into(), Var("c").into()).into()).into()
+ )
+ .into(),
+ Unary("-", Var("d").into()).into()
+ )
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ a || b && c || d && e }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ BinOp(
+ "||",
+ BinOp(
+ "||",
+ Var("a").into(),
+ BinOp("&&", Var("b").into(), Var("c").into()).into(),
+ )
+ .into(),
+ BinOp("&&", Var("d").into(), Var("e").into()).into(),
+ )
+ )],
+ );
+}
+
+#[test]
+fn test_associativity() {
+ use Expr::*;
+ let syntax = Syntax::default();
+ assert_eq!(
+ super::parse("{{ a + b + c }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ BinOp(
+ "+",
+ BinOp("+", Var("a").into(), Var("b").into()).into(),
+ Var("c").into()
+ )
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ a * b * c }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ BinOp(
+ "*",
+ BinOp("*", Var("a").into(), Var("b").into()).into(),
+ Var("c").into()
+ )
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ a && b && c }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ BinOp(
+ "&&",
+ BinOp("&&", Var("a").into(), Var("b").into()).into(),
+ Var("c").into()
+ )
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ a + b - c + d }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ BinOp(
+ "+",
+ BinOp(
+ "-",
+ BinOp("+", Var("a").into(), Var("b").into()).into(),
+ Var("c").into()
+ )
+ .into(),
+ Var("d").into()
+ )
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ a == b != c > d > e == f }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ BinOp(
+ "==",
+ BinOp(
+ ">",
+ BinOp(
+ ">",
+ BinOp(
+ "!=",
+ BinOp("==", Var("a").into(), Var("b").into()).into(),
+ Var("c").into()
+ )
+ .into(),
+ Var("d").into()
+ )
+ .into(),
+ Var("e").into()
+ )
+ .into(),
+ Var("f").into()
+ )
+ )],
+ );
+}
+
+#[test]
+fn test_odd_calls() {
+ use Expr::*;
+ let syntax = Syntax::default();
+ assert_eq!(
+ super::parse("{{ a[b](c) }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ Call(
+ Box::new(Index(Box::new(Var("a")), Box::new(Var("b")))),
+ vec![Var("c")],
+ ),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ (a + b)(c) }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ Call(
+ Box::new(Group(Box::new(BinOp(
+ "+",
+ Box::new(Var("a")),
+ Box::new(Var("b"))
+ )))),
+ vec![Var("c")],
+ ),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ a + b(c) }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ BinOp(
+ "+",
+ Box::new(Var("a")),
+ Box::new(Call(Box::new(Var("b")), vec![Var("c")])),
+ ),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ (-a)(b) }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ Call(
+ Box::new(Group(Box::new(Unary("-", Box::new(Var("a")))))),
+ vec![Var("b")],
+ ),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ -a(b) }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ Unary("-", Box::new(Call(Box::new(Var("a")), vec![Var("b")])),),
+ )],
+ );
+}
+
+#[test]
+fn test_parse_comments() {
+ let s = &Syntax::default();
+
+ assert_eq!(
+ super::parse("{##}", s).unwrap(),
+ vec![Node::Comment(Ws(None, None))],
+ );
+ assert_eq!(
+ super::parse("{#- #}", s).unwrap(),
+ vec![Node::Comment(Ws(Some(Whitespace::Suppress), None))],
+ );
+ assert_eq!(
+ super::parse("{# -#}", s).unwrap(),
+ vec![Node::Comment(Ws(None, Some(Whitespace::Suppress)))],
+ );
+ assert_eq!(
+ super::parse("{#--#}", s).unwrap(),
+ vec![Node::Comment(Ws(
+ Some(Whitespace::Suppress),
+ Some(Whitespace::Suppress)
+ ))],
+ );
+ assert_eq!(
+ super::parse("{#- foo\n bar -#}", s).unwrap(),
+ vec![Node::Comment(Ws(
+ Some(Whitespace::Suppress),
+ Some(Whitespace::Suppress)
+ ))],
+ );
+ assert_eq!(
+ super::parse("{#- foo\n {#- bar\n -#} baz -#}", s).unwrap(),
+ vec![Node::Comment(Ws(
+ Some(Whitespace::Suppress),
+ Some(Whitespace::Suppress)
+ ))],
+ );
+ assert_eq!(
+ super::parse("{#+ #}", s).unwrap(),
+ vec![Node::Comment(Ws(Some(Whitespace::Preserve), None))],
+ );
+ assert_eq!(
+ super::parse("{# +#}", s).unwrap(),
+ vec![Node::Comment(Ws(None, Some(Whitespace::Preserve)))],
+ );
+ assert_eq!(
+ super::parse("{#++#}", s).unwrap(),
+ vec![Node::Comment(Ws(
+ Some(Whitespace::Preserve),
+ Some(Whitespace::Preserve)
+ ))],
+ );
+ assert_eq!(
+ super::parse("{#+ foo\n bar +#}", s).unwrap(),
+ vec![Node::Comment(Ws(
+ Some(Whitespace::Preserve),
+ Some(Whitespace::Preserve)
+ ))],
+ );
+ assert_eq!(
+ super::parse("{#+ foo\n {#+ bar\n +#} baz -+#}", s).unwrap(),
+ vec![Node::Comment(Ws(
+ Some(Whitespace::Preserve),
+ Some(Whitespace::Preserve)
+ ))],
+ );
+ assert_eq!(
+ super::parse("{#~ #}", s).unwrap(),
+ vec![Node::Comment(Ws(Some(Whitespace::Minimize), None))],
+ );
+ assert_eq!(
+ super::parse("{# ~#}", s).unwrap(),
+ vec![Node::Comment(Ws(None, Some(Whitespace::Minimize)))],
+ );
+ assert_eq!(
+ super::parse("{#~~#}", s).unwrap(),
+ vec![Node::Comment(Ws(
+ Some(Whitespace::Minimize),
+ Some(Whitespace::Minimize)
+ ))],
+ );
+ assert_eq!(
+ super::parse("{#~ foo\n bar ~#}", s).unwrap(),
+ vec![Node::Comment(Ws(
+ Some(Whitespace::Minimize),
+ Some(Whitespace::Minimize)
+ ))],
+ );
+ assert_eq!(
+ super::parse("{#~ foo\n {#~ bar\n ~#} baz -~#}", s).unwrap(),
+ vec![Node::Comment(Ws(
+ Some(Whitespace::Minimize),
+ Some(Whitespace::Minimize)
+ ))],
+ );
+
+ assert_eq!(
+ super::parse("{# foo {# bar #} {# {# baz #} qux #} #}", s).unwrap(),
+ vec![Node::Comment(Ws(None, None))],
+ );
+}
+
+#[test]
+fn test_parse_tuple() {
+ use super::Expr::*;
+ let syntax = Syntax::default();
+ assert_eq!(
+ super::parse("{{ () }}", &syntax).unwrap(),
+ vec![Node::Expr(Ws(None, None), Tuple(vec![]),)],
+ );
+ assert_eq!(
+ super::parse("{{ (1) }}", &syntax).unwrap(),
+ vec![Node::Expr(Ws(None, None), Group(Box::new(NumLit("1"))),)],
+ );
+ assert_eq!(
+ super::parse("{{ (1,) }}", &syntax).unwrap(),
+ vec![Node::Expr(Ws(None, None), Tuple(vec![NumLit("1")]),)],
+ );
+ assert_eq!(
+ super::parse("{{ (1, ) }}", &syntax).unwrap(),
+ vec![Node::Expr(Ws(None, None), Tuple(vec![NumLit("1")]),)],
+ );
+ assert_eq!(
+ super::parse("{{ (1 ,) }}", &syntax).unwrap(),
+ vec![Node::Expr(Ws(None, None), Tuple(vec![NumLit("1")]),)],
+ );
+ assert_eq!(
+ super::parse("{{ (1 , ) }}", &syntax).unwrap(),
+ vec![Node::Expr(Ws(None, None), Tuple(vec![NumLit("1")]),)],
+ );
+ assert_eq!(
+ super::parse("{{ (1, 2) }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ Tuple(vec![NumLit("1"), NumLit("2")]),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ (1, 2,) }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ Tuple(vec![NumLit("1"), NumLit("2")]),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ (1, 2, 3) }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ Tuple(vec![NumLit("1"), NumLit("2"), NumLit("3")]),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ ()|abs }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ Filter("abs", vec![Tuple(vec![])]),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ () | abs }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ BinOp("|", Box::new(Tuple(vec![])), Box::new(Var("abs"))),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ (1)|abs }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ Filter("abs", vec![Group(Box::new(NumLit("1")))]),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ (1) | abs }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ BinOp(
+ "|",
+ Box::new(Group(Box::new(NumLit("1")))),
+ Box::new(Var("abs"))
+ ),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ (1,)|abs }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ Filter("abs", vec![Tuple(vec![NumLit("1")])]),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ (1,) | abs }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ BinOp(
+ "|",
+ Box::new(Tuple(vec![NumLit("1")])),
+ Box::new(Var("abs"))
+ ),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ (1, 2)|abs }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ Filter("abs", vec![Tuple(vec![NumLit("1"), NumLit("2")])]),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ (1, 2) | abs }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(None, None),
+ BinOp(
+ "|",
+ Box::new(Tuple(vec![NumLit("1"), NumLit("2")])),
+ Box::new(Var("abs"))
+ ),
+ )],
+ );
+}
+
+#[test]
+fn test_missing_space_after_kw() {
+ let syntax = Syntax::default();
+ let err = super::parse("{%leta=b%}", &syntax).unwrap_err();
+ assert!(matches!(
+ &*err.msg,
+ "unable to parse template:\n\n\"{%leta=b%}\""
+ ));
+}
diff --git a/third_party/rust/rinja_derive/templates/a.html b/third_party/rust/askama_derive/templates/a.html
similarity index 100%
rename from third_party/rust/rinja_derive/templates/a.html
rename to third_party/rust/askama_derive/templates/a.html
diff --git a/third_party/rust/rinja_derive/templates/b.html b/third_party/rust/askama_derive/templates/b.html
similarity index 100%
rename from third_party/rust/rinja_derive/templates/b.html
rename to third_party/rust/askama_derive/templates/b.html
diff --git a/third_party/rust/rinja_derive/templates/sub/b.html b/third_party/rust/askama_derive/templates/sub/b.html
similarity index 100%
rename from third_party/rust/rinja_derive/templates/sub/b.html
rename to third_party/rust/askama_derive/templates/sub/b.html
diff --git a/third_party/rust/rinja_derive/templates/sub/c.html b/third_party/rust/askama_derive/templates/sub/c.html
similarity index 100%
rename from third_party/rust/rinja_derive/templates/sub/c.html
rename to third_party/rust/askama_derive/templates/sub/c.html
diff --git a/third_party/rust/rinja_derive/templates/sub/sub1/d.html b/third_party/rust/askama_derive/templates/sub/sub1/d.html
similarity index 100%
rename from third_party/rust/rinja_derive/templates/sub/sub1/d.html
rename to third_party/rust/askama_derive/templates/sub/sub1/d.html
diff --git a/third_party/rust/askama_escape/.cargo-checksum.json b/third_party/rust/askama_escape/.cargo-checksum.json
new file mode 100644
index 000000000000..b4c6a0e7257b
--- /dev/null
+++ b/third_party/rust/askama_escape/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"a140f9df40d83c3f3c39864df0e272bde3e210ad9d37cf90342c45f137c5b1aa","LICENSE-APACHE":"87cb0d734c723c083e51c825930ff42bce28596b52dee15567f6b28f19c195e3","LICENSE-MIT":"df20e0180764bf5bd76f74d47bc9e8c0069a666401629c390003a1d5eba99c92","README.md":"f1c057bd94aff0d98bcd7267655bb8af4c9c81a643423c5948f711e199945905","benches/all.rs":"0e0458780fa24e55402b11fdbc6ef2191b399459461a9f909a516363e824c838","src/lib.rs":"5f96ad55ac916b63ef051373994c08a0bfaa3b85a5bf031a579dc23163c47267"},"package":"619743e34b5ba4e9703bba34deac3427c72507c7159f5fd030aea8cac0cfe341"}
\ No newline at end of file
diff --git a/third_party/rust/askama_escape/Cargo.toml b/third_party/rust/askama_escape/Cargo.toml
new file mode 100644
index 000000000000..d4944ed6eabd
--- /dev/null
+++ b/third_party/rust/askama_escape/Cargo.toml
@@ -0,0 +1,33 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2018"
+name = "askama_escape"
+version = "0.10.3"
+description = "Optimized HTML escaping code, extracted from Askama"
+homepage = "https://github.com/djc/askama"
+documentation = "https://docs.rs/askama_escape"
+readme = "README.md"
+keywords = ["html", "escaping"]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/djc/askama"
+
+[[bench]]
+name = "all"
+harness = false
+[dev-dependencies.criterion]
+version = "0.3"
+
+[features]
+json = []
+[badges.maintenance]
+status = "actively-developed"
diff --git a/third_party/rust/rinja_parser/LICENSE-APACHE b/third_party/rust/askama_escape/LICENSE-APACHE
similarity index 100%
rename from third_party/rust/rinja_parser/LICENSE-APACHE
rename to third_party/rust/askama_escape/LICENSE-APACHE
diff --git a/third_party/rust/rinja_parser/LICENSE-MIT b/third_party/rust/askama_escape/LICENSE-MIT
similarity index 100%
rename from third_party/rust/rinja_parser/LICENSE-MIT
rename to third_party/rust/askama_escape/LICENSE-MIT
diff --git a/third_party/rust/askama_escape/README.md b/third_party/rust/askama_escape/README.md
new file mode 100644
index 000000000000..8c6796d83bab
--- /dev/null
+++ b/third_party/rust/askama_escape/README.md
@@ -0,0 +1,9 @@
+# askama_escape: escaping utilities for the Askama templating engine
+
+[![Documentation](https://docs.rs/askama_escape/badge.svg)](https://docs.rs/askama_escape/)
+[![Latest version](https://img.shields.io/crates/v/askama_escape.svg)](https://crates.io/crates/askama_escape)
+[![Build Status](https://github.com/djc/askama/workflows/CI/badge.svg)](https://github.com/djc/askama/actions?query=workflow%3ACI)
+[![Chat](https://badges.gitter.im/djc/askama.svg)](https://gitter.im/djc/askama)
+
+This crate contains helper code for HTML escaping used by the
+[Askama](https://github.com/djc/askama) templating engine.
diff --git a/third_party/rust/rinja/benches/strings.inc b/third_party/rust/askama_escape/benches/all.rs
similarity index 81%
rename from third_party/rust/rinja/benches/strings.inc
rename to third_party/rust/askama_escape/benches/all.rs
index 29a47a2586d7..e0ef11f1234f 100644
--- a/third_party/rust/rinja/benches/strings.inc
+++ b/third_party/rust/askama_escape/benches/all.rs
@@ -1,5 +1,18 @@
-{
- const STRING_LONG: &str = r"
+#[macro_use]
+extern crate criterion;
+
+use askama_escape::{Html, MarkupDisplay};
+use criterion::Criterion;
+
+criterion_main!(benches);
+criterion_group!(benches, functions);
+
+fn functions(c: &mut Criterion) {
+ c.bench_function("Escaping", escaping);
+}
+
+fn escaping(b: &mut criterion::Bencher<'_>) {
+ let string_long = r#"
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Mauris consequat tellus sit
amet ornare fermentum. Etiam nec erat ante. In at metus a orci mollis scelerisque.
Sed eget ultrices turpis, at sollicitudin erat. Integer hendrerit nec magna quis
@@ -36,15 +49,11 @@
suscipit leo, lacinia dignissim lacus. Sed eget volutpat mi. In eu bibendum neque. Pellentesque
finibus velit a fermentum rhoncus. Maecenas leo purus, eleifend eu lacus a, condimentum sagittis
justo.
-
";
-
- const STRING_SHORT: &str = "Lorem ipsum dolor sit amet,bar&foo\"bar\\foo/bar";
-
- const EMPTY: &str = "";
-
- const NO_ESCAPE: &str = "Lorem ipsum dolor sit amet,";
-
- const NO_ESCAPE_LONG: &str = r"
+"#;
+ let string_short = "Lorem ipsum dolor sit amet,bar&foo\"bar\\foo/bar";
+ let empty = "";
+ let no_escape = "Lorem ipsum dolor sit amet,";
+ let no_escape_long = r#"
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Proin scelerisque eu urna in aliquet.
Phasellus ac nulla a urna sagittis consequat id quis est. Nullam eu ex eget erat accumsan dictum
ac lobortis urna. Etiam fermentum ut quam at dignissim. Curabitur vestibulum luctus tellus, sit
@@ -56,14 +65,13 @@ lacus ipsum eget quam. Vivamus orci lorem, maximus ac mi eget, bibendum vulputat
vestibulum dui hendrerit, vestibulum lacus sit amet, posuere erat. Vivamus euismod massa diam,
vulputate euismod lectus vestibulum nec. Donec sit amet massa magna. Nunc ipsum nulla, euismod
quis lacus at, gravida maximus elit. Duis tristique, nisl nullam.
- ";
+ "#;
- const STRINGS: &[&str] = &[
- STRING_LONG,
- STRING_SHORT,
- EMPTY,
- NO_ESCAPE,
- NO_ESCAPE_LONG,
- ];
- STRINGS
+ b.iter(|| {
+ format!("{}", MarkupDisplay::new_unsafe(string_long, Html));
+ format!("{}", MarkupDisplay::new_unsafe(string_short, Html));
+ format!("{}", MarkupDisplay::new_unsafe(empty, Html));
+ format!("{}", MarkupDisplay::new_unsafe(no_escape, Html));
+ format!("{}", MarkupDisplay::new_unsafe(no_escape_long, Html));
+ });
}
diff --git a/third_party/rust/askama_escape/src/lib.rs b/third_party/rust/askama_escape/src/lib.rs
new file mode 100644
index 000000000000..178884308600
--- /dev/null
+++ b/third_party/rust/askama_escape/src/lib.rs
@@ -0,0 +1,239 @@
+#![cfg_attr(not(any(feature = "json", test)), no_std)]
+#![deny(elided_lifetimes_in_paths)]
+#![deny(unreachable_pub)]
+
+use core::fmt::{self, Display, Formatter, Write};
+use core::str;
+
+#[derive(Debug)]
+pub struct MarkupDisplay<E, T>
+where
+ E: Escaper,
+ T: Display,
+{
+ value: DisplayValue<T>,
+ escaper: E,
+}
+
+impl<E, T> MarkupDisplay<E, T>
+where
+ E: Escaper,
+ T: Display,
+{
+ pub fn new_unsafe(value: T, escaper: E) -> Self {
+ Self {
+ value: DisplayValue::Unsafe(value),
+ escaper,
+ }
+ }
+
+ pub fn new_safe(value: T, escaper: E) -> Self {
+ Self {
+ value: DisplayValue::Safe(value),
+ escaper,
+ }
+ }
+
+ #[must_use]
+ pub fn mark_safe(mut self) -> MarkupDisplay<E, T> {
+ self.value = match self.value {
+ DisplayValue::Unsafe(t) => DisplayValue::Safe(t),
+ _ => self.value,
+ };
+ self
+ }
+}
+
+impl<E, T> Display for MarkupDisplay<E, T>
+where
+ E: Escaper,
+ T: Display,
+{
+ fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
+ match self.value {
+ DisplayValue::Unsafe(ref t) => write!(
+ EscapeWriter {
+ fmt,
+ escaper: &self.escaper
+ },
+ "{}",
+ t
+ ),
+ DisplayValue::Safe(ref t) => t.fmt(fmt),
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct EscapeWriter<'a, E, W> {
+ fmt: W,
+ escaper: &'a E,
+}
+
+impl<E, W> Write for EscapeWriter<'_, E, W>
+where
+ W: Write,
+ E: Escaper,
+{
+ fn write_str(&mut self, s: &str) -> fmt::Result {
+ self.escaper.write_escaped(&mut self.fmt, s)
+ }
+}
+
+pub fn escape<E>(string: &str, escaper: E) -> Escaped<'_, E>
+where
+ E: Escaper,
+{
+ Escaped { string, escaper }
+}
+
+#[derive(Debug)]
+pub struct Escaped<'a, E>
+where
+ E: Escaper,
+{
+ string: &'a str,
+ escaper: E,
+}
+
+impl<E> Display for Escaped<'_, E>
+where
+ E: Escaper,
+{
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.escaper.write_escaped(fmt, self.string)
+ }
+}
+
+pub struct Html;
+
+macro_rules! escaping_body {
+ ($start:ident, $i:ident, $fmt:ident, $bytes:ident, $quote:expr) => {{
+ if $start < $i {
+ $fmt.write_str(unsafe { str::from_utf8_unchecked(&$bytes[$start..$i]) })?;
+ }
+ $fmt.write_str($quote)?;
+ $start = $i + 1;
+ }};
+}
+
+impl Escaper for Html {
+ fn write_escaped<W>(&self, mut fmt: W, string: &str) -> fmt::Result
+ where
+ W: Write,
+ {
+ let bytes = string.as_bytes();
+ let mut start = 0;
+ for (i, b) in bytes.iter().enumerate() {
+ if b.wrapping_sub(b'"') <= FLAG {
+ match *b {
+ b'<' => escaping_body!(start, i, fmt, bytes, "&lt;"),
+ b'>' => escaping_body!(start, i, fmt, bytes, "&gt;"),
+ b'&' => escaping_body!(start, i, fmt, bytes, "&amp;"),
+ b'"' => escaping_body!(start, i, fmt, bytes, "&quot;"),
+ b'\'' => escaping_body!(start, i, fmt, bytes, "&#x27;"),
+ _ => (),
+ }
+ }
+ }
+ if start < bytes.len() {
+ fmt.write_str(unsafe { str::from_utf8_unchecked(&bytes[start..]) })
+ } else {
+ Ok(())
+ }
+ }
+}
+
+pub struct Text;
+
+impl Escaper for Text {
+ fn write_escaped<W>(&self, mut fmt: W, string: &str) -> fmt::Result
+ where
+ W: Write,
+ {
+ fmt.write_str(string)
+ }
+}
+
+#[derive(Debug, PartialEq)]
+enum DisplayValue<T>
+where
+ T: Display,
+{
+ Safe(T),
+ Unsafe(T),
+}
+
+pub trait Escaper {
+ fn write_escaped<W>(&self, fmt: W, string: &str) -> fmt::Result
+ where
+ W: Write;
+}
+
+const FLAG: u8 = b'>' - b'"';
+
+/// Escape chevrons, ampersand and apostrophes for use in JSON
+#[cfg(feature = "json")]
+#[derive(Debug, Clone, Default)]
+pub struct JsonEscapeBuffer(Vec<u8>);
+
+#[cfg(feature = "json")]
+impl JsonEscapeBuffer {
+ pub fn new() -> Self {
+ Self(Vec::new())
+ }
+
+ pub fn finish(self) -> String {
+ unsafe { String::from_utf8_unchecked(self.0) }
+ }
+}
+
+#[cfg(feature = "json")]
+impl std::io::Write for JsonEscapeBuffer {
+ fn write(&mut self, bytes: &[u8]) -> std::io::Result<usize> {
+ macro_rules! push_esc_sequence {
+ ($start:ident, $i:ident, $self:ident, $bytes:ident, $quote:expr) => {{
+ if $start < $i {
+ $self.0.extend_from_slice(&$bytes[$start..$i]);
+ }
+ $self.0.extend_from_slice($quote);
+ $start = $i + 1;
+ }};
+ }
+
+ self.0.reserve(bytes.len());
+ let mut start = 0;
+ for (i, b) in bytes.iter().enumerate() {
+ match *b {
+ b'&' => push_esc_sequence!(start, i, self, bytes, br#"\u0026"#),
+ b'\'' => push_esc_sequence!(start, i, self, bytes, br#"\u0027"#),
+ b'<' => push_esc_sequence!(start, i, self, bytes, br#"\u003c"#),
+ b'>' => push_esc_sequence!(start, i, self, bytes, br#"\u003e"#),
+ _ => (),
+ }
+ }
+ if start < bytes.len() {
+ self.0.extend_from_slice(&bytes[start..]);
+ }
+ Ok(bytes.len())
+ }
+
+ fn flush(&mut self) -> std::io::Result<()> {
+ Ok(())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use std::string::ToString;
+
+ #[test]
+ fn test_escape() {
+ assert_eq!(escape("", Html).to_string(), "");
+ assert_eq!(escape("<&>", Html).to_string(), "&lt;&amp;&gt;");
+ assert_eq!(escape("bla&", Html).to_string(), "bla&amp;");
+ assert_eq!(escape("",
"The Glean Team <glean-team@mozilla.com>",
@@ -29,7 +29,6 @@ include = [
"/uniffi.toml",
"/build.rs",
]
-autolib = false
autobins = false
autoexamples = false
autotests = false
@@ -41,17 +40,7 @@ license = "MPL-2.0"
repository = "https://github.com/mozilla/glean"
[package.metadata.glean]
-glean-parser = "17.0.1"
-
-[badges.circle-ci]
-branch = "main"
-repository = "mozilla/glean"
-
-[badges.maintenance]
-status = "actively-developed"
-
-[features]
-enable_env_logger = ["env_logger"]
+glean-parser = "16.1.0"
[lib]
name = "glean_core"
@@ -179,7 +168,7 @@ version = "1.0.4"
version = "0.1.40"
[dependencies.uniffi]
-version = "0.29.0"
+version = "0.28.0"
default-features = false
[dependencies.uuid]
@@ -204,10 +193,13 @@ version = "0.4"
version = "3.8.0"
[build-dependencies.uniffi]
-version = "0.29.0"
+version = "0.28.0"
features = ["build"]
default-features = false
+[features]
+enable_env_logger = ["env_logger"]
+
[target.'cfg(target_os = "android")'.dependencies.android_logger]
version = "0.12.0"
default-features = false
@@ -216,3 +208,10 @@ default-features = false
version = "0.1.0"
features = ["logger"]
default-features = false
+
+[badges.circle-ci]
+branch = "main"
+repository = "mozilla/glean"
+
+[badges.maintenance]
+status = "actively-developed"
diff --git a/third_party/rust/glean-core/src/core/mod.rs b/third_party/rust/glean-core/src/core/mod.rs
index 302f90d134e5..6700ea3fb55d 100644
--- a/third_party/rust/glean-core/src/core/mod.rs
+++ b/third_party/rust/glean-core/src/core/mod.rs
@@ -127,7 +127,7 @@ where
/// ping_lifetime_max_time: 2000,
/// };
/// let mut glean = Glean::new(cfg).unwrap();
-/// let ping = PingType::new("sample", true, false, true, true, true, vec![], vec![], true, vec![]);
+/// let ping = PingType::new("sample", true, false, true, true, true, vec![], vec![], true);
/// glean.register_ping_type(&ping);
///
/// let call_counter: CounterMetric = CounterMetric::new(CommonMetricData {
@@ -277,11 +277,13 @@ impl Glean {
// instantiate the core metrics.
glean.on_upload_enabled();
} else {
- // If upload is disabled, then clear the metrics
- // but do not send a deletion request ping.
- // If we have run before, and we have an old client_id,
- // do the full upload disabled operations to clear metrics
- // and send a deletion request ping.
+ // If upload is disabled, and we've never run before, only set the
+ // client_id to KNOWN_CLIENT_ID, but do not send a deletion request
+ // ping.
+ // If we have run before, and if the client_id is not equal to
+ // the KNOWN_CLIENT_ID, do the full upload disabled operations to
+ // clear metrics, set the client_id to KNOWN_CLIENT_ID, and send a
+ // deletion request ping.
match glean
.core_metrics
.client_id
@@ -289,17 +291,7 @@ impl Glean {
{
None => glean.clear_metrics(),
Some(uuid) => {
- if uuid == *KNOWN_CLIENT_ID {
- // Previously Glean kept the KNOWN_CLIENT_ID stored.
- // Let's ensure we erase it now.
- if let Some(data) = glean.data_store.as_ref() {
- _ = data.remove_single_metric(
- Lifetime::User,
- "glean_client_info",
- "client_id",
- );
- }
- } else {
+ if uuid != *KNOWN_CLIENT_ID {
// Temporarily enable uploading so we can submit a
// deletion request ping.
glean.upload_enabled = true;
@@ -588,6 +580,14 @@ impl Glean {
// so that it can't be accessed until this function is done.
let _lock = self.upload_manager.clear_ping_queue();
+ // There is only one metric that we want to survive after clearing all
+ // metrics: first_run_date. Here, we store its value so we can restore
+ // it after clearing the metrics.
+ let existing_first_run_date = self
+ .core_metrics
+ .first_run_date
+ .get_value(self, "glean_client_info");
+
// Clear any pending pings that follow `collection_enabled`.
let ping_maker = PingMaker::new();
let disabled_pings = self
@@ -605,7 +605,8 @@ impl Glean {
// the effect of resetting those to their initial values.
if let Some(data) = self.data_store.as_ref() {
_ = data.clear_lifetime_storage(Lifetime::User, "glean_internal_info");
- _ = data.remove_single_metric(Lifetime::User, "glean_client_info", "client_id");
+ _ = data.clear_lifetime_storage(Lifetime::User, "glean_client_info");
+ _ = data.clear_lifetime_storage(Lifetime::Application, "glean_client_info");
for (ping_name, ping) in &self.ping_registry {
if ping.follows_collection_enabled() {
_ = data.clear_ping_lifetime_storage(ping_name);
@@ -622,6 +623,32 @@ impl Glean {
// StorageEngineManager), since doing so would mean we would have to have the
// application tell us again which experiments are active if telemetry is
// re-enabled.
+
+ {
+ // We need to briefly set upload_enabled to true here so that `set`
+ // is not a no-op. This is safe, since nothing on the Rust side can
+ // run concurrently to this since we hold a mutable reference to the
+ // Glean object. Additionally, the pending pings have been cleared
+ // from disk, so the PingUploader can't wake up and start sending
+ // pings.
+ self.upload_enabled = true;
+
+ // Store a "dummy" KNOWN_CLIENT_ID in the client_id metric. This will
+ // make it easier to detect if pings were unintentionally sent after
+ // uploading is disabled.
+ self.core_metrics
+ .client_id
+ .set_from_uuid_sync(self, *KNOWN_CLIENT_ID);
+
+ // Restore the first_run_date.
+ if let Some(existing_first_run_date) = existing_first_run_date {
+ self.core_metrics
+ .first_run_date
+ .set_sync_chrono(self, existing_first_run_date);
+ }
+
+ self.upload_enabled = false;
+ }
}
/// Gets the application ID as specified on instantiation.
diff --git a/third_party/rust/glean-core/src/database/mod.rs b/third_party/rust/glean-core/src/database/mod.rs
index d12d0305f641..74f03125bba8 100644
--- a/third_party/rust/glean-core/src/database/mod.rs
+++ b/third_party/rust/glean-core/src/database/mod.rs
@@ -826,6 +826,7 @@ impl Database {
data: &BTreeMap,
) -> Result<()> {
if self.ping_lifetime_threshold == 0 && self.ping_lifetime_max_time.is_zero() {
+ log::trace!("Auto-flush disabled.");
return Ok(());
}
diff --git a/third_party/rust/glean-core/src/glean.udl b/third_party/rust/glean-core/src/glean.udl
index d8f3aa179052..ec75cf0109f7 100644
--- a/third_party/rust/glean-core/src/glean.udl
+++ b/third_party/rust/glean-core/src/glean.udl
@@ -108,7 +108,6 @@ dictionary PingRateLimit {
};
// An enum representing the different logging levels for the `log` crate.
-[Remote]
enum LevelFilter {
"Off",
"Error",
@@ -215,8 +214,6 @@ dictionary PingRequest {
boolean body_has_info_sections;
// The ping's name. Likely also somewhere in `path`.
string ping_name;
- // The capabilities required during this ping's upload.
- sequence<string> uploader_capabilities;
};
// An enum representing the possible upload tasks to be performed by an uploader.
@@ -226,7 +223,6 @@ interface PingUploadTask {
//
// * request: the ping request for upload
Upload(PingRequest request);
-
// A flag signaling that the pending pings directories are not done being processed,
// thus the requester should wait and come back later.
//
@@ -258,15 +254,6 @@ interface UploadResult {
// * unused: _ignored_.
UnrecoverableFailure(i8 unused);
- // The uploader is not capable of uploading this request due to lack of or
- // mismatched capabilities.
- //
- // e.g. The ping requires upload over OHTTP,
- // but the uploader doesn't support OHTTP.
- //
- // * unused: _ignored_.
- Incapable(i8 unused);
-
// A HTTP response code.
//
// This can still indicate an error, depending on the status code.
@@ -322,8 +309,7 @@ interface PingType {
boolean enabled,
sequence<string> schedules_pings,
sequence<string> reason_codes,
- boolean follows_collection_enabled,
- sequence<string> uploader_capabilities
+ boolean follows_collection_enabled
);
void submit(optional string? reason = null);
diff --git a/third_party/rust/glean-core/src/internal_metrics.rs b/third_party/rust/glean-core/src/internal_metrics.rs
index 7c0f697eed6c..fa5a3baca45a 100644
--- a/third_party/rust/glean-core/src/internal_metrics.rs
+++ b/third_party/rust/glean-core/src/internal_metrics.rs
@@ -172,7 +172,6 @@ impl UploadMetrics {
Cow::from("status_code_unknown"),
Cow::from("unrecoverable"),
Cow::from("recoverable"),
- Cow::from("incapable"),
]),
),
diff --git a/third_party/rust/glean-core/src/internal_pings.rs b/third_party/rust/glean-core/src/internal_pings.rs
index 89b4b94fb10d..9b03fb125ae8 100644
--- a/third_party/rust/glean-core/src/internal_pings.rs
+++ b/third_party/rust/glean-core/src/internal_pings.rs
@@ -35,7 +35,6 @@ impl InternalPings {
"inactive".to_string(),
],
true,
- vec![],
),
metrics: PingType::new(
"metrics",
@@ -53,7 +52,6 @@ impl InternalPings {
"upgrade".to_string(),
],
true,
- vec![],
),
events: PingType::new(
"events",
@@ -69,7 +67,6 @@ impl InternalPings {
"max_capacity".to_string(),
],
true,
- vec![],
),
deletion_request: PingType::new(
"deletion-request",
@@ -81,7 +78,6 @@ impl InternalPings {
vec![],
vec!["at_init".to_string(), "set_upload_enabled".to_string()],
true,
- vec![],
),
}
}
diff --git a/third_party/rust/glean-core/src/lib.rs b/third_party/rust/glean-core/src/lib.rs
index aac594e20992..dc615c27ad92 100644
--- a/third_party/rust/glean-core/src/lib.rs
+++ b/third_party/rust/glean-core/src/lib.rs
@@ -2,7 +2,6 @@
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at https://mozilla.org/MPL/2.0/.
-#![allow(clippy::doc_overindented_list_items)]
#![allow(clippy::significant_drop_in_scrutinee)]
#![allow(clippy::uninlined_format_args)]
#![deny(rustdoc::broken_intra_doc_links)]
@@ -412,7 +411,7 @@ fn initialize_inner(
// The debug view tag might have been set before initialize,
// get the cached value and set it.
let debug_tag = PRE_INIT_DEBUG_VIEW_TAG.lock().unwrap();
- if !debug_tag.is_empty() {
+ if debug_tag.len() > 0 {
glean.set_debug_view_tag(&debug_tag);
}
@@ -426,7 +425,7 @@ fn initialize_inner(
// The source tags might have been set before initialize,
// get the cached value and set them.
let source_tags = PRE_INIT_SOURCE_TAGS.lock().unwrap();
- if !source_tags.is_empty() {
+ if source_tags.len() > 0 {
glean.set_source_tags(source_tags.to_vec());
}
@@ -1307,19 +1306,31 @@ mod ffi {
type CowString = Cow<'static, str>;
- uniffi::custom_type!(CowString, String, {
- remote,
- lower: |s| s.into_owned(),
- try_lift: |s| Ok(Cow::from(s))
- });
+ impl UniffiCustomTypeConverter for CowString {
+ type Builtin = String;
+
+ fn into_custom(val: Self::Builtin) -> uniffi::Result<Self> {
+ Ok(Cow::from(val))
+ }
+
+ fn from_custom(obj: Self) -> Self::Builtin {
+ obj.into_owned()
+ }
+ }
type JsonValue = serde_json::Value;
- uniffi::custom_type!(JsonValue, String, {
- remote,
- lower: |s| serde_json::to_string(&s).unwrap(),
- try_lift: |s| Ok(serde_json::from_str(&s)?)
- });
+ impl UniffiCustomTypeConverter for JsonValue {
+ type Builtin = String;
+
+ fn into_custom(val: Self::Builtin) -> uniffi::Result<Self> {
+ Ok(serde_json::from_str(&val)?)
+ }
+
+ fn from_custom(obj: Self) -> Self::Builtin {
+ serde_json::to_string(&obj).unwrap()
+ }
+ }
}
pub use ffi::*;
diff --git a/third_party/rust/glean-core/src/lib_unit_tests.rs b/third_party/rust/glean-core/src/lib_unit_tests.rs
index 005cf92b4165..39a0ed243488 100644
--- a/third_party/rust/glean-core/src/lib_unit_tests.rs
+++ b/third_party/rust/glean-core/src/lib_unit_tests.rs
@@ -35,7 +35,6 @@ pub fn new_glean(tempdir: Option) -> (Glean, tempfile::TempDi
vec![],
vec![],
true,
- vec![],
);
glean.register_ping_type(&ping);
let ping = PingType::new_internal(
@@ -48,7 +47,6 @@ pub fn new_glean(tempdir: Option) -> (Glean, tempfile::TempDi
vec![],
vec![],
true,
- vec![],
);
glean.register_ping_type(&ping);
(glean, dir)
@@ -350,11 +348,12 @@ fn client_id_is_managed_correctly_when_toggling_uploading() {
glean.set_upload_enabled(false);
assert_eq!(
- None,
+ *KNOWN_CLIENT_ID,
glean
.core_metrics
.client_id
.get_value(&glean, "glean_client_info")
+ .unwrap()
);
glean.set_upload_enabled(true);
@@ -368,17 +367,18 @@ fn client_id_is_managed_correctly_when_toggling_uploading() {
}
#[test]
-fn client_id_is_not_set_when_uploading_disabled_at_start() {
+fn client_id_is_set_to_known_value_when_uploading_disabled_at_start() {
let dir = tempfile::tempdir().unwrap();
let tmpname = dir.path().display().to_string();
let glean = Glean::with_options(&tmpname, GLOBAL_APPLICATION_ID, false, true);
assert_eq!(
- None,
+ *KNOWN_CLIENT_ID,
glean
.core_metrics
.client_id
.get_value(&glean, "glean_client_info")
+ .unwrap()
);
}
@@ -1218,7 +1218,6 @@ fn disabled_pings_are_not_submitted() {
vec![],
vec![],
true,
- vec![],
);
glean.register_ping_type(&ping);
@@ -1272,7 +1271,6 @@ fn pings_are_controllable_from_remote_settings_config() {
vec![],
vec![],
true,
- vec![],
);
glean.register_ping_type(&disabled_ping);
let enabled_ping = PingType::new(
@@ -1285,7 +1283,6 @@ fn pings_are_controllable_from_remote_settings_config() {
vec![],
vec![],
true,
- vec![],
);
glean.register_ping_type(&enabled_ping);
diff --git a/third_party/rust/glean-core/src/metrics/ping.rs b/third_party/rust/glean-core/src/metrics/ping.rs
index 1b19d9d41d73..dcc8de9097cd 100644
--- a/third_party/rust/glean-core/src/metrics/ping.rs
+++ b/third_party/rust/glean-core/src/metrics/ping.rs
@@ -40,9 +40,6 @@ struct InnerPing {
/// True when it follows the `collection_enabled` flag (aka `upload_enabled`) flag.
/// Otherwise it needs to be enabled through `enabled_pings`.
follows_collection_enabled: AtomicBool,
-
- /// Ordered list of uploader capabilities required to upload this ping.
- uploader_capabilities: Vec,
}
impl fmt::Debug for PingType {
@@ -60,7 +57,6 @@ impl fmt::Debug for PingType {
"follows_collection_enabled",
&self.0.follows_collection_enabled.load(Ordering::Relaxed),
)
- .field("uploader_capabilities", &self.0.uploader_capabilities)
.finish()
}
}
@@ -83,7 +79,6 @@ impl PingType {
/// * `enabled` - Whether or not this ping is enabled. Note: Data that would be sent on a disabled
/// ping will still be collected but is discarded rather than being submitted.
/// * `reason_codes` - The valid reason codes for this ping.
- /// * `uploader_capabilities` - The ordered list of capabilities this ping requires to be uploaded with.
#[allow(clippy::too_many_arguments)]
pub fn new<A: Into<String>>(
name: A,
@@ -95,7 +90,6 @@ impl PingType {
schedules_pings: Vec<String>,
reason_codes: Vec<String>,
follows_collection_enabled: bool,
- uploader_capabilities: Vec<String>,
) -> Self {
Self::new_internal(
name,
@@ -107,7 +101,6 @@ impl PingType {
schedules_pings,
reason_codes,
follows_collection_enabled,
- uploader_capabilities,
)
}
@@ -122,7 +115,6 @@ impl PingType {
schedules_pings: Vec<String>,
reason_codes: Vec<String>,
follows_collection_enabled: bool,
- uploader_capabilities: Vec<String>,
) -> Self {
let this = Self(Arc::new(InnerPing {
name: name.into(),
@@ -134,7 +126,6 @@ impl PingType {
schedules_pings,
reason_codes,
follows_collection_enabled: AtomicBool::new(follows_collection_enabled),
- uploader_capabilities,
}));
// Register this ping.
@@ -231,11 +222,6 @@ impl PingType {
&self.0.reason_codes
}
- /// The capabilities this ping requires to be uploaded under.
- pub fn uploader_capabilities(&self) -> &[String] {
- &self.0.uploader_capabilities
- }
-
/// Submits the ping for eventual uploading.
///
/// The ping content is assembled as soon as possible, but upload is not
@@ -357,7 +343,6 @@ impl PingType {
headers: Some(ping.headers),
body_has_info_sections: self.0.include_info_sections,
ping_name: self.0.name.to_string(),
- uploader_capabilities: self.0.uploader_capabilities.clone(),
};
glean.upload_manager.enqueue_ping(glean, ping);
diff --git a/third_party/rust/glean-core/src/ping/mod.rs b/third_party/rust/glean-core/src/ping/mod.rs
index 089c38d8caf0..da80fec90158 100644
--- a/third_party/rust/glean-core/src/ping/mod.rs
+++ b/third_party/rust/glean-core/src/ping/mod.rs
@@ -34,8 +34,6 @@ pub struct Ping<'a> {
pub includes_info_sections: bool,
/// Other pings that should be scheduled when this ping is sent.
pub schedules_pings: Vec<String>,
- /// Capabilities the uploader must have in order to uplaoad this ping.
- pub uploader_capabilities: Vec<String>,
}
/// Collect a ping's data, assemble it into its full payload and store it on disk.
@@ -336,7 +334,6 @@ impl PingMaker {
headers: self.get_headers(glean),
includes_info_sections: ping.include_info_sections(),
schedules_pings: ping.schedules_pings().to_vec(),
- uploader_capabilities: ping.uploader_capabilities().to_vec(),
})
}
@@ -395,7 +392,6 @@ impl PingMaker {
headers: Some(ping.headers.clone()),
body_has_info_sections: Some(ping.includes_info_sections),
ping_name: Some(ping.name.to_string()),
- uploader_capabilities: Some(ping.uploader_capabilities.clone()),
};
file.write_all(::serde_json::to_string(&metadata)?.as_bytes())?;
}
diff --git a/third_party/rust/glean-core/src/upload/directory.rs b/third_party/rust/glean-core/src/upload/directory.rs
index b11561a41979..fb90675be3c8 100644
--- a/third_party/rust/glean-core/src/upload/directory.rs
+++ b/third_party/rust/glean-core/src/upload/directory.rs
@@ -30,8 +30,6 @@ pub struct PingPayload {
pub body_has_info_sections: bool,
/// The ping's name. (Also likely in the upload_path.)
pub ping_name: String,
- /// The capabilities this ping must be uploaded under.
- pub uploader_capabilities: Vec<String>,
}
/// A struct to hold the result of scanning all pings directories.
@@ -88,8 +86,6 @@ pub struct PingMetadata {
pub body_has_info_sections: Option<bool>,
/// The name of the ping.
pub ping_name: Option<String>,
- /// The capabilities this ping must be uploaded under.
- pub uploader_capabilities: Option<Vec<String>>,
}
/// Processes a ping's metadata.
@@ -200,7 +196,6 @@ impl PingDirectoryManager {
headers,
body_has_info_sections,
ping_name,
- uploader_capabilities,
} = metadata
.and_then(|m| process_metadata(&path, &m))
.unwrap_or_default();
@@ -213,7 +208,6 @@ impl PingDirectoryManager {
headers,
body_has_info_sections: body_has_info_sections.unwrap_or(true),
ping_name,
- uploader_capabilities: uploader_capabilities.unwrap_or_default(),
});
} else {
log::warn!(
@@ -343,18 +337,7 @@ mod test {
let (mut glean, dir) = new_glean(None);
// Register a ping for testing
- let ping_type = PingType::new(
- "test",
- true,
- true,
- true,
- true,
- true,
- vec![],
- vec![],
- true,
- vec![],
- );
+ let ping_type = PingType::new("test", true, true, true, true, true, vec![], vec![], true);
glean.register_ping_type(&ping_type);
// Submit the ping to populate the pending_pings directory
@@ -381,18 +364,7 @@ mod test {
let (mut glean, dir) = new_glean(None);
// Register a ping for testing
- let ping_type = PingType::new(
- "test",
- true,
- true,
- true,
- true,
- true,
- vec![],
- vec![],
- true,
- vec![],
- );
+ let ping_type = PingType::new("test", true, true, true, true, true, vec![], vec![], true);
glean.register_ping_type(&ping_type);
// Submit the ping to populate the pending_pings directory
@@ -428,18 +400,7 @@ mod test {
let (mut glean, dir) = new_glean(None);
// Register a ping for testing
- let ping_type = PingType::new(
- "test",
- true,
- true,
- true,
- true,
- true,
- vec![],
- vec![],
- true,
- vec![],
- );
+ let ping_type = PingType::new("test", true, true, true, true, true, vec![], vec![], true);
glean.register_ping_type(&ping_type);
// Submit the ping to populate the pending_pings directory
diff --git a/third_party/rust/glean-core/src/upload/mod.rs b/third_party/rust/glean-core/src/upload/mod.rs
index 4769aef8286b..87d637c06efe 100644
--- a/third_party/rust/glean-core/src/upload/mod.rs
+++ b/third_party/rust/glean-core/src/upload/mod.rs
@@ -331,7 +331,6 @@ impl PingUploadManager {
headers,
body_has_info_sections,
ping_name,
- uploader_capabilities,
} = ping;
let mut request = PingRequest::builder(
&self.language_binding_name,
@@ -341,8 +340,7 @@ impl PingUploadManager {
.path(path)
.body(body)
.body_has_info_sections(body_has_info_sections)
- .ping_name(ping_name)
- .uploader_capabilities(uploader_capabilities);
+ .ping_name(ping_name);
if let Some(headers) = headers {
request = request.headers(headers);
@@ -744,7 +742,7 @@ impl PingUploadManager {
self.directory_manager.delete_file(document_id);
}
- UnrecoverableFailure { .. } | HttpStatus { code: 400..=499 } | Incapable { .. } => {
+ UnrecoverableFailure { .. } | HttpStatus { code: 400..=499 } => {
log::warn!(
"Unrecoverable upload failure while attempting to send ping {}. Error was {:?}",
document_id,
@@ -891,7 +889,6 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "ping-name".into(),
- uploader_capabilities: vec![],
},
);
@@ -919,7 +916,6 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "ping-name".into(),
- uploader_capabilities: vec![],
},
);
}
@@ -958,7 +954,6 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "ping-name".into(),
- uploader_capabilities: vec![],
},
);
}
@@ -979,7 +974,6 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "ping-name".into(),
- uploader_capabilities: vec![],
},
);
@@ -1013,7 +1007,6 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "ping-name".into(),
- uploader_capabilities: vec![],
},
);
}
@@ -1043,7 +1036,6 @@ mod test {
vec![],
vec![],
true,
- vec![],
);
glean.register_ping_type(&ping_type);
@@ -1086,7 +1078,6 @@ mod test {
vec![],
vec![],
true,
- vec![],
);
glean.register_ping_type(&ping_type);
@@ -1127,7 +1118,6 @@ mod test {
vec![],
vec![],
true,
- vec![],
);
glean.register_ping_type(&ping_type);
@@ -1168,7 +1158,6 @@ mod test {
vec![],
vec![],
true,
- vec![],
);
glean.register_ping_type(&ping_type);
@@ -1209,7 +1198,6 @@ mod test {
vec![],
vec![],
true,
- vec![],
);
glean.register_ping_type(&ping_type);
@@ -1252,7 +1240,6 @@ mod test {
vec![],
vec![],
true,
- vec![],
);
glean.register_ping_type(&ping_type);
@@ -1303,7 +1290,6 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "test-ping".into(),
- uploader_capabilities: vec![],
},
);
@@ -1324,7 +1310,6 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "test-ping".into(),
- uploader_capabilities: vec![],
},
);
@@ -1373,7 +1358,6 @@ mod test {
vec![],
vec![],
true,
- vec![],
);
glean.register_ping_type(&ping_type);
@@ -1410,7 +1394,6 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "test-ping".into(),
- uploader_capabilities: vec![],
},
);
upload_manager.enqueue_ping(
@@ -1422,7 +1405,6 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "test-ping".into(),
- uploader_capabilities: vec![],
},
);
@@ -1452,7 +1434,6 @@ mod test {
vec![],
vec![],
true,
- vec![],
);
glean.register_ping_type(&ping_type);
@@ -1513,7 +1494,6 @@ mod test {
vec![],
vec![],
true,
- vec![],
);
glean.register_ping_type(&ping_type);
@@ -1595,7 +1575,6 @@ mod test {
vec![],
vec![],
true,
- vec![],
);
glean.register_ping_type(&ping_type);
@@ -1678,7 +1657,6 @@ mod test {
vec![],
vec![],
true,
- vec![],
);
glean.register_ping_type(&ping_type);
@@ -1763,7 +1741,6 @@ mod test {
vec![],
vec![],
true,
- vec![],
);
glean.register_ping_type(&ping_type);
@@ -1864,7 +1841,6 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "ping-name".into(),
- uploader_capabilities: vec![],
},
);
upload_manager.enqueue_ping(
@@ -1876,7 +1852,6 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "ping-name".into(),
- uploader_capabilities: vec![],
},
);
@@ -1942,7 +1917,6 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "ping-name".into(),
- uploader_capabilities: vec![],
};
upload_manager.enqueue_ping(&glean, ping);
assert!(upload_manager.get_upload_task(&glean, false).is_upload());
@@ -1955,7 +1929,6 @@ mod test {
headers: None,
body_has_info_sections: true,
ping_name: "ping-name".into(),
- uploader_capabilities: vec![],
};
upload_manager.enqueue_ping(&glean, ping);
diff --git a/third_party/rust/glean-core/src/upload/request.rs b/third_party/rust/glean-core/src/upload/request.rs
index d0bc10d8d7a1..6f3b0c0e5c8a 100644
--- a/third_party/rust/glean-core/src/upload/request.rs
+++ b/third_party/rust/glean-core/src/upload/request.rs
@@ -64,7 +64,6 @@ pub struct Builder {
body_max_size: usize,
body_has_info_sections: Option,
ping_name: Option,
- uploader_capabilities: Option<Vec<String>>,
}
impl Builder {
@@ -92,7 +91,6 @@ impl Builder {
body_max_size,
body_has_info_sections: None,
ping_name: None,
- uploader_capabilities: None,
}
}
@@ -168,12 +166,6 @@ impl Builder {
self
}
- /// Sets the required uploader capabilities.
- pub fn uploader_capabilities(mut self, uploader_capabilities: Vec<String>) -> Self {
- self.uploader_capabilities = Some(uploader_capabilities);
- self
- }
-
/// Consumes the builder and create a PingRequest.
///
/// # Panics
@@ -204,9 +196,6 @@ impl Builder {
ping_name: self
.ping_name
.expect("ping_name must be set before attempting to build PingRequest"),
- uploader_capabilities: self
- .uploader_capabilities
- .expect("uploader_capabilities must be set before attempting to build PingRequest"),
})
}
}
@@ -229,8 +218,6 @@ pub struct PingRequest {
pub body_has_info_sections: bool,
/// The ping's name. Likely also somewhere in `path`.
pub ping_name: String,
- /// The capabilities required during this ping's upload.
- pub uploader_capabilities: Vec<String>,
}
impl PingRequest {
@@ -293,7 +280,6 @@ mod test {
.body("{}")
.body_has_info_sections(false)
.ping_name("whatevs")
- .uploader_capabilities(vec![])
.build()
.unwrap();
diff --git a/third_party/rust/glean-core/src/upload/result.rs b/third_party/rust/glean-core/src/upload/result.rs
index c4d0dd494964..3097af9d643a 100644
--- a/third_party/rust/glean-core/src/upload/result.rs
+++ b/third_party/rust/glean-core/src/upload/result.rs
@@ -25,16 +25,6 @@ pub enum UploadResult {
unused: i8,
},
- /// The uploader is not capable of uploading this request due to lack of or
- /// mismatched capabilities.
- ///
- /// e.g. The ping requires upload over OHTTP, but the uploader doesn't support OHTTP.
- Incapable {
- #[doc(hidden)]
- /// Unused field. Required because UniFFI can't handle variants without fields.
- unused: i8,
- },
-
/// A HTTP response code.
///
/// This can still indicate an error, depending on the status code.
@@ -65,7 +55,6 @@ impl UploadResult {
UploadResult::HttpStatus { .. } => Some("status_code_unknown"),
UploadResult::UnrecoverableFailure { .. } => Some("unrecoverable"),
UploadResult::RecoverableFailure { .. } => Some("recoverable"),
- UploadResult::Incapable { .. } => Some("incapable"),
UploadResult::Done { .. } => None,
}
}
@@ -86,14 +75,6 @@ impl UploadResult {
Self::UnrecoverableFailure { unused: 0 }
}
- /// The uploader is not capable of uploading this request due to lack of or
- /// mismatched capabilities.
- ///
- /// e.g. The ping requires upload over OHTTP, but the uploader doesn't support OHTTP.
- pub fn incapable() -> Self {
- Self::Incapable { unused: 0 }
- }
-
/// A HTTP response code.
///
/// This can still indicate an error, depending on the status code.
diff --git a/third_party/rust/glean-core/tests/collection_enabled.rs b/third_party/rust/glean-core/tests/collection_enabled.rs
index 130f511d389e..4e07a5e7d929 100644
--- a/third_party/rust/glean-core/tests/collection_enabled.rs
+++ b/third_party/rust/glean-core/tests/collection_enabled.rs
@@ -12,18 +12,33 @@ use glean_core::Lifetime;
fn nofollows_ping(glean: &mut Glean) -> PingType {
// When `follows_collection_enabled=false` then by default `enabled=false`
- let ping = PingBuilder::new("nofollows")
- .with_send_if_empty(true)
- .with_include_info_sections(false)
- .with_enabled(false)
- .with_follows_collection_enabled(false)
- .build();
+ let ping = PingType::new(
+ "nofollows",
+ /* include_client_id */ false,
+ /* send_if_empty */ true,
+ /* precise_timestamps */ true,
+ /* include_info_sections */ false,
+ /* enabled */ false,
+ vec![],
+ vec![],
+ /* follows_collection_enabled */ false,
+ );
glean.register_ping_type(&ping);
ping
}
fn manual_ping(glean: &mut Glean) -> PingType {
- let ping = PingBuilder::new("manual").build();
+ let ping = PingType::new(
+ "manual",
+ /* include_client_id */ true,
+ /* send_if_empty */ false,
+ /* precise_timestamps */ true,
+ /* include_info_sections */ true,
+ /* enabled */ true,
+ vec![],
+ vec![],
+ /* collection_enabled */ true,
+ );
glean.register_ping_type(&ping);
ping
}
@@ -89,9 +104,17 @@ fn nofollows_ping_can_ride_along() {
let nofollows_ping = nofollows_ping(&mut glean);
// Basically `manual_ping` but with a ride-along
- let manual_ping = PingBuilder::new("manual")
- .with_schedules_pings(vec!["nofollows".to_string()])
- .build();
+ let manual_ping = PingType::new(
+ "manual",
+ /* include_client_id */ true,
+ /* send_if_empty */ false,
+ /* precise_timestamps */ true,
+ /* include_info_sections */ true,
+ /* enabled */ true,
+ vec!["nofollows".to_string()],
+ vec![],
+ /* collection_enabled */ true,
+ );
glean.register_ping_type(&manual_ping);
// We need to store a metric as an empty ping is not stored.
diff --git a/third_party/rust/glean-core/tests/common/mod.rs b/third_party/rust/glean-core/tests/common/mod.rs
index 3615ae0cf6b2..5d11f4eeabc7 100644
--- a/third_party/rust/glean-core/tests/common/mod.rs
+++ b/third_party/rust/glean-core/tests/common/mod.rs
@@ -78,86 +78,11 @@ pub fn new_glean(tempdir: Option) -> (Glean, tempfile::TempDi
}
pub fn new_test_ping(glean: &mut Glean, name: &str) -> PingType {
- let ping = PingBuilder::new(name).build();
+ let ping = PingType::new(name, true, false, true, true, true, vec![], vec![], true);
glean.register_ping_type(&ping);
ping
}
-pub struct PingBuilder {
- name: String,
- include_client_id: bool,
- send_if_empty: bool,
- precise_timestamps: bool,
- include_info_sections: bool,
- enabled: bool,
- schedules_pings: Vec<String>,
- reason_codes: Vec<String>,
- follows_collection_enabled: bool,
- uploader_capabilities: Vec<String>,
-}
-
-impl PingBuilder {
- pub fn new(name: &str) -> Self {
- Self {
- name: name.to_string(),
- include_client_id: true,
- send_if_empty: false,
- precise_timestamps: true,
- include_info_sections: true,
- enabled: true,
- schedules_pings: vec![],
- reason_codes: vec![],
- follows_collection_enabled: true,
- uploader_capabilities: vec![],
- }
- }
-
- pub fn build(self) -> PingType {
- PingType::new(
- self.name,
- self.include_client_id,
- self.send_if_empty,
- self.precise_timestamps,
- self.include_info_sections,
- self.enabled,
- self.schedules_pings,
- self.reason_codes,
- self.follows_collection_enabled,
- self.uploader_capabilities,
- )
- }
-
- pub fn with_send_if_empty(mut self, value: bool) -> Self {
- self.send_if_empty = value;
- self
- }
-
- pub fn with_include_info_sections(mut self, value: bool) -> Self {
- self.include_info_sections = value;
- self
- }
-
- pub fn with_enabled(mut self, value: bool) -> Self {
- self.enabled = value;
- self
- }
-
- pub fn with_follows_collection_enabled(mut self, value: bool) -> Self {
- self.follows_collection_enabled = value;
- self
- }
-
- pub fn with_schedules_pings(mut self, value: Vec<String>) -> Self {
- self.schedules_pings = value;
- self
- }
-
- pub fn with_reasons(mut self, value: Vec<String>) -> Self {
- self.reason_codes = value;
- self
- }
-}
-
/// Converts an iso8601::DateTime to a chrono::DateTime
pub fn iso8601_to_chrono(datetime: &iso8601::DateTime) -> chrono::DateTime {
if let YMD { year, month, day } = datetime.date {
diff --git a/third_party/rust/glean-core/tests/event.rs b/third_party/rust/glean-core/tests/event.rs
index 31b2d5104a76..5ea9ab615d7f 100644
--- a/third_party/rust/glean-core/tests/event.rs
+++ b/third_party/rust/glean-core/tests/event.rs
@@ -163,11 +163,17 @@ fn test_sending_of_event_ping_when_it_fills_up() {
let store_names: Vec<String> = vec!["events".into()];
for store_name in &store_names {
- glean.register_ping_type(
- &PingBuilder::new(store_name)
- .with_reasons(vec!["max_capacity".to_string()])
- .build(),
- );
+ glean.register_ping_type(&PingType::new(
+ store_name.clone(),
+ true,
+ false,
+ true,
+ true,
+ true,
+ vec![],
+ vec!["max_capacity".to_string()],
+ true,
+ ));
}
let click = EventMetric::new(
@@ -225,11 +231,17 @@ fn test_server_knobs_config_changing_max_events() {
let store_names: Vec<String> = vec!["events".into()];
for store_name in &store_names {
- glean.register_ping_type(
- &PingBuilder::new(store_name)
- .with_reasons(vec!["max_capacity".to_string()])
- .build(),
- );
+ glean.register_ping_type(&PingType::new(
+ store_name.clone(),
+ true,
+ false,
+ true,
+ true,
+ true,
+ vec![],
+ vec!["max_capacity".to_string()],
+ true,
+ ));
}
// 1. Set up an event to record
@@ -501,7 +513,17 @@ fn event_storage_trimming() {
let new_ping = |glean: &mut Glean, ping: &str| {
// In Rust, pings are registered via construction.
// But that's done asynchronously, so we do it synchronously here:
- glean.register_ping_type(&PingBuilder::new(ping).build());
+ glean.register_ping_type(&PingType::new(
+ ping.to_string(),
+ true,
+ false,
+ true,
+ true,
+ true,
+ vec![],
+ vec![],
+ true,
+ ));
};
// First, register both pings, so that we can record the event in the two pings.
@@ -556,7 +578,17 @@ fn with_event_timestamps() {
ping_lifetime_max_time: 0,
};
let mut glean = Glean::new(cfg).unwrap();
- let ping = PingBuilder::new("store1").build();
+ let ping = PingType::new(
+ "store1",
+ true,
+ false,
+ true,
+ true,
+ true,
+ vec![],
+ vec![],
+ true,
+ );
glean.register_ping_type(&ping);
let store_name = "store1";
diff --git a/third_party/rust/glean-core/tests/ping.rs b/third_party/rust/glean-core/tests/ping.rs
index cb6309216095..7836f6f521ba 100644
--- a/third_party/rust/glean-core/tests/ping.rs
+++ b/third_party/rust/glean-core/tests/ping.rs
@@ -104,11 +104,29 @@ fn deletion_request_only_when_toggled_from_on_to_off() {
fn empty_pings_with_flag_are_sent() {
let (mut glean, _t) = new_glean(None);
- let ping1 = PingBuilder::new("custom-ping1")
- .with_send_if_empty(true)
- .build();
+ let ping1 = PingType::new(
+ "custom-ping1",
+ true,
+ true,
+ true,
+ true,
+ true,
+ vec![],
+ vec![],
+ true,
+ );
glean.register_ping_type(&ping1);
- let ping2 = PingBuilder::new("custom-ping2").build();
+ let ping2 = PingType::new(
+ "custom-ping2",
+ true,
+ false,
+ true,
+ true,
+ true,
+ vec![],
+ vec![],
+ true,
+ );
glean.register_ping_type(&ping2);
// No data is stored in either of the custom pings
@@ -145,7 +163,7 @@ fn test_pings_submitted_metric() {
let metrics_ping = new_test_ping(&mut glean, "metrics");
let baseline_ping = new_test_ping(&mut glean, "baseline");
- let custom_ping = PingBuilder::new("custom").with_send_if_empty(true).build();
+ let custom_ping = PingType::new("custom", true, true, true, true, true, vec![], vec![], true);
glean.register_ping_type(&custom_ping);
// We need to store a metric as an empty ping is not stored.
@@ -278,15 +296,30 @@ fn events_ping_with_metric_but_no_events_is_not_sent() {
fn test_scheduled_pings_are_sent() {
let (mut glean, _t) = new_glean(None);
- let piggyback_ping = PingBuilder::new("piggyback")
- .with_send_if_empty(true)
- .build();
+ let piggyback_ping = PingType::new(
+ "piggyback",
+ true,
+ true,
+ true,
+ true,
+ true,
+ vec![],
+ vec![],
+ true,
+ );
glean.register_ping_type(&piggyback_ping);
- let trigger_ping = PingBuilder::new("trigger")
- .with_send_if_empty(true)
- .with_schedules_pings(vec!["piggyback".into()])
- .build();
+ let trigger_ping = PingType::new(
+ "trigger",
+ true,
+ true,
+ true,
+ true,
+ true,
+ vec!["piggyback".into()],
+ vec![],
+ true,
+ );
glean.register_ping_type(&trigger_ping);
assert!(trigger_ping.submit_sync(&glean, None));
diff --git a/third_party/rust/glean-core/tests/ping_maker.rs b/third_party/rust/glean-core/tests/ping_maker.rs
index 2d51f648b4de..7bd9dd823f24 100644
--- a/third_party/rust/glean-core/tests/ping_maker.rs
+++ b/third_party/rust/glean-core/tests/ping_maker.rs
@@ -97,7 +97,17 @@ fn test_metrics_must_report_experimentation_id() {
})
.unwrap();
let ping_maker = PingMaker::new();
- let ping_type = PingBuilder::new("store1").build();
+ let ping_type = PingType::new(
+ "store1",
+ true,
+ false,
+ true,
+ true,
+ true,
+ vec![],
+ vec![],
+ true,
+ );
glean.register_ping_type(&ping_type);
// Record something, so the ping will have data
@@ -154,7 +164,17 @@ fn experimentation_id_is_removed_if_send_if_empty_is_false() {
.unwrap();
let ping_maker = PingMaker::new();
- let unknown_ping_type = PingBuilder::new("unknown").build();
+ let unknown_ping_type = PingType::new(
+ "unknown",
+ true,
+ false,
+ true,
+ true,
+ true,
+ vec![],
+ vec![],
+ true,
+ );
glean.register_ping_type(&unknown_ping_type);
assert!(ping_maker
@@ -170,7 +190,17 @@ fn collect_must_report_none_when_no_data_is_stored() {
let (mut glean, ping_maker, ping_type, _t) = set_up_basic_ping();
- let unknown_ping_type = PingBuilder::new("unknown").build();
+ let unknown_ping_type = PingType::new(
+ "unknown",
+ true,
+ false,
+ true,
+ true,
+ true,
+ vec![],
+ vec![],
+ true,
+ );
glean.register_ping_type(&ping_type);
assert!(ping_maker
@@ -194,7 +224,17 @@ fn seq_number_must_be_sequential() {
for i in 0..=1 {
for ping_name in ["store1", "store2"].iter() {
- let ping_type = PingBuilder::new(ping_name).build();
+ let ping_type = PingType::new(
+ *ping_name,
+ true,
+ false,
+ true,
+ true,
+ true,
+ vec![],
+ vec![],
+ true,
+ );
let ping = ping_maker
.collect(&glean, &ping_type, None, "", "")
.unwrap();
@@ -279,7 +319,7 @@ fn no_pings_submitted_if_upload_disabled() {
// Regression test, bug 1603571
let (mut glean, _t) = new_glean(None);
- let ping_type = PingBuilder::new("store1").with_send_if_empty(true).build();
+ let ping_type = PingType::new("store1", true, true, true, true, true, vec![], vec![], true);
glean.register_ping_type(&ping_type);
assert!(ping_type.submit_sync(&glean, None));
@@ -297,7 +337,7 @@ fn no_pings_submitted_if_upload_disabled() {
fn metadata_is_correctly_added_when_necessary() {
let (mut glean, _t) = new_glean(None);
glean.set_debug_view_tag("valid-tag");
- let ping_type = PingBuilder::new("store1").with_send_if_empty(true).build();
+ let ping_type = PingType::new("store1", true, true, true, true, true, vec![], vec![], true);
glean.register_ping_type(&ping_type);
assert!(ping_type.submit_sync(&glean, None));
diff --git a/third_party/rust/glean/.cargo-checksum.json b/third_party/rust/glean/.cargo-checksum.json
index 94a298206bd4..f306d8d3cb74 100644
--- a/third_party/rust/glean/.cargo-checksum.json
+++ b/third_party/rust/glean/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.lock":"e4d79c41440e41fe7d6aebfb531cd0f2d2459494fd1caac405c81313d5e16724","Cargo.toml":"8fe2df77137fdf166ce01926338d7f4d42811406b6c994984953e86ee6dfffc2","LICENSE":"1f256ecad192880510e84ad60474eab7589218784b9a50bc7ceee34c2b91f1d5","README.md":"5627cc81e6187ab6c2b4dff061af16d559edcab64ba786bac39daa69c703c595","src/common_test.rs":"c86cccfb7da1506cfed29cb2ee13d839b7ac7cffdfd70793c9665bb44e0b684f","src/configuration.rs":"de65ab99a26b4547be20803bc195cb50a6ab40b1a3f49a2e6230fed5a9d7a8d8","src/core_metrics.rs":"fef8fb4e5fa57c179836c6eb2cf59278fe3b8b036dbe57b0ff02971b4acd822f","src/lib.rs":"61b56a35c2bc6cd60bba2225b399881512d4b9a7d8cadca7fbed37ee6959d74c","src/net/http_uploader.rs":"0a94ac3cd87cb021529dee46d537765ab8d923e0f4ac7615225e878d3739e6dc","src/net/mod.rs":"09ba010b03d045fd8a2ccbe4f205c5275bb622bceb34cb81a0aa8f7d33804e2e","src/private/event.rs":"f299c79e4e2acb657f06004f3038bd8909e287719458566bc7f96262d8665e62","src/private/mod.rs":"66e90c41de74d1e80c5d3f49b8f1a86b8396be0b8c4a80f1a28903fe6d105ecf","src/private/object.rs":"7f17a7a658e8f7aa19a6bedf70f60f3f42713316d5d60298d682bb045caaafb7","src/private/ping.rs":"d2fb45e9e178ff6b17aa9c1b5258dfcd2ed91a2b43b44dec826de256ef5e8520","src/system.rs":"d602804a72258bfd65e51c571946631732ee27d81342d8aa406e47fdd241bbfa","src/test.rs":"bfbea9416dfdc96ebc1f9af5005b5b23f2285b74ef82c74cdab11635322ea3e3","tests/collection_enabled.rs":"3327a949dbdeec493d661261abda68ffa71acc50ab24cba4fde5302749e6f16b","tests/collection_enabled_bin.rs":"d3a6458b84012a447e5cb792f2292a06951ed252fad803b9166b437bacba542c","tests/common/mod.rs":"2fd391c5eb45f56fdfa3261dd631406c67ed36b10b0d5432febe2483da5c9d89","tests/custom_distribution_buffered.rs":"47c13d1f39adf3881e10caa19e0c08235f08958809e234bf37a79d37d7322cd5","tests/init_fails.rs":"ca7fa1b3dd6a21a9e005b7a4f0a18664c4bceb952dd463db8316500f72280d5b","tests/interruptible_shutdown.rs":"3d954bbe47d4f5fd103c51a4ff99f151662143c25c826da9734a00cd215909b9","tests/memory_distribution_buffered.rs":"d
b487475a5cf17a0864ccf150984ebdd28bf616573772cf678246cc1bdbcbc0f","tests/metric_metadata.rs":"05c947d3decf0a3281378dbb108080a05319ad8f130af5b07f9b049b80e5f04f","tests/near-empty-c0ffee-db.safe.bin":"89afb3bb8fc94430fb0ed0fe55f85f3f8bcc8fd0fed69a9df13cc560294ec9f5","tests/never_init.rs":"51fff5618f6603bc0945d70131698d10a1c6275f43bbc22a2de5807f8a79229f","tests/no_time_to_init.rs":"2ede23df6618ff1cb5ae3b7bbf95900ad0fd92072afa2e0319bf147b4f75cefc","tests/overflowing_preinit.rs":"985e140460a100986fd051ce901b787a3a7a9747a856cd06066b740ac7d2381c","tests/persist_ping_lifetime_nopanic.rs":"18379d3ffbf4a2c8c684c04ff7a0660b86dfbbb447db2d24dfed6073cb7ddf8f","tests/schema.rs":"23b49005402b914e55a0c5c155f30c2662c609f79be78d1385ec25b3600b3547","tests/simple.rs":"15c76a1b5a336fd6abfbdebafc971f5c6a9b75107ddbca65f0031cde3e2886da","tests/test-delayed-ping-data.sh":"4a6db98b4df6b77898ace6a8b4e8b4c60d3e5c44873bbf38c62e83583e27a3ff","tests/test-enabled-pings.sh":"06656e38f63e65475006b107dd6bd179b0cbaa1fad1470de38e679e91a9315a3","tests/test-pending-gets-removed.sh":"e335f2f00fa97a61b6d94e0005fb3b9de8c8db8076111a67ca47d85392039ea9","tests/test-ping-lifetime-flush.sh":"e8f118ea2f6fd973809e38d5e828a03cfccfe0b0f497ccde5ec92d6d1380c071","tests/test-shutdown-blocking.sh":"a44d8d4bbe2ee3ede9e48121150ae7a5386025160c5cef2181ca142232c5fb27","tests/test-thread-crashing.sh":"f3cd0cc8a7b4fe82bef0fe6fbfbbe45fbad6da3afe0f82578bc5cfb2d6527ac6","tests/timing_distribution_buffered.rs":"501f7289c0c28f0ab83838c88b058999b19436d0f2b693be0787513d7b67e06d","tests/timing_distribution_single_sample.rs":"4f9498b6ef29913da0356027efe5f572c81d2f426e8538c068b54a1cfa33c1b8","tests/upload_timing.rs":"b3b9db197bc2ec41556388969a6bf289e7ef19e05b9019bc2bd98c823fcf6ea3","tests/uploader_capabilities.rs":"347f19e534a50a211ea179d6818631270d1b4ec468098e6b6abcde1e4a6a9bca"},"package":"ba92338cfd9fb34b00c02c6da8e22936b41835eb02ab5462d3d88cc4b6249c35"}
\ No newline at end of file
+{"files":{"Cargo.lock":"d5243e925c951fc394126291886afc070712380f2faa93a68c3f571c7fccc105","Cargo.toml":"f86642423b6bb7faed3108bdf55e4add8e33ccb98dda8a15ede988db2b8fa974","LICENSE":"1f256ecad192880510e84ad60474eab7589218784b9a50bc7ceee34c2b91f1d5","README.md":"5627cc81e6187ab6c2b4dff061af16d559edcab64ba786bac39daa69c703c595","src/common_test.rs":"997f5331b719f82d86bb5e2f8e711da9cfb9433403e233c04a0ff39c3de5f7d0","src/configuration.rs":"de65ab99a26b4547be20803bc195cb50a6ab40b1a3f49a2e6230fed5a9d7a8d8","src/core_metrics.rs":"fef8fb4e5fa57c179836c6eb2cf59278fe3b8b036dbe57b0ff02971b4acd822f","src/lib.rs":"97d7d8001e091bd009e579ccb296a2355f523e5608e81e0485b3492347b40989","src/net/http_uploader.rs":"01ad5bd91384411a12c74434cd1c5cd585078cb34faba4615c70bdb669a9bccb","src/net/mod.rs":"5dff006240a6522e1db988514f22fb9361b3dece0c22fcf9eb8ff1b3308dd8f0","src/private/event.rs":"f299c79e4e2acb657f06004f3038bd8909e287719458566bc7f96262d8665e62","src/private/mod.rs":"66e90c41de74d1e80c5d3f49b8f1a86b8396be0b8c4a80f1a28903fe6d105ecf","src/private/object.rs":"7f17a7a658e8f7aa19a6bedf70f60f3f42713316d5d60298d682bb045caaafb7","src/private/ping.rs":"3b126183d4a5fdc200a9ded45c9a656d7d1e4c44e0d7e1c22f1b0e6968b07630","src/system.rs":"6eae5b41c15eba9cad6dbd116abe3519ee3e1fe034e79bdd692b029829a8c384","src/test.rs":"7c5f67bdce46bdb14b77cde0b716c2c2e0ab831e6c01b2e417c348c562289cac","tests/common/mod.rs":"68b0fca253f5c773cdb54d10a02d324d7c74ed5e16d4ba96387e4b643af2c0f3","tests/custom_distribution_buffered.rs":"47c13d1f39adf3881e10caa19e0c08235f08958809e234bf37a79d37d7322cd5","tests/init_fails.rs":"073b8c244ecbcae8e9cfc12cffd0629038bd978a4a4337073dbed6866023317b","tests/interruptible_shutdown.rs":"17b674c5960f3787ba0c51dc54f0c3759403427ad819985ad85f254e261002ab","tests/memory_distribution_buffered.rs":"db487475a5cf17a0864ccf150984ebdd28bf616573772cf678246cc1bdbcbc0f","tests/metric_metadata.rs":"05c947d3decf0a3281378dbb108080a05319ad8f130af5b07f9b049b80e5f04f","tests/never_init.rs":"fcbba9034f829eef
0f54ff650f6442ad75cdd609bdd02f45472fd4456f8e3a66","tests/no_time_to_init.rs":"0a2027de97188a82f97ba6a45c75c740917eea4e1f4bd4b947b6da3da7c354ed","tests/overflowing_preinit.rs":"985e140460a100986fd051ce901b787a3a7a9747a856cd06066b740ac7d2381c","tests/persist_ping_lifetime_nopanic.rs":"18379d3ffbf4a2c8c684c04ff7a0660b86dfbbb447db2d24dfed6073cb7ddf8f","tests/schema.rs":"da8f808f7cfd42b0cefd5dd04ca87d514392476ba268a32c140d3293c9332caf","tests/simple.rs":"4991afdbd037e789af2325fb87dc4a1e0fbbfa63aa54f1f22dc8bf01190473c7","tests/test-delayed-ping-data.sh":"4a6db98b4df6b77898ace6a8b4e8b4c60d3e5c44873bbf38c62e83583e27a3ff","tests/test-enabled-pings.sh":"06656e38f63e65475006b107dd6bd179b0cbaa1fad1470de38e679e91a9315a3","tests/test-pending-gets-removed.sh":"e335f2f00fa97a61b6d94e0005fb3b9de8c8db8076111a67ca47d85392039ea9","tests/test-ping-lifetime-flush.sh":"e8f118ea2f6fd973809e38d5e828a03cfccfe0b0f497ccde5ec92d6d1380c071","tests/test-shutdown-blocking.sh":"a44d8d4bbe2ee3ede9e48121150ae7a5386025160c5cef2181ca142232c5fb27","tests/test-thread-crashing.sh":"f3cd0cc8a7b4fe82bef0fe6fbfbbe45fbad6da3afe0f82578bc5cfb2d6527ac6","tests/timing_distribution_buffered.rs":"501f7289c0c28f0ab83838c88b058999b19436d0f2b693be0787513d7b67e06d","tests/timing_distribution_single_sample.rs":"4f9498b6ef29913da0356027efe5f572c81d2f426e8538c068b54a1cfa33c1b8","tests/upload_timing.rs":"8b9ed65eaba3d51faf3cb62d1280d2737f234e0332615bfe6d9c60aab44b6560"},"package":"e2afa6754943cac5243099efd0d26e89cc8e06f1585776ba14ab0c6ee99e1f71"}
\ No newline at end of file
diff --git a/third_party/rust/glean/Cargo.lock b/third_party/rust/glean/Cargo.lock
index dfda1a409637..47f57289510d 100644
--- a/third_party/rust/glean/Cargo.lock
+++ b/third_party/rust/glean/Cargo.lock
@@ -28,9 +28,9 @@ dependencies = [
[[package]]
name = "anyhow"
-version = "1.0.95"
+version = "1.0.71"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04"
+checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
[[package]]
name = "arrayref"
@@ -39,16 +39,48 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544"
[[package]]
-name = "autocfg"
-version = "1.4.0"
+name = "askama"
+version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
+checksum = "47cbc3cf73fa8d9833727bbee4835ba5c421a0d65b72daf9a7b5d0e0f9cfb57e"
+dependencies = [
+ "askama_derive",
+ "askama_escape",
+]
+
+[[package]]
+name = "askama_derive"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c22fbe0413545c098358e56966ff22cdd039e10215ae213cfbd65032b119fc94"
+dependencies = [
+ "basic-toml",
+ "mime",
+ "mime_guess",
+ "nom",
+ "proc-macro2",
+ "quote",
+ "serde",
+ "syn",
+]
+
+[[package]]
+name = "askama_escape"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "619743e34b5ba4e9703bba34deac3427c72507c7159f5fd030aea8cac0cfe341"
+
+[[package]]
+name = "autocfg"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "basic-toml"
-version = "0.1.9"
+version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "823388e228f614e9558c6804262db37960ec8821856535f5c3f59913140558f8"
+checksum = "5c0de75129aa8d0cceaf750b89013f0e08804d6ec61416da787b35ad0d7cddf1"
dependencies = [
"serde",
]
@@ -85,15 +117,41 @@ checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
[[package]]
name = "bytes"
-version = "1.9.0"
+version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b"
+checksum = "dfb24e866b15a1af2a1b663f10c6b6b8f397a84aadb828f12e5b289ec23a3a3c"
[[package]]
name = "camino"
-version = "1.1.9"
+version = "1.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3"
+checksum = "c530edf18f37068ac2d977409ed5cd50d53d73bc653c7647b48eb78976ac9ae2"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "cargo-platform"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cbdb825da8a5df079a43676dbe042702f1707b1109f713a01420fbb4cc71fa27"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "cargo_metadata"
+version = "0.15.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "982a0cf6a99c350d7246035613882e376d58cebe571785abc5da4f648d53ac0a"
+dependencies = [
+ "camino",
+ "cargo-platform",
+ "semver",
+ "serde",
+ "serde_json",
+ "thiserror",
+]
[[package]]
name = "cc"
@@ -222,12 +280,9 @@ dependencies = [
[[package]]
name = "fs-err"
-version = "2.11.0"
+version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "88a41f105fe1d5b6b34b2055e3dc59bb79b46b48b2040b9e6c7b4b5de097aa41"
-dependencies = [
- "autocfg",
-]
+checksum = "0845fa252299212f0389d64ba26f34fa32cfe41588355f21ed507c59a0f64541"
[[package]]
name = "getrandom"
@@ -242,7 +297,7 @@ dependencies = [
[[package]]
name = "glean"
-version = "64.0.0"
+version = "63.1.0"
dependencies = [
"crossbeam-channel",
"env_logger",
@@ -260,9 +315,9 @@ dependencies = [
[[package]]
name = "glean-core"
-version = "64.0.0"
+version = "63.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "92a2cbf41fbb9996b14fc1721b8bd06e669589de05e6efc20a24bab14285623a"
+checksum = "53cd53bb7a3b89b17d3989e95dd808b137ff47c504d1d19f14cb0d820cc2f42e"
dependencies = [
"android_logger",
"bincode",
@@ -284,15 +339,15 @@ dependencies = [
[[package]]
name = "glob"
-version = "0.3.2"
+version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2"
+checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
[[package]]
name = "goblin"
-version = "0.8.2"
+version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1b363a30c165f666402fe6a3024d3bec7ebc898f96a4a23bd1c99f8dbf3f4f47"
+checksum = "bb07a4ffed2093b118a525b1d8f5204ae274faed5604537caf7135d0f18d9887"
dependencies = [
"log",
"plain",
@@ -364,9 +419,9 @@ dependencies = [
[[package]]
name = "itoa"
-version = "1.0.14"
+version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674"
+checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc"
[[package]]
name = "json-pointer"
@@ -415,27 +470,27 @@ checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f"
[[package]]
name = "log"
-version = "0.4.25"
+version = "0.4.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f"
+checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
[[package]]
name = "memchr"
-version = "2.7.4"
+version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
+checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "mime"
-version = "0.3.17"
+version = "0.3.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
+checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d"
[[package]]
name = "mime_guess"
-version = "2.0.5"
+version = "2.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e"
+checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef"
dependencies = [
"mime",
"unicase",
@@ -458,9 +513,9 @@ dependencies = [
[[package]]
name = "nom"
-version = "7.1.3"
+version = "7.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
+checksum = "a8903e5a29a317527874d0402f867152a3d21c908bb0b933e416c65e301d4c36"
dependencies = [
"memchr",
"minimal-lexical",
@@ -497,9 +552,9 @@ dependencies = [
[[package]]
name = "once_cell"
-version = "1.20.2"
+version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
+checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
[[package]]
name = "ordered-float"
@@ -523,9 +578,9 @@ dependencies = [
[[package]]
name = "paste"
-version = "1.0.15"
+version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
+checksum = "cf1c2c742266c2f1041c914ba65355a83ae8747b05f208319784083583494b4b"
[[package]]
name = "percent-encoding"
@@ -541,18 +596,18 @@ checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6"
[[package]]
name = "proc-macro2"
-version = "1.0.93"
+version = "1.0.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99"
+checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
-version = "1.0.38"
+version = "1.0.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc"
+checksum = "5fe8a65d69dd0808184ebb5f836ab526bb259db23c657efa38711b1072ee47f0"
dependencies = [
"proc-macro2",
]
@@ -581,45 +636,6 @@ version = "0.6.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
-[[package]]
-name = "rinja"
-version = "0.3.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3dc4940d00595430b3d7d5a01f6222b5e5b51395d1120bdb28d854bb8abb17a5"
-dependencies = [
- "itoa",
- "rinja_derive",
-]
-
-[[package]]
-name = "rinja_derive"
-version = "0.3.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08d9ed0146aef6e2825f1b1515f074510549efba38d71f4554eec32eb36ba18b"
-dependencies = [
- "basic-toml",
- "memchr",
- "mime",
- "mime_guess",
- "proc-macro2",
- "quote",
- "rinja_parser",
- "rustc-hash",
- "serde",
- "syn",
-]
-
-[[package]]
-name = "rinja_parser"
-version = "0.3.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "93f9a866e2e00a7a1fb27e46e9e324a6f7c0e7edc4543cae1d38f4e4a100c610"
-dependencies = [
- "memchr",
- "nom",
- "serde",
-]
-
[[package]]
name = "rkv"
version = "0.19.0"
@@ -642,12 +658,6 @@ dependencies = [
"uuid",
]
-[[package]]
-name = "rustc-hash"
-version = "2.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
-
[[package]]
name = "rustix"
version = "0.38.20"
@@ -663,9 +673,9 @@ dependencies = [
[[package]]
name = "ryu"
-version = "1.0.19"
+version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ea1a2d0a644769cc99faa24c3ad26b379b786fe7c36fd3c546254801650e6dd"
+checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09"
[[package]]
name = "scroll"
@@ -688,19 +698,28 @@ dependencies = [
]
[[package]]
-name = "serde"
-version = "1.0.217"
+name = "semver"
+version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70"
+checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "serde"
+version = "1.0.179"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0a5bf42b8d227d4abf38a1ddb08602e229108a517cd4e5bb28f9c7eaafdce5c0"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
-version = "1.0.217"
+version = "1.0.179"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0"
+checksum = "741e124f5485c7e60c03b043f79f320bff3527f4bbf12cf3831750dc46a0ec2c"
dependencies = [
"proc-macro2",
"quote",
@@ -709,21 +728,20 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.138"
+version = "1.0.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d434192e7da787e94a6ea7e9670b26a036d0ca41e0b7efb2676dd32bae872949"
+checksum = "020ff22c755c2ed3f8cf162dbb41a7268d934702f3ed3631656ea597e08fc3db"
dependencies = [
"itoa",
- "memchr",
"ryu",
"serde",
]
[[package]]
name = "siphasher"
-version = "0.3.11"
+version = "0.3.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d"
+checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de"
[[package]]
name = "smawk"
@@ -739,9 +757,9 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "syn"
-version = "2.0.98"
+version = "2.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1"
+checksum = "45c3457aacde3c65315de5031ec191ce46604304d2446e803d71ade03308d970"
dependencies = [
"proc-macro2",
"quote",
@@ -763,27 +781,27 @@ dependencies = [
[[package]]
name = "textwrap"
-version = "0.16.1"
+version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9"
+checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d"
dependencies = [
"smawk",
]
[[package]]
name = "thiserror"
-version = "1.0.69"
+version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
+checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
-version = "1.0.69"
+version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
+checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f"
dependencies = [
"proc-macro2",
"quote",
@@ -818,18 +836,21 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
[[package]]
name = "toml"
-version = "0.5.11"
+version = "0.5.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234"
+checksum = "1333c76748e868a4d9d1017b5ab53171dfd095f70c712fdb4653a406547f598f"
dependencies = [
"serde",
]
[[package]]
name = "unicase"
-version = "2.8.1"
+version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539"
+checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"
+dependencies = [
+ "version_check",
+]
[[package]]
name = "unicode-bidi"
@@ -839,9 +860,9 @@ checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992"
[[package]]
name = "unicode-ident"
-version = "1.0.16"
+version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a210d160f08b701c8721ba1c726c11662f877ea6b7094007e1ca9a1041945034"
+checksum = "b15811caf2415fb889178633e7724bad2509101cde276048e013b9def5e51fa0"
[[package]]
name = "unicode-normalization"
@@ -854,9 +875,9 @@ dependencies = [
[[package]]
name = "uniffi"
-version = "0.29.0"
+version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba62a57e90f9baed5ad02a71a0870180fa1cc35499093b2d21be2edfb68ec0f7"
+checksum = "2db87def739fe4183947f8419d572d1849a4a09355eba4e988a2105cfd0ac6a7"
dependencies = [
"anyhow",
"uniffi_build",
@@ -866,11 +887,12 @@ dependencies = [
[[package]]
name = "uniffi_bindgen"
-version = "0.29.0"
+version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2242f35214f1e0e3b47c495d340c69f649f9a9ece3a943a29e275686cc884533"
+checksum = "7a112599c9556d1581e4a3d72019a74c2c3e122cc27f4af12577a429c4d5e614"
dependencies = [
"anyhow",
+ "askama",
"camino",
"fs-err",
"glob",
@@ -878,7 +900,6 @@ dependencies = [
"heck",
"once_cell",
"paste",
- "rinja",
"serde",
"textwrap",
"toml",
@@ -888,9 +909,9 @@ dependencies = [
[[package]]
name = "uniffi_build"
-version = "0.29.0"
+version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c887a6c9a2857d8dc2ab0c8d578e8aa4978145b4fd65ed44296341e89aebc3cc"
+checksum = "e2b12684401d2a8508ca9c72a95bbc45906417e42fc80942abaf033bbf01aa33"
dependencies = [
"anyhow",
"camino",
@@ -898,34 +919,37 @@ dependencies = [
]
[[package]]
-name = "uniffi_core"
-version = "0.29.0"
+name = "uniffi_checksum_derive"
+version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cad9fbdeb7ae4daf8d0f7704a3b638c37018eb16bb701e30fa17a2dd3e2d39c1"
-dependencies = [
- "anyhow",
- "bytes",
- "once_cell",
- "paste",
- "static_assertions",
-]
-
-[[package]]
-name = "uniffi_internal_macros"
-version = "0.29.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22a9dba1d78b9ce429439891089c223478043d52a1c3176a0fcea2b5573a7fcf"
+checksum = "a22dbe67c1c957ac6e7611bdf605a6218aa86b0eebeb8be58b70ae85ad7d73dc"
dependencies = [
"quote",
"syn",
]
[[package]]
-name = "uniffi_macros"
-version = "0.29.0"
+name = "uniffi_core"
+version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78dd5f8eefba5898b901086f5e7916da67b9a5286a01cc44e910cd75fa37c630"
+checksum = "5a0c35aaad30e3a9e6d4fe34e358d64dbc92ee09045b48591b05fc9f12e0905b"
dependencies = [
+ "anyhow",
+ "bytes",
+ "camino",
+ "log",
+ "once_cell",
+ "paste",
+ "static_assertions",
+]
+
+[[package]]
+name = "uniffi_macros"
+version = "0.28.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db66474c5c61b0f7afc3b4995fecf9b72b340daa5ca0ef3da7778d75eb5482ea"
+dependencies = [
+ "bincode",
"camino",
"fs-err",
"once_cell",
@@ -939,24 +963,39 @@ dependencies = [
[[package]]
name = "uniffi_meta"
-version = "0.29.0"
+version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d5965b1d4ffacef1eaa72fef9c00d2491641e87ad910f6c5859b9c503ddb16a"
+checksum = "d898893f102e0e39b8bcb7e3d2188f4156ba280db32db9e8af1f122d057e9526"
dependencies = [
"anyhow",
+ "bytes",
"siphasher",
- "uniffi_internal_macros",
+ "uniffi_checksum_derive",
+]
+
+[[package]]
+name = "uniffi_testing"
+version = "0.28.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2c6aa4f0cf9d12172d84fc00a35a6c1f3522b526daad05ae739f709f6941b9b6"
+dependencies = [
+ "anyhow",
+ "camino",
+ "cargo_metadata",
+ "fs-err",
+ "once_cell",
]
[[package]]
name = "uniffi_udl"
-version = "0.29.0"
+version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "279b82bac9a382c796a0d210bb8354a0b813499b28aa1de046c85d78ca389805"
+checksum = "6b044e9c519e0bb51e516ab6f6d8f4f4dcf900ce30d5ad07c03f924e2824f28e"
dependencies = [
"anyhow",
"textwrap",
"uniffi_meta",
+ "uniffi_testing",
"weedle2",
]
@@ -980,6 +1019,12 @@ dependencies = [
"getrandom",
]
+[[package]]
+name = "version_check"
+version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+
[[package]]
name = "wasi"
version = "0.10.0+wasi-snapshot-preview1"
diff --git a/third_party/rust/glean/Cargo.toml b/third_party/rust/glean/Cargo.toml
index 0400c9e12b80..d22e80c8569e 100644
--- a/third_party/rust/glean/Cargo.toml
+++ b/third_party/rust/glean/Cargo.toml
@@ -13,7 +13,7 @@
edition = "2021"
rust-version = "1.76"
name = "glean"
-version = "64.0.0"
+version = "63.1.0"
authors = [
"Jan-Erik Rediger ",
"The Glean Team ",
@@ -26,7 +26,6 @@ include = [
"/tests",
"/Cargo.toml",
]
-autolib = false
autobins = false
autoexamples = false
autotests = false
@@ -40,25 +39,10 @@ keywords = [
license = "MPL-2.0"
repository = "https://github.com/mozilla/glean"
-[badges.circle-ci]
-branch = "main"
-repository = "mozilla/glean"
-
-[badges.maintenance]
-status = "actively-developed"
-
[lib]
name = "glean"
path = "src/lib.rs"
-[[test]]
-name = "collection_enabled"
-path = "tests/collection_enabled.rs"
-
-[[test]]
-name = "collection_enabled_bin"
-path = "tests/collection_enabled_bin.rs"
-
[[test]]
name = "custom_distribution_buffered"
path = "tests/custom_distribution_buffered.rs"
@@ -115,15 +99,11 @@ path = "tests/timing_distribution_single_sample.rs"
name = "upload_timing"
path = "tests/upload_timing.rs"
-[[test]]
-name = "uploader_capabilities"
-path = "tests/uploader_capabilities.rs"
-
[dependencies.crossbeam-channel]
version = "0.5"
[dependencies.glean-core]
-version = "64.0.0"
+version = "63.1.0"
[dependencies.inherent]
version = "1"
@@ -156,3 +136,10 @@ version = "1.0.44"
[dev-dependencies.tempfile]
version = "3.1.0"
+
+[badges.circle-ci]
+branch = "main"
+repository = "mozilla/glean"
+
+[badges.maintenance]
+status = "actively-developed"
diff --git a/third_party/rust/glean/src/common_test.rs b/third_party/rust/glean/src/common_test.rs
index 2cfa4ec288bb..13efb6c7230f 100644
--- a/third_party/rust/glean/src/common_test.rs
+++ b/third_party/rust/glean/src/common_test.rs
@@ -46,18 +46,7 @@ pub(crate) fn new_glean(
.build(),
};
- _ = PingType::new(
- "store1",
- true,
- true,
- true,
- true,
- true,
- vec![],
- vec![],
- true,
- vec![],
- );
+ _ = PingType::new("store1", true, true, true, true, true, vec![], vec![], true);
crate::test_reset_glean(cfg, ClientInfoMetrics::unknown(), clear_stores);
dir
diff --git a/third_party/rust/glean/src/lib.rs b/third_party/rust/glean/src/lib.rs
index d0fd006dbf35..549aae79a222 100644
--- a/third_party/rust/glean/src/lib.rs
+++ b/third_party/rust/glean/src/lib.rs
@@ -23,7 +23,7 @@
//! let cfg = ConfigurationBuilder::new(true, "/tmp/data", "org.mozilla.glean_core.example").build();
//! glean::initialize(cfg, ClientInfoMetrics::unknown());
//!
-//! let prototype_ping = PingType::new("prototype", true, true, true, true, true, vec!(), vec!(), true, vec![]);
+//! let prototype_ping = PingType::new("prototype", true, true, true, true, true, vec!(), vec!(), true);
//!
//! prototype_ping.submit(None);
//! ```
diff --git a/third_party/rust/glean/src/net/http_uploader.rs b/third_party/rust/glean/src/net/http_uploader.rs
index 243541550815..4ca1687acf5f 100644
--- a/third_party/rust/glean/src/net/http_uploader.rs
+++ b/third_party/rust/glean/src/net/http_uploader.rs
@@ -2,7 +2,7 @@
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at https://mozilla.org/MPL/2.0/.
-use crate::net::{CapablePingUploadRequest, PingUploader, UploadResult};
+use crate::net::{PingUploadRequest, PingUploader, UploadResult};
/// A simple mechanism to upload pings over HTTPS.
#[derive(Debug)]
@@ -14,8 +14,7 @@ impl PingUploader for HttpUploader {
/// # Arguments
///
/// * `upload_request` - the requested upload.
- fn upload(&self, upload_request: CapablePingUploadRequest) -> UploadResult {
- let upload_request = upload_request.capable(|_| true).unwrap();
+ fn upload(&self, upload_request: PingUploadRequest) -> UploadResult {
log::debug!("TODO bug 1675468: submitting to {:?}", upload_request.url);
UploadResult::http_status(200)
}
diff --git a/third_party/rust/glean/src/net/mod.rs b/third_party/rust/glean/src/net/mod.rs
index ae28cd114f5a..ee75e2b47e7a 100644
--- a/third_party/rust/glean/src/net/mod.rs
+++ b/third_party/rust/glean/src/net/mod.rs
@@ -34,26 +34,6 @@ pub struct PingUploadRequest {
pub ping_name: String,
}
-/// A PingUploadRequest requiring proof of uploader capability.
-pub struct CapablePingUploadRequest {
- request: PingUploadRequest,
- capabilities: Vec,
-}
-
-impl CapablePingUploadRequest {
- /// If you are capable of satisfying this ping upload request's capabilities,
- /// obtain the PingUploadRequest.
- pub fn capable(self, func: F) -> Option
- where
- F: FnOnce(Vec) -> bool,
- {
- if func(self.capabilities) {
- return Some(self.request);
- }
- None
- }
-}
-
/// A description of a component used to upload pings.
pub trait PingUploader: std::fmt::Debug + Send + Sync {
/// Uploads a ping to a server.
@@ -64,7 +44,7 @@ pub trait PingUploader: std::fmt::Debug + Send + Sync {
/// * `body` - the serialized text data to send.
/// * `headers` - a vector of tuples containing the headers to send with
/// the request, i.e. (Name, Value).
- fn upload(&self, upload_request: CapablePingUploadRequest) -> UploadResult;
+ fn upload(&self, upload_request: PingUploadRequest) -> UploadResult;
}
/// The logic for uploading pings: this leaves the actual upload mechanism as
@@ -152,10 +132,6 @@ impl UploadManager {
body_has_info_sections: request.body_has_info_sections,
ping_name: request.ping_name,
};
- let upload_request = CapablePingUploadRequest {
- request: upload_request,
- capabilities: request.uploader_capabilities,
- };
let result = inner.uploader.upload(upload_request);
// Process the upload response.
match glean_core::glean_process_ping_upload_response(doc_id, result) {
diff --git a/third_party/rust/glean/src/private/ping.rs b/third_party/rust/glean/src/private/ping.rs
index 4093b6b18d85..f022dc1a2a89 100644
--- a/third_party/rust/glean/src/private/ping.rs
+++ b/third_party/rust/glean/src/private/ping.rs
@@ -34,7 +34,6 @@ impl PingType {
/// * `schedules_pings` - A list of pings which are triggered for submission when this ping is
/// submitted.
/// * `reason_codes` - The valid reason codes for this ping.
- /// * `uploader_capabilities` - The capabilities required during this ping's upload.
#[allow(clippy::too_many_arguments)]
pub fn new>(
name: A,
@@ -46,7 +45,6 @@ impl PingType {
schedules_pings: Vec,
reason_codes: Vec,
follows_collection_enabled: bool,
- uploader_capabilities: Vec,
) -> Self {
let inner = glean_core::metrics::PingType::new(
name.into(),
@@ -58,7 +56,6 @@ impl PingType {
schedules_pings,
reason_codes,
follows_collection_enabled,
- uploader_capabilities,
);
Self {
diff --git a/third_party/rust/glean/src/system.rs b/third_party/rust/glean/src/system.rs
index 1b2e56ad512c..4816f2552a9b 100644
--- a/third_party/rust/glean/src/system.rs
+++ b/third_party/rust/glean/src/system.rs
@@ -46,36 +46,11 @@ pub const ARCH: &str = "x86";
/// `target_arch` when building this crate: `x86_64`
pub const ARCH: &str = "x86_64";
-#[cfg(target_arch = "powerpc64")]
-/// `target_arch` when building this crate: `powerpc64`
-pub const ARCH: &str = "powerpc64";
-
-#[cfg(target_arch = "riscv64")]
-/// `target_arch` when building this crate: `riscv64`
-pub const ARCH: &str = "riscv64";
-
-#[cfg(target_arch = "mips")]
-/// `target_arch` when building this crate: `mips`
-pub const ARCH: &str = "mips";
-
-#[cfg(target_arch = "loongarch64")]
-/// `target_arch` when building this crate: `loongarch64`
-pub const ARCH: &str = "loongarch64";
-
-#[cfg(target_arch = "s390x")]
-/// `target_arch` when building this crate: `s390x`
-pub const ARCH: &str = "s390x";
-
#[cfg(not(any(
target_arch = "aarch64",
target_arch = "arm",
target_arch = "x86",
- target_arch = "x86_64",
- target_arch = "powerpc64",
- target_arch = "riscv64",
- target_arch = "mips",
- target_arch = "loongarch64",
- target_arch = "s390x",
+ target_arch = "x86_64"
)))]
/// `target_arch` when building this crate: unknown!
pub const ARCH: &str = "Unknown";
diff --git a/third_party/rust/glean/src/test.rs b/third_party/rust/glean/src/test.rs
index d7e1c668c12b..cc38afed94ee 100644
--- a/third_party/rust/glean/src/test.rs
+++ b/third_party/rust/glean/src/test.rs
@@ -19,18 +19,7 @@ use super::*;
use crate::common_test::{lock_test, new_glean, GLOBAL_APPLICATION_ID};
fn new_test_ping(name: &str) -> PingType {
- PingType::new(
- name,
- true,
- true,
- true,
- true,
- true,
- vec![],
- vec![],
- true,
- vec![],
- )
+ PingType::new(name, true, true, true, true, true, vec![], vec![], true)
}
#[test]
@@ -45,8 +34,7 @@ fn send_a_ping() {
sender: crossbeam_channel::Sender,
}
impl net::PingUploader for FakeUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let upload_request = upload_request.capable(|_| true).unwrap();
+ fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
self.sender.send(upload_request).unwrap();
net::UploadResult::http_status(200)
}
@@ -87,8 +75,7 @@ fn send_a_ping_without_info_sections() {
sender: crossbeam_channel::Sender,
}
impl net::PingUploader for FakeUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let upload_request = upload_request.capable(|_| true).unwrap();
+ fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
self.sender.send(upload_request).unwrap();
net::UploadResult::http_status(200)
}
@@ -117,7 +104,6 @@ fn send_a_ping_without_info_sections() {
vec![],
vec![],
true,
- vec![],
);
custom_ping.submit(None);
@@ -254,8 +240,7 @@ fn sending_of_foreground_background_pings() {
sender: crossbeam_channel::Sender,
}
impl net::PingUploader for FakeUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let upload_request = upload_request.capable(|_| true).unwrap();
+ fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
self.sender.send(upload_request.url).unwrap();
net::UploadResult::http_status(200)
}
@@ -323,8 +308,7 @@ fn sending_of_startup_baseline_ping() {
sender: crossbeam_channel::Sender,
}
impl net::PingUploader for FakeUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let upload_request = upload_request.capable(|_| true).unwrap();
+ fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
self.sender.send(upload_request.url).unwrap();
net::UploadResult::http_status(200)
}
@@ -371,8 +355,7 @@ fn no_dirty_baseline_on_clean_shutdowns() {
sender: crossbeam_channel::Sender,
}
impl net::PingUploader for FakeUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let upload_request = upload_request.capable(|_| true).unwrap();
+ fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
self.sender.send(upload_request.url).unwrap();
net::UploadResult::http_status(200)
}
@@ -595,9 +578,8 @@ fn ping_collection_must_happen_after_concurrently_scheduled_metrics_recordings()
sender: crossbeam_channel::Sender<(String, JsonValue)>,
}
impl net::PingUploader for FakeUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let net::PingUploadRequest { body, url, .. } =
- upload_request.capable(|_| true).unwrap();
+ fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
+ let net::PingUploadRequest { body, url, .. } = upload_request;
// Decode the gzipped body.
let mut gzip_decoder = GzDecoder::new(&body[..]);
let mut s = String::with_capacity(body.len());
@@ -681,8 +663,9 @@ fn basic_metrics_should_be_cleared_when_disabling_uploading() {
assert_eq!("TEST VALUE", metric.test_get_value(None).unwrap());
}
+// TODO: Should probably move into glean-core.
#[test]
-fn core_metrics_are_not_cleared_when_disabling_and_enabling_uploading() {
+fn core_metrics_should_be_cleared_and_restored_when_disabling_and_enabling_uploading() {
let _lock = lock_test();
let dir = tempfile::tempdir().unwrap();
@@ -708,13 +691,12 @@ fn core_metrics_are_not_cleared_when_disabling_and_enabling_uploading() {
});
assert!(os_version.test_get_value(None).is_some());
- let initial_value = os_version.test_get_value(None).unwrap();
set_upload_enabled(false);
- assert_eq!(initial_value, os_version.test_get_value(None).unwrap());
+ assert!(os_version.test_get_value(None).is_none());
set_upload_enabled(true);
- assert_eq!(initial_value, os_version.test_get_value(None).unwrap());
+ assert!(os_version.test_get_value(None).is_some());
}
#[test]
@@ -730,8 +712,7 @@ fn sending_deletion_ping_if_disabled_outside_of_run() {
sender: crossbeam_channel::Sender,
}
impl net::PingUploader for FakeUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let upload_request = upload_request.capable(|_| true).unwrap();
+ fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
self.sender.send(upload_request.url).unwrap();
net::UploadResult::http_status(200)
}
@@ -776,8 +757,7 @@ fn no_sending_of_deletion_ping_if_unchanged_outside_of_run() {
sender: crossbeam_channel::Sender,
}
impl net::PingUploader for FakeUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let upload_request = upload_request.capable(|_| true).unwrap();
+ fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
self.sender.send(upload_request.url).unwrap();
net::UploadResult::http_status(200)
}
@@ -820,8 +800,7 @@ fn deletion_request_ping_contains_experimentation_id() {
sender: crossbeam_channel::Sender,
}
impl net::PingUploader for FakeUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let upload_request = upload_request.capable(|_| true).unwrap();
+ fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
let body = upload_request.body;
let mut gzip_decoder = GzDecoder::new(&body[..]);
let mut body_str = String::with_capacity(body.len());
@@ -885,9 +864,8 @@ fn test_sending_of_startup_baseline_ping_with_application_lifetime_metric() {
sender: crossbeam_channel::Sender<(String, JsonValue)>,
}
impl net::PingUploader for FakeUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let net::PingUploadRequest { url, body, .. } =
- upload_request.capable(|_| true).unwrap();
+ fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
+ let net::PingUploadRequest { url, body, .. } = upload_request;
// Decode the gzipped body.
let mut gzip_decoder = GzDecoder::new(&body[..]);
let mut s = String::with_capacity(body.len());
@@ -967,8 +945,7 @@ fn setting_debug_view_tag_before_initialization_should_not_crash() {
sender: crossbeam_channel::Sender>,
}
impl net::PingUploader for FakeUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let upload_request = upload_request.capable(|_| true).unwrap();
+ fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
self.sender.send(upload_request.headers).unwrap();
net::UploadResult::http_status(200)
}
@@ -1014,8 +991,7 @@ fn setting_source_tags_before_initialization_should_not_crash() {
sender: crossbeam_channel::Sender>,
}
impl net::PingUploader for FakeUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let upload_request = upload_request.capable(|_| true).unwrap();
+ fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
self.sender.send(upload_request.headers).unwrap();
net::UploadResult::http_status(200)
}
@@ -1060,8 +1036,7 @@ fn setting_source_tags_after_initialization_should_not_crash() {
sender: crossbeam_channel::Sender>,
}
impl net::PingUploader for FakeUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let upload_request = upload_request.capable(|_| true).unwrap();
+ fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
self.sender.send(upload_request.headers).unwrap();
net::UploadResult::http_status(200)
}
@@ -1120,8 +1095,7 @@ fn flipping_upload_enabled_respects_order_of_events() {
sender: crossbeam_channel::Sender,
}
impl net::PingUploader for FakeUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let upload_request = upload_request.capable(|_| true).unwrap();
+ fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
self.sender.send(upload_request.url).unwrap();
net::UploadResult::http_status(200)
}
@@ -1174,8 +1148,7 @@ fn registering_pings_before_init_must_work() {
sender: crossbeam_channel::Sender,
}
impl net::PingUploader for FakeUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let upload_request = upload_request.capable(|_| true).unwrap();
+ fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
self.sender.send(upload_request.url).unwrap();
net::UploadResult::http_status(200)
}
@@ -1216,8 +1189,7 @@ fn test_a_ping_before_submission() {
sender: crossbeam_channel::Sender,
}
impl net::PingUploader for FakeUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let upload_request = upload_request.capable(|_| true).unwrap();
+ fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
self.sender.send(upload_request.url).unwrap();
net::UploadResult::http_status(200)
}
@@ -1319,7 +1291,7 @@ fn signaling_done() {
counter: Arc>>,
}
impl net::PingUploader for FakeUploader {
- fn upload(&self, _upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
+ fn upload(&self, _upload_request: net::PingUploadRequest) -> net::UploadResult {
let mut map = self.counter.lock().unwrap();
*map.entry(thread::current().id()).or_insert(0) += 1;
@@ -1391,8 +1363,7 @@ fn configure_ping_throttling() {
done: Arc,
}
impl net::PingUploader for FakeUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let upload_request = upload_request.capable(|_| true).unwrap();
+ fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
if self.done.load(std::sync::atomic::Ordering::SeqCst) {
// If we've outlived the test, just lie.
return net::UploadResult::http_status(200);
@@ -1467,8 +1438,7 @@ fn pings_ride_along_builtin_pings() {
sender: crossbeam_channel::Sender,
}
impl net::PingUploader for FakeUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let upload_request = upload_request.capable(|_| true).unwrap();
+ fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
self.sender.send(upload_request.url).unwrap();
net::UploadResult::http_status(200)
}
diff --git a/third_party/rust/glean/tests/collection_enabled.rs b/third_party/rust/glean/tests/collection_enabled.rs
deleted file mode 100644
index 7ecfe2bad0f3..000000000000
--- a/third_party/rust/glean/tests/collection_enabled.rs
+++ /dev/null
@@ -1,171 +0,0 @@
-// This Source Code Form is subject to the terms of the Mozilla Public
-// License, v. 2.0. If a copy of the MPL was not distributed with this
-// file, You can obtain one at https://mozilla.org/MPL/2.0/.
-
-//! This integration test should model how the RLB is used when embedded in another Rust application
-//! (e.g. FOG/Firefox Desktop).
-//!
-//! We write a single test scenario per file to avoid any state keeping across runs
-//! (different files run as different processes).
-
-mod common;
-
-use std::io::Read;
-
-use crossbeam_channel::bounded;
-
-use crossbeam_channel::Sender;
-use crossbeam_channel::TryRecvError;
-use flate2::read::GzDecoder;
-use glean::net;
-use glean::ClientInfoMetrics;
-use glean::ConfigurationBuilder;
-use pings::nofollows;
-use serde_json::Value as JsonValue;
-
-mod pings {
- use super::*;
- use glean::private::PingType;
- use once_cell::sync::Lazy;
-
- #[allow(non_upper_case_globals)]
- pub static nofollows: Lazy = Lazy::new(|| {
- common::PingBuilder::new("nofollows")
- .with_send_if_empty(true)
- .with_include_info_sections(true) // WITH info sections
- .with_enabled(false)
- .with_follows_collection_enabled(false)
- .with_include_client_id(true)
- .build()
- });
-
- #[allow(non_upper_case_globals)]
- pub static manual: Lazy = Lazy::new(|| {
- common::PingBuilder::new("manual")
- .with_send_if_empty(true)
- .build()
- });
-}
-
-// Define a fake uploader that reports when and what it uploads.
-#[derive(Debug)]
-struct ReportingUploader {
- sender: Sender,
-}
-
-impl net::PingUploader for ReportingUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let upload_request = upload_request.capable(|_| true).unwrap();
- let body = upload_request.body;
- let decode = |body: Vec| {
- let mut gzip_decoder = GzDecoder::new(&body[..]);
- let mut s = String::with_capacity(body.len());
-
- gzip_decoder
- .read_to_string(&mut s)
- .ok()
- .map(|_| &s[..])
- .or_else(|| std::str::from_utf8(&body).ok())
- .and_then(|payload| serde_json::from_str(payload).ok())
- .unwrap()
- };
-
- self.sender.send(decode(body)).unwrap();
- net::UploadResult::http_status(200)
- }
-}
-
-/// Test scenario:
-///
-/// * Glean has _some_ data already stored.
-/// * Glean is started with collection-enabled=false.
-/// * Most data is cleared, but not `client_info`` (except `client_id`)
-/// * Pings with `follows_collection_enabled=false` still have the `client_info` filled in.
-#[test]
-fn nofollows_contains_client_info_when_collection_disabled() {
- common::enable_test_logging();
-
- // Create a custom configuration to use our reporting uploader.
- let dir = tempfile::tempdir().unwrap();
- let tmpname = dir.path().to_path_buf();
-
- // collection-enabled = true
- // Forces database to be created with data, then clears data.
- // Keeps `first_run_date`.
- // Ensures the _next_ init is NOT a first-run.
- let cfg = ConfigurationBuilder::new(true, tmpname.clone(), "glean-fc")
- .with_server_endpoint("invalid-test-host")
- .with_use_core_mps(false)
- .build();
- common::initialize(cfg);
- glean::set_upload_enabled(false);
- glean::shutdown();
-
- // collection-enabled = false
- let (tx, rx) = bounded(1);
- let cfg = ConfigurationBuilder::new(false, tmpname.clone(), "glean-fc")
- .with_server_endpoint("invalid-test-host")
- .with_use_core_mps(false)
- .with_uploader(ReportingUploader { sender: tx })
- .build();
- // Same as `common::initialize`.
- let client_info = ClientInfoMetrics {
- app_build: "1.0.0".to_string(),
- app_display_version: "1.0.0".to_string(),
- channel: Some("testing".to_string()),
- locale: Some("xx-XX".to_string()),
- };
- glean::test_reset_glean(cfg, client_info, false);
-
- _ = &*pings::nofollows;
- _ = &*pings::manual;
- nofollows.set_enabled(true);
-
- pings::manual.submit(None);
- pings::nofollows.submit(None);
-
- // Wait for the ping to arrive.
- let payload = rx.recv().unwrap();
-
- let client_info = payload["client_info"].as_object().unwrap();
- // General client info is set
- assert!(client_info["app_build"].is_string());
- assert!(client_info["architecture"].is_string());
- assert!(client_info["os"].is_string());
- assert!(client_info["telemetry_sdk_build"].is_string());
- // No client_id
- assert_eq!(None, client_info.get("client_id"));
-
- // No second ping received.
- assert!(matches!(rx.try_recv(), Err(TryRecvError::Empty)));
-
- // Now we enable collection.
- // This should give us a client ID.
- glean::set_collection_enabled(true);
-
- pings::manual.submit(None);
- let payload = rx.recv().unwrap();
- let client_info = payload["client_info"].as_object().unwrap();
- // General client info is set
- assert!(client_info["app_build"].is_string());
- assert!(client_info["architecture"].is_string());
- assert!(client_info["os"].is_string());
- assert!(client_info["telemetry_sdk_build"].is_string());
- // No client_id
- let client_id = client_info["client_id"].as_str().unwrap();
-
- pings::nofollows.submit(None);
- let payload = rx.recv().unwrap();
- let client_info = payload["client_info"].as_object().unwrap();
- // General client info is set
- assert!(client_info["app_build"].is_string());
- assert!(client_info["architecture"].is_string());
- assert!(client_info["os"].is_string());
- assert!(client_info["telemetry_sdk_build"].is_string());
- // No client_id
- let nf_client_id = client_info["client_id"].as_str().unwrap();
-
- assert_eq!(client_id, nf_client_id);
-
- glean::shutdown();
-}
diff --git a/third_party/rust/glean/tests/collection_enabled_bin.rs b/third_party/rust/glean/tests/collection_enabled_bin.rs
deleted file mode 100644
index 8ddc8637a698..000000000000
--- a/third_party/rust/glean/tests/collection_enabled_bin.rs
+++ /dev/null
@@ -1,165 +0,0 @@
-// This Source Code Form is subject to the terms of the Mozilla Public
-// License, v. 2.0. If a copy of the MPL was not distributed with this
-// file, You can obtain one at https://mozilla.org/MPL/2.0/.
-
-//! This integration test should model how the RLB is used when embedded in another Rust application
-//! (e.g. FOG/Firefox Desktop).
-//!
-//! We write a single test scenario per file to avoid any state keeping across runs
-//! (different files run as different processes).
-
-mod common;
-
-use std::fs;
-use std::io::Read;
-
-use crossbeam_channel::bounded;
-
-use crossbeam_channel::Sender;
-use crossbeam_channel::TryRecvError;
-use flate2::read::GzDecoder;
-use glean::net;
-use glean::ClientInfoMetrics;
-use glean::ConfigurationBuilder;
-use pings::nofollows;
-use serde_json::Value as JsonValue;
-
-mod pings {
- use super::*;
- use glean::private::PingType;
- use once_cell::sync::Lazy;
-
- #[allow(non_upper_case_globals)]
- pub static nofollows: Lazy = Lazy::new(|| {
- common::PingBuilder::new("nofollows")
- .with_send_if_empty(true)
- .with_include_info_sections(true) // WITH info sections
- .with_enabled(false)
- .with_follows_collection_enabled(false)
- .with_include_client_id(true)
- .build()
- });
-
- #[allow(non_upper_case_globals)]
- pub static manual: Lazy = Lazy::new(|| {
- common::PingBuilder::new("manual")
- .with_send_if_empty(true)
- .build()
- });
-}
-
-// Define a fake uploader that reports when and what it uploads.
-#[derive(Debug)]
-struct ReportingUploader {
- sender: Sender,
-}
-
-impl net::PingUploader for ReportingUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let upload_request = upload_request.capable(|_| true).unwrap();
- let body = upload_request.body;
- let decode = |body: Vec| {
- let mut gzip_decoder = GzDecoder::new(&body[..]);
- let mut s = String::with_capacity(body.len());
-
- gzip_decoder
- .read_to_string(&mut s)
- .ok()
- .map(|_| &s[..])
- .or_else(|| std::str::from_utf8(&body).ok())
- .and_then(|payload| serde_json::from_str(payload).ok())
- .unwrap()
- };
-
- self.sender.send(decode(body)).unwrap();
- net::UploadResult::http_status(200)
- }
-}
-
-/// Test scenario:
-///
-/// * Glean has _some_ data already stored.
-/// * Glean is started with collection-enabled=false.
-/// * Most data is cleared, but not `client_info`` (except `client_id`)
-/// * Pings with `follows_collection_enabled=false` still have the `client_info` filled in.
-#[test]
-fn nofollows_contains_client_info_when_collection_disabled() {
- common::enable_test_logging();
-
- // Create a custom configuration to use our reporting uploader.
- let dir = tempfile::tempdir().unwrap();
- let tmpname = dir.path().to_path_buf();
-
- let db_dir = tmpname.join("db");
- fs::create_dir_all(&db_dir).unwrap();
- let db_path = db_dir.join("data.safe.bin");
- fs::write(db_path, include_bytes!("./near-empty-c0ffee-db.safe.bin")).unwrap();
-
- // collection-enabled = false
- let (tx, rx) = bounded(1);
- let cfg = ConfigurationBuilder::new(false, tmpname.clone(), "glean-fc")
- .with_server_endpoint("invalid-test-host")
- .with_use_core_mps(false)
- .with_uploader(ReportingUploader { sender: tx })
- .build();
- // Same as `common::initialize`.
- let client_info = ClientInfoMetrics {
- app_build: "1.0.0".to_string(),
- app_display_version: "1.0.0".to_string(),
- channel: Some("testing".to_string()),
- locale: Some("xx-XX".to_string()),
- };
- glean::test_reset_glean(cfg, client_info, false);
-
- _ = &*pings::nofollows;
- _ = &*pings::manual;
- nofollows.set_enabled(true);
-
- pings::manual.submit(None);
- pings::nofollows.submit(None);
-
- // Wait for the ping to arrive.
- let payload = rx.recv().unwrap();
-
- let client_info = payload["client_info"].as_object().unwrap();
- // General client info is set
- assert!(client_info["app_build"].is_string());
- assert!(client_info["architecture"].is_string());
- assert!(client_info["os"].is_string());
- assert!(client_info["telemetry_sdk_build"].is_string());
- // No client_id
- assert_eq!(None, client_info.get("client_id"));
-
- // No second ping received.
- assert!(matches!(rx.try_recv(), Err(TryRecvError::Empty)));
-
- // Now we enable collection.
- // This should give us a client ID.
- glean::set_collection_enabled(true);
-
- pings::manual.submit(None);
- let payload = rx.recv().unwrap();
- let client_info = payload["client_info"].as_object().unwrap();
- // General client info is set
- assert!(client_info["app_build"].is_string());
- assert!(client_info["architecture"].is_string());
- assert!(client_info["os"].is_string());
- assert!(client_info["telemetry_sdk_build"].is_string());
- // No client_id
- let client_id = client_info["client_id"].as_str().unwrap();
-
- pings::nofollows.submit(None);
- let payload = rx.recv().unwrap();
- let client_info = payload["client_info"].as_object().unwrap();
- // General client info is set
- assert!(client_info["app_build"].is_string());
- assert!(client_info["architecture"].is_string());
- assert!(client_info["os"].is_string());
- assert!(client_info["telemetry_sdk_build"].is_string());
- // No client_id
- let nf_client_id = client_info["client_id"].as_str().unwrap();
-
- assert_eq!(client_id, nf_client_id);
-
- glean::shutdown();
-}
diff --git a/third_party/rust/glean/tests/common/mod.rs b/third_party/rust/glean/tests/common/mod.rs
index c261666e02d8..da58a7cee7cb 100644
--- a/third_party/rust/glean/tests/common/mod.rs
+++ b/third_party/rust/glean/tests/common/mod.rs
@@ -48,81 +48,6 @@ pub fn initialize(cfg: Configuration) {
locale: Some("xx-XX".to_string()),
};
- _ = PingBuilder::new("store1").with_send_if_empty(true).build();
+ _ = PingType::new("store1", true, true, true, true, true, vec![], vec![], true);
glean::initialize(cfg, client_info);
}
-
-pub struct PingBuilder {
- name: String,
- include_client_id: bool,
- send_if_empty: bool,
- precise_timestamps: bool,
- include_info_sections: bool,
- enabled: bool,
- schedules_pings: Vec,
- reason_codes: Vec,
- follows_collection_enabled: bool,
- uploader_capabilities: Vec,
-}
-
-impl PingBuilder {
- pub fn new(name: &str) -> Self {
- Self {
- name: name.to_string(),
- include_client_id: true,
- send_if_empty: false,
- precise_timestamps: true,
- include_info_sections: true,
- enabled: true,
- schedules_pings: vec![],
- reason_codes: vec![],
- follows_collection_enabled: true,
- uploader_capabilities: vec![],
- }
- }
-
- pub fn build(self) -> PingType {
- PingType::new(
- self.name,
- self.include_client_id,
- self.send_if_empty,
- self.precise_timestamps,
- self.include_info_sections,
- self.enabled,
- self.schedules_pings,
- self.reason_codes,
- self.follows_collection_enabled,
- self.uploader_capabilities,
- )
- }
-
- pub fn with_send_if_empty(mut self, value: bool) -> Self {
- self.send_if_empty = value;
- self
- }
-
- pub fn with_uploader_capabilities(mut self, value: Vec) -> Self {
- self.uploader_capabilities = value;
- self
- }
-
- pub fn with_include_info_sections(mut self, value: bool) -> Self {
- self.include_info_sections = value;
- self
- }
-
- pub fn with_enabled(mut self, value: bool) -> Self {
- self.enabled = value;
- self
- }
-
- pub fn with_follows_collection_enabled(mut self, value: bool) -> Self {
- self.follows_collection_enabled = value;
- self
- }
-
- pub fn with_include_client_id(mut self, value: bool) -> Self {
- self.include_client_id = value;
- self
- }
-}
diff --git a/third_party/rust/glean/tests/init_fails.rs b/third_party/rust/glean/tests/init_fails.rs
index ccaaa415be80..98657e166b27 100644
--- a/third_party/rust/glean/tests/init_fails.rs
+++ b/third_party/rust/glean/tests/init_fails.rs
@@ -38,15 +38,22 @@ mod metrics {
}
mod pings {
- use super::*;
use glean::private::PingType;
use once_cell::sync::Lazy;
#[allow(non_upper_case_globals)]
pub static validation: Lazy = Lazy::new(|| {
- common::PingBuilder::new("validation")
- .with_send_if_empty(true)
- .build()
+ glean::private::PingType::new(
+ "validation",
+ true,
+ true,
+ true,
+ true,
+ true,
+ vec![],
+ vec![],
+ true,
+ )
});
}
diff --git a/third_party/rust/glean/tests/interruptible_shutdown.rs b/third_party/rust/glean/tests/interruptible_shutdown.rs
index d4f6c0f70f90..62f0d7e9f199 100644
--- a/third_party/rust/glean/tests/interruptible_shutdown.rs
+++ b/third_party/rust/glean/tests/interruptible_shutdown.rs
@@ -39,15 +39,22 @@ mod metrics {
}
mod pings {
- use super::*;
use glean::private::PingType;
use once_cell::sync::Lazy;
#[allow(non_upper_case_globals)]
pub static validation: Lazy = Lazy::new(|| {
- common::PingBuilder::new("validation")
- .with_send_if_empty(true)
- .build()
+ glean::private::PingType::new(
+ "validation",
+ true,
+ true,
+ true,
+ true,
+ true,
+ vec![],
+ vec![],
+ true,
+ )
});
}
@@ -59,8 +66,7 @@ struct ReportingUploader {
}
impl net::PingUploader for ReportingUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let upload_request = upload_request.capable(|_| true).unwrap();
+ fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
let calls = self.calls.fetch_add(1, Ordering::SeqCst);
let body = upload_request.body;
let decode = |body: Vec| {
diff --git a/third_party/rust/glean/tests/near-empty-c0ffee-db.safe.bin b/third_party/rust/glean/tests/near-empty-c0ffee-db.safe.bin
deleted file mode 100644
index a126aa6640c0..000000000000
Binary files a/third_party/rust/glean/tests/near-empty-c0ffee-db.safe.bin and /dev/null differ
diff --git a/third_party/rust/glean/tests/never_init.rs b/third_party/rust/glean/tests/never_init.rs
index 625301b33f83..b40cdc1871d7 100644
--- a/third_party/rust/glean/tests/never_init.rs
+++ b/third_party/rust/glean/tests/never_init.rs
@@ -34,15 +34,22 @@ mod metrics {
}
mod pings {
- use super::*;
use glean::private::PingType;
use once_cell::sync::Lazy;
#[allow(non_upper_case_globals)]
pub static validation: Lazy = Lazy::new(|| {
- common::PingBuilder::new("validation")
- .with_send_if_empty(true)
- .build()
+ glean::private::PingType::new(
+ "validation",
+ true,
+ true,
+ true,
+ true,
+ true,
+ vec![],
+ vec![],
+ true,
+ )
});
}
diff --git a/third_party/rust/glean/tests/no_time_to_init.rs b/third_party/rust/glean/tests/no_time_to_init.rs
index f497b9d01d4e..4412214996f7 100644
--- a/third_party/rust/glean/tests/no_time_to_init.rs
+++ b/third_party/rust/glean/tests/no_time_to_init.rs
@@ -36,15 +36,22 @@ mod metrics {
}
mod pings {
- use super::*;
use glean::private::PingType;
use once_cell::sync::Lazy;
#[allow(non_upper_case_globals)]
pub static validation: Lazy = Lazy::new(|| {
- common::PingBuilder::new("validation")
- .with_send_if_empty(true)
- .build()
+ glean::private::PingType::new(
+ "validation",
+ true,
+ true,
+ true,
+ true,
+ true,
+ vec![],
+ vec![],
+ true,
+ )
});
}
diff --git a/third_party/rust/glean/tests/schema.rs b/third_party/rust/glean/tests/schema.rs
index 893fb7c4d5b1..df53095536df 100644
--- a/third_party/rust/glean/tests/schema.rs
+++ b/third_party/rust/glean/tests/schema.rs
@@ -2,9 +2,6 @@
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at https://mozilla.org/MPL/2.0/.
-mod common;
-use crate::common::*;
-
use std::collections::HashMap;
use std::io::Read;
@@ -12,7 +9,7 @@ use flate2::read::GzDecoder;
use jsonschema_valid::schemas::Draft;
use serde_json::Value;
-use glean::net::{CapablePingUploadRequest, UploadResult};
+use glean::net::{PingUploadRequest, UploadResult};
use glean::private::*;
use glean::{
traits, ClientInfoMetrics, CommonMetricData, ConfigurationBuilder, HistogramType, MemoryUnit,
@@ -62,8 +59,7 @@ fn validate_against_schema() {
sender: crossbeam_channel::Sender>,
}
impl glean::net::PingUploader for ValidatingUploader {
- fn upload(&self, ping_request: CapablePingUploadRequest) -> UploadResult {
- let ping_request = ping_request.capable(|_| true).unwrap();
+ fn upload(&self, ping_request: PingUploadRequest) -> UploadResult {
self.sender.send(ping_request.body).unwrap();
UploadResult::http_status(200)
}
@@ -174,7 +170,17 @@ fn validate_against_schema() {
text_metric.set("loooooong text".repeat(100));
// Define a new ping and submit it.
- let custom_ping = PingBuilder::new(PING_NAME).with_send_if_empty(true).build();
+ let custom_ping = glean::private::PingType::new(
+ PING_NAME,
+ true,
+ true,
+ true,
+ true,
+ true,
+ vec![],
+ vec![],
+ true,
+ );
custom_ping.submit(None);
// Wait for the ping to arrive.
diff --git a/third_party/rust/glean/tests/simple.rs b/third_party/rust/glean/tests/simple.rs
index 2f3fc9ee815f..59e2f325df45 100644
--- a/third_party/rust/glean/tests/simple.rs
+++ b/third_party/rust/glean/tests/simple.rs
@@ -36,15 +36,22 @@ mod metrics {
}
mod pings {
- use super::*;
use glean::private::PingType;
use once_cell::sync::Lazy;
#[allow(non_upper_case_globals)]
pub static validation: Lazy = Lazy::new(|| {
- common::PingBuilder::new("validation")
- .with_send_if_empty(true)
- .build()
+ glean::private::PingType::new(
+ "validation",
+ true,
+ true,
+ true,
+ true,
+ true,
+ vec![],
+ vec![],
+ true,
+ )
});
}
diff --git a/third_party/rust/glean/tests/upload_timing.rs b/third_party/rust/glean/tests/upload_timing.rs
index 6a83b526c41e..4b5a7798e029 100644
--- a/third_party/rust/glean/tests/upload_timing.rs
+++ b/third_party/rust/glean/tests/upload_timing.rs
@@ -92,15 +92,22 @@ pub mod metrics {
}
mod pings {
- use super::*;
use glean::private::PingType;
use once_cell::sync::Lazy;
#[allow(non_upper_case_globals)]
pub static validation: Lazy = Lazy::new(|| {
- common::PingBuilder::new("validation")
- .with_send_if_empty(true)
- .build()
+ glean::private::PingType::new(
+ "validation",
+ true,
+ true,
+ true,
+ true,
+ true,
+ vec![],
+ vec![],
+ true,
+ )
});
}
@@ -112,8 +119,7 @@ struct FakeUploader {
}
impl net::PingUploader for FakeUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let upload_request = upload_request.capable(|_| true).unwrap();
+ fn upload(&self, upload_request: net::PingUploadRequest) -> net::UploadResult {
let calls = self.calls.fetch_add(1, Ordering::SeqCst);
let body = upload_request.body;
let decode = |body: Vec| {
diff --git a/third_party/rust/glean/tests/uploader_capabilities.rs b/third_party/rust/glean/tests/uploader_capabilities.rs
deleted file mode 100644
index d30da88e8f84..000000000000
--- a/third_party/rust/glean/tests/uploader_capabilities.rs
+++ /dev/null
@@ -1,112 +0,0 @@
-// This Source Code Form is subject to the terms of the Mozilla Public
-// License, v. 2.0. If a copy of the MPL was not distributed with this
-// file, You can obtain one at https://mozilla.org/MPL/2.0/.
-
-//! This integration test should model how the RLB is used when embedded in another Rust application
-//! (e.g. FOG/Firefox Desktop).
-//!
-//! We write a single test scenario per file to avoid any state keeping across runs
-//! (different files run as different processes).
-
-mod common;
-
-use crossbeam_channel::{bounded, Sender};
-
-use glean::net;
-use glean::ConfigurationBuilder;
-
-mod pings {
- use super::*;
- use glean::private::PingType;
- use once_cell::sync::Lazy;
-
- #[allow(non_upper_case_globals)]
- pub static no_capabilities: Lazy = Lazy::new(|| {
- common::PingBuilder::new("no-capabilities")
- .with_send_if_empty(true)
- .build()
- });
-
- #[allow(non_upper_case_globals)]
- pub static one_capability: Lazy = Lazy::new(|| {
- common::PingBuilder::new("one-capability")
- .with_send_if_empty(true)
- .with_uploader_capabilities(vec!["capability1".to_string()])
- .build()
- });
-
- #[allow(non_upper_case_globals)]
- pub static two_capabilities: Lazy = Lazy::new(|| {
- common::PingBuilder::new("two-capabilities")
- .with_send_if_empty(true)
- .with_uploader_capabilities(vec!["capability1".to_string(), "capability2".to_string()])
- .build()
- });
-}
-
-// Define a fake uploader that reports when and what it uploads.
-#[derive(Debug)]
-struct ReportingUploader {
- sender: Sender,
-}
-
-impl net::PingUploader for ReportingUploader {
- fn upload(&self, upload_request: net::CapablePingUploadRequest) -> net::UploadResult {
- let uploader_capabilities: Vec = vec!["capability1".to_string()];
-
- let Some(_upload_request) = upload_request.capable(|capabilities| {
- capabilities.iter().all(|required_capability| {
- uploader_capabilities
- .iter()
- .any(|uploader_capability| uploader_capability == required_capability)
- })
- }) else {
- self.sender.send(net::UploadResult::incapable()).unwrap();
- return net::UploadResult::incapable();
- };
-
- self.sender
- .send(net::UploadResult::http_status(200))
- .unwrap();
- net::UploadResult::http_status(200)
- }
-}
-
-/// Test scenario: We only upload pings we're capable of.
-#[test]
-fn interruptible_shutdown() {
- common::enable_test_logging();
-
- // Create a custom configuration to use our reporting uploader.
- let dir = tempfile::tempdir().unwrap();
- let tmpname = dir.path().to_path_buf();
- let (tx, rx) = bounded(1);
-
- let cfg = ConfigurationBuilder::new(true, tmpname.clone(), "glean-interruptible-shutdown")
- .with_server_endpoint("invalid-test-host")
- .with_use_core_mps(false)
- .with_uploader(ReportingUploader { sender: tx })
- .build();
- common::initialize(cfg);
-
- pings::no_capabilities.submit(None);
- let result = rx.recv().unwrap();
- assert!(
- matches!(result, net::UploadResult::HttpStatus { code: 200 }),
- "Can upload pings requiring no capabilities."
- );
-
- pings::one_capability.submit(None);
- let result = rx.recv().unwrap();
- assert!(
- matches!(result, net::UploadResult::HttpStatus { code: 200 }),
- "Can upload pings with matching capability."
- );
-
- pings::two_capabilities.submit(None);
- let result = rx.recv().unwrap();
- assert!(
- matches!(result, net::UploadResult::Incapable { .. }),
- "Can't upload pings requiring capabilities we don't support."
- );
-}
diff --git a/third_party/rust/interrupt-support/.cargo-checksum.json b/third_party/rust/interrupt-support/.cargo-checksum.json
index 3ea1726598cc..56ef168f2377 100644
--- a/third_party/rust/interrupt-support/.cargo-checksum.json
+++ b/third_party/rust/interrupt-support/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"f86577c4ceee8cab07cc66a2305629708c0bdf2d3e023ffc4b55344148b1817b","README.md":"7f1418b4a7c138ba20bcaea077fe6cf0d6ffbaf6df6b90c80efc52aa0d0e2e9f","build.rs":"49840f26c73c5db19cb4e7f02930e49d7a19648168b83f2313ac1a0303c103df","src/error.rs":"b83cbe8abd22a9d687508d236a2a77e28b3fc6c39673633e5820cc0e3fc86cba","src/interrupt_support.udl":"bac2d5a94b5ae5d1b819b2058b82c541e02b1f75ef157c1eb236bfb4f0c78a05","src/interruptee.rs":"c56f9ac610d0b24a128a907266432287558c4b73f6c24b82674ca7894181d18f","src/lib.rs":"cf44a84310913be5264e1c4a3e004a9f7a6cd82d01a109bb6ac4d6002b5dd560","src/shutdown.rs":"e4b7a89f1ef319646aee3282a0d60465c3dbf571c52a0295f3b1a8909f345818","src/sql.rs":"db9b93fb2fe813ae0af6313082f07fad0e381691290466a7ac67bec14024722d"},"package":null}
\ No newline at end of file
+{"files":{"Cargo.toml":"ff59cecd6f8a6388c9465f405cfc880e7fa57d3c83454e5c2eb24b406d3d1fb0","README.md":"7f1418b4a7c138ba20bcaea077fe6cf0d6ffbaf6df6b90c80efc52aa0d0e2e9f","build.rs":"49840f26c73c5db19cb4e7f02930e49d7a19648168b83f2313ac1a0303c103df","src/error.rs":"b83cbe8abd22a9d687508d236a2a77e28b3fc6c39673633e5820cc0e3fc86cba","src/interrupt_support.udl":"bac2d5a94b5ae5d1b819b2058b82c541e02b1f75ef157c1eb236bfb4f0c78a05","src/interruptee.rs":"c56f9ac610d0b24a128a907266432287558c4b73f6c24b82674ca7894181d18f","src/lib.rs":"cf44a84310913be5264e1c4a3e004a9f7a6cd82d01a109bb6ac4d6002b5dd560","src/shutdown.rs":"e4b7a89f1ef319646aee3282a0d60465c3dbf571c52a0295f3b1a8909f345818","src/sql.rs":"db9b93fb2fe813ae0af6313082f07fad0e381691290466a7ac67bec14024722d"},"package":null}
\ No newline at end of file
diff --git a/third_party/rust/interrupt-support/Cargo.toml b/third_party/rust/interrupt-support/Cargo.toml
index dbc402283c36..1088c422c4c6 100644
--- a/third_party/rust/interrupt-support/Cargo.toml
+++ b/third_party/rust/interrupt-support/Cargo.toml
@@ -41,8 +41,8 @@ features = [
]
[dependencies.uniffi]
-version = "0.29.0"
+version = "0.28.2"
[build-dependencies.uniffi]
-version = "0.29.0"
+version = "0.28.2"
features = ["build"]
diff --git a/third_party/rust/itoa/.cargo-checksum.json b/third_party/rust/itoa/.cargo-checksum.json
index 57f1979bbb91..5ab0879bbaba 100644
--- a/third_party/rust/itoa/.cargo-checksum.json
+++ b/third_party/rust/itoa/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.lock":"d609730a66530a060198a10acf8ff9f499be3fe740fefec2cf4e3026a983038e","Cargo.toml":"c1d45a6aa2324a0862b0e6c8100e8f595616f91612f915f63c862010954667bc","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"48573443063fa4e0786c3b46f42b6efd1f171c6b73408a64afc1b34de89f31fe","benches/bench.rs":"636f3093bd461210ad3063289d455f90669c4a1be3273bcd30898de39f02c641","src/lib.rs":"ef9f1a8665a678cf5b77bcaa628d00538d620de0c84fd2a8b92323a314a95636","src/udiv128.rs":"d28c1872c37ee2185931babcb20a221b8706a5aa8abc4963419763888023ff17","tests/test.rs":"aa1e910573a1d847d39773b4a2e4c597a8d3810070332673df0f6864cab24807"},"package":"4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"}
\ No newline at end of file
+{"files":{"Cargo.toml":"bb96760f2d45e86313dbec93a3210e5073c4ee74116097bb5ca45ba9c5b049a6","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"48573443063fa4e0786c3b46f42b6efd1f171c6b73408a64afc1b34de89f31fe","benches/bench.rs":"636f3093bd461210ad3063289d455f90669c4a1be3273bcd30898de39f02c641","src/lib.rs":"da13f0d5dcba3bb2971f67b6856ea6f2e3cbdc31d47f7042d7f131b08bb7de85","src/udiv128.rs":"d28c1872c37ee2185931babcb20a221b8706a5aa8abc4963419763888023ff17","tests/test.rs":"f7404fc5f7cd1bdaf74a3b64a70d5b30586241ddc1ce2c82bd1b564999fcce0e"},"package":"fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"}
\ No newline at end of file
diff --git a/third_party/rust/itoa/Cargo.lock b/third_party/rust/itoa/Cargo.lock
deleted file mode 100644
index b940b8178678..000000000000
--- a/third_party/rust/itoa/Cargo.lock
+++ /dev/null
@@ -1,56 +0,0 @@
-# This file is automatically @generated by Cargo.
-# It is not intended for manual editing.
-[[package]]
-name = "itoa"
-version = "1.0.15"
-dependencies = [
- "no-panic 0.1.33 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "no-panic"
-version = "0.1.33"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "proc-macro2 1.0.94 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 2.0.99 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "proc-macro2"
-version = "1.0.94"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "unicode-ident 1.0.17 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "quote"
-version = "1.0.39"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "proc-macro2 1.0.94 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "syn"
-version = "2.0.99"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "proc-macro2 1.0.94 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)",
- "unicode-ident 1.0.17 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "unicode-ident"
-version = "1.0.17"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[metadata]
-"checksum no-panic 0.1.33 (registry+https://github.com/rust-lang/crates.io-index)" = "8f7da86466fe446079286ef4b2f6d789755b610a9d85da8477633f734d2697e8"
-"checksum proc-macro2 1.0.94 (registry+https://github.com/rust-lang/crates.io-index)" = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84"
-"checksum quote 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)" = "c1f1914ce909e1658d9907913b4b91947430c7d9be598b15a1912935b8c04801"
-"checksum syn 2.0.99 (registry+https://github.com/rust-lang/crates.io-index)" = "e02e925281e18ffd9d640e234264753c43edc62d64b2d4cf898f1bc5e75f3fc2"
-"checksum unicode-ident 1.0.17 (registry+https://github.com/rust-lang/crates.io-index)" = "00e2473a93778eb0bad35909dff6a10d28e63f792f16ed15e404fca9d5eeedbe"
diff --git a/third_party/rust/itoa/Cargo.toml b/third_party/rust/itoa/Cargo.toml
index b5b3e91fc0b0..86c10b644683 100644
--- a/third_party/rust/itoa/Cargo.toml
+++ b/third_party/rust/itoa/Cargo.toml
@@ -13,18 +13,12 @@
edition = "2018"
rust-version = "1.36"
name = "itoa"
-version = "1.0.15"
+version = "1.0.5"
authors = ["David Tolnay "]
-build = false
exclude = [
"performance.png",
"chart/**",
]
-autolib = false
-autobins = false
-autoexamples = false
-autotests = false
-autobenches = false
description = "Fast integer primitive to string conversion"
documentation = "https://docs.rs/itoa"
readme = "README.md"
@@ -32,32 +26,13 @@ keywords = ["integer"]
categories = [
"value-formatting",
"no-std",
- "no-std::no-alloc",
]
license = "MIT OR Apache-2.0"
repository = "https://github.com/dtolnay/itoa"
[package.metadata.docs.rs]
-rustdoc-args = [
- "--generate-link-to-definition",
- "--extern-html-root-url=core=https://doc.rust-lang.org",
- "--extern-html-root-url=alloc=https://doc.rust-lang.org",
- "--extern-html-root-url=std=https://doc.rust-lang.org",
-]
targets = ["x86_64-unknown-linux-gnu"]
-[lib]
-name = "itoa"
-path = "src/lib.rs"
-
-[[test]]
-name = "test"
-path = "tests/test.rs"
-
-[[bench]]
-name = "bench"
-path = "benches/bench.rs"
-
[dependencies.no-panic]
version = "0.1"
optional = true
diff --git a/third_party/rust/itoa/LICENSE-APACHE b/third_party/rust/itoa/LICENSE-APACHE
index 1b5ec8b78e23..16fe87b06e80 100644
--- a/third_party/rust/itoa/LICENSE-APACHE
+++ b/third_party/rust/itoa/LICENSE-APACHE
@@ -174,3 +174,28 @@ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/third_party/rust/itoa/src/lib.rs b/third_party/rust/itoa/src/lib.rs
index e506b216244d..168407f061f2 100644
--- a/third_party/rust/itoa/src/lib.rs
+++ b/third_party/rust/itoa/src/lib.rs
@@ -13,6 +13,7 @@
//! See also [`ryu`] for printing floating point primitives.
//!
//! [libcore]: https://github.com/rust-lang/rust/blob/b8214dc6c6fc20d0a660fb5700dca9ebf51ebe89/src/libcore/fmt/num.rs#L201-L254
+//! [`core::fmt::Formatter`]: https://doc.rust-lang.org/std/fmt/struct.Formatter.html
//! [`ryu`]: https://github.com/dtolnay/ryu
//!
//! # Example
@@ -29,23 +30,18 @@
//!
//! 
-#![doc(html_root_url = "https://docs.rs/itoa/1.0.15")]
+#![doc(html_root_url = "https://docs.rs/itoa/1.0.5")]
#![no_std]
#![allow(
clippy::cast_lossless,
clippy::cast_possible_truncation,
- clippy::cast_possible_wrap,
- clippy::cast_sign_loss,
- clippy::expl_impl_clone_on_copy,
clippy::must_use_candidate,
- clippy::needless_doctest_main,
clippy::unreadable_literal
)]
mod udiv128;
-use core::hint;
-use core::mem::MaybeUninit;
+use core::mem::{self, MaybeUninit};
use core::{ptr, slice, str};
#[cfg(feature = "no-panic")]
use no_panic::no_panic;
@@ -61,7 +57,7 @@ use no_panic::no_panic;
/// assert_eq!(printed, "1234");
/// ```
pub struct Buffer {
- bytes: [MaybeUninit; i128::MAX_STR_LEN],
+ bytes: [MaybeUninit; I128_MAX_LEN],
}
impl Default for Buffer {
@@ -71,11 +67,8 @@ impl Default for Buffer {
}
}
-impl Copy for Buffer {}
-
impl Clone for Buffer {
#[inline]
- #[allow(clippy::non_canonical_clone_impl)] // false positive https://github.com/rust-lang/rust-clippy/issues/11072
fn clone(&self) -> Self {
Buffer::new()
}
@@ -87,7 +80,7 @@ impl Buffer {
#[inline]
#[cfg_attr(feature = "no-panic", no_panic)]
pub fn new() -> Buffer {
- let bytes = [MaybeUninit::::uninit(); i128::MAX_STR_LEN];
+ let bytes = [MaybeUninit::::uninit(); I128_MAX_LEN];
Buffer { bytes }
}
@@ -95,37 +88,27 @@ impl Buffer {
/// representation within the buffer.
#[cfg_attr(feature = "no-panic", no_panic)]
pub fn format(&mut self, i: I) -> &str {
- let string = i.write(unsafe {
- &mut *(&mut self.bytes as *mut [MaybeUninit; i128::MAX_STR_LEN]
+ i.write(unsafe {
+ &mut *(&mut self.bytes as *mut [MaybeUninit; I128_MAX_LEN]
as *mut ::Buffer)
- });
- if string.len() > I::MAX_STR_LEN {
- unsafe { hint::unreachable_unchecked() };
- }
- string
+ })
}
}
/// An integer that can be written into an [`itoa::Buffer`][Buffer].
///
/// This trait is sealed and cannot be implemented for types outside of itoa.
-pub trait Integer: private::Sealed {
- /// The maximum length of string that formatting an integer of this type can
- /// produce on the current target platform.
- const MAX_STR_LEN: usize;
-}
+pub trait Integer: private::Sealed {}
// Seal to prevent downstream implementations of the Integer trait.
mod private {
- #[doc(hidden)]
pub trait Sealed: Copy {
- #[doc(hidden)]
type Buffer: 'static;
fn write(self, buf: &mut Self::Buffer) -> &str;
}
}
-const DEC_DIGITS_LUT: [u8; 200] = *b"\
+const DEC_DIGITS_LUT: &[u8] = b"\
0001020304050607080910111213141516171819\
2021222324252627282930313233343536373839\
4041424344454647484950515253545556575859\
@@ -135,10 +118,8 @@ const DEC_DIGITS_LUT: [u8; 200] = *b"\
// Adaptation of the original implementation at
// https://github.com/rust-lang/rust/blob/b8214dc6c6fc20d0a660fb5700dca9ebf51ebe89/src/libcore/fmt/num.rs#L188-L266
macro_rules! impl_Integer {
- ($t:ty[len = $max_len:expr] as $large_unsigned:ty) => {
- impl Integer for $t {
- const MAX_STR_LEN: usize = $max_len;
- }
+ ($($max_len:expr => $t:ident),* as $conv_fn:ident) => {$(
+ impl Integer for $t {}
impl private::Sealed for $t {
type Buffer = [MaybeUninit; $max_len];
@@ -149,109 +130,98 @@ macro_rules! impl_Integer {
fn write(self, buf: &mut [MaybeUninit; $max_len]) -> &str {
let is_nonnegative = self >= 0;
let mut n = if is_nonnegative {
- self as $large_unsigned
+ self as $conv_fn
} else {
- // Convert negative number to positive by summing 1 to its two's complement.
- (!(self as $large_unsigned)).wrapping_add(1)
+ // convert the negative num to positive by summing 1 to it's 2 complement
+ (!(self as $conv_fn)).wrapping_add(1)
};
- let mut curr = buf.len();
+ let mut curr = buf.len() as isize;
let buf_ptr = buf.as_mut_ptr() as *mut u8;
let lut_ptr = DEC_DIGITS_LUT.as_ptr();
- // Render 4 digits at a time.
- while n >= 10000 {
- let rem = n % 10000;
- n /= 10000;
+ unsafe {
+ // need at least 16 bits for the 4-characters-at-a-time to work.
+ if mem::size_of::<$t>() >= 2 {
+ // eagerly decode 4 characters at a time
+ while n >= 10000 {
+ let rem = (n % 10000) as isize;
+ n /= 10000;
- let d1 = ((rem / 100) << 1) as usize;
- let d2 = ((rem % 100) << 1) as usize;
- curr -= 4;
- unsafe {
- ptr::copy_nonoverlapping(lut_ptr.add(d1), buf_ptr.add(curr), 2);
- ptr::copy_nonoverlapping(lut_ptr.add(d2), buf_ptr.add(curr + 2), 2);
+ let d1 = (rem / 100) << 1;
+ let d2 = (rem % 100) << 1;
+ curr -= 4;
+ ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2);
+ ptr::copy_nonoverlapping(lut_ptr.offset(d2), buf_ptr.offset(curr + 2), 2);
+ }
+ }
+
+ // if we reach here numbers are <= 9999, so at most 4 chars long
+ let mut n = n as isize; // possibly reduce 64bit math
+
+ // decode 2 more chars, if > 2 chars
+ if n >= 100 {
+ let d1 = (n % 100) << 1;
+ n /= 100;
+ curr -= 2;
+ ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2);
+ }
+
+ // decode last 1 or 2 chars
+ if n < 10 {
+ curr -= 1;
+ *buf_ptr.offset(curr) = (n as u8) + b'0';
+ } else {
+ let d1 = n << 1;
+ curr -= 2;
+ ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2);
+ }
+
+ if !is_nonnegative {
+ curr -= 1;
+ *buf_ptr.offset(curr) = b'-';
}
}
- // Render 2 more digits, if >2 digits.
- if n >= 100 {
- let d1 = ((n % 100) << 1) as usize;
- n /= 100;
- curr -= 2;
- unsafe {
- ptr::copy_nonoverlapping(lut_ptr.add(d1), buf_ptr.add(curr), 2);
- }
- }
-
- // Render last 1 or 2 digits.
- if n < 10 {
- curr -= 1;
- unsafe {
- *buf_ptr.add(curr) = (n as u8) + b'0';
- }
- } else {
- let d1 = (n << 1) as usize;
- curr -= 2;
- unsafe {
- ptr::copy_nonoverlapping(lut_ptr.add(d1), buf_ptr.add(curr), 2);
- }
- }
-
- if !is_nonnegative {
- curr -= 1;
- unsafe {
- *buf_ptr.add(curr) = b'-';
- }
- }
-
- let len = buf.len() - curr;
- let bytes = unsafe { slice::from_raw_parts(buf_ptr.add(curr), len) };
+ let len = buf.len() - curr as usize;
+ let bytes = unsafe { slice::from_raw_parts(buf_ptr.offset(curr), len) };
unsafe { str::from_utf8_unchecked(bytes) }
}
}
- };
+ )*};
}
-impl_Integer!(i8[len = 4] as u32);
-impl_Integer!(u8[len = 3] as u32);
-impl_Integer!(i16[len = 6] as u32);
-impl_Integer!(u16[len = 5] as u32);
-impl_Integer!(i32[len = 11] as u32);
-impl_Integer!(u32[len = 10] as u32);
-impl_Integer!(i64[len = 20] as u64);
-impl_Integer!(u64[len = 20] as u64);
+const I8_MAX_LEN: usize = 4;
+const U8_MAX_LEN: usize = 3;
+const I16_MAX_LEN: usize = 6;
+const U16_MAX_LEN: usize = 5;
+const I32_MAX_LEN: usize = 11;
+const U32_MAX_LEN: usize = 10;
+const I64_MAX_LEN: usize = 20;
+const U64_MAX_LEN: usize = 20;
-macro_rules! impl_Integer_size {
- ($t:ty as $primitive:ident #[cfg(target_pointer_width = $width:literal)]) => {
- #[cfg(target_pointer_width = $width)]
- impl Integer for $t {
- const MAX_STR_LEN: usize = <$primitive as Integer>::MAX_STR_LEN;
- }
+impl_Integer!(
+ I8_MAX_LEN => i8,
+ U8_MAX_LEN => u8,
+ I16_MAX_LEN => i16,
+ U16_MAX_LEN => u16,
+ I32_MAX_LEN => i32,
+ U32_MAX_LEN => u32
+ as u32);
- #[cfg(target_pointer_width = $width)]
- impl private::Sealed for $t {
- type Buffer = <$primitive as private::Sealed>::Buffer;
+impl_Integer!(I64_MAX_LEN => i64, U64_MAX_LEN => u64 as u64);
- #[inline]
- #[cfg_attr(feature = "no-panic", no_panic)]
- fn write(self, buf: &mut Self::Buffer) -> &str {
- (self as $primitive).write(buf)
- }
- }
- };
-}
+#[cfg(target_pointer_width = "16")]
+impl_Integer!(I16_MAX_LEN => isize, U16_MAX_LEN => usize as u16);
-impl_Integer_size!(isize as i16 #[cfg(target_pointer_width = "16")]);
-impl_Integer_size!(usize as u16 #[cfg(target_pointer_width = "16")]);
-impl_Integer_size!(isize as i32 #[cfg(target_pointer_width = "32")]);
-impl_Integer_size!(usize as u32 #[cfg(target_pointer_width = "32")]);
-impl_Integer_size!(isize as i64 #[cfg(target_pointer_width = "64")]);
-impl_Integer_size!(usize as u64 #[cfg(target_pointer_width = "64")]);
+#[cfg(target_pointer_width = "32")]
+impl_Integer!(I32_MAX_LEN => isize, U32_MAX_LEN => usize as u32);
+
+#[cfg(target_pointer_width = "64")]
+impl_Integer!(I64_MAX_LEN => isize, U64_MAX_LEN => usize as u64);
macro_rules! impl_Integer128 {
- ($t:ty[len = $max_len:expr]) => {
- impl Integer for $t {
- const MAX_STR_LEN: usize = $max_len;
- }
+ ($($max_len:expr => $t:ident),*) => {$(
+ impl Integer for $t {}
impl private::Sealed for $t {
type Buffer = [MaybeUninit; $max_len];
@@ -264,66 +234,57 @@ macro_rules! impl_Integer128 {
let n = if is_nonnegative {
self as u128
} else {
- // Convert negative number to positive by summing 1 to its two's complement.
+ // convert the negative num to positive by summing 1 to it's 2 complement
(!(self as u128)).wrapping_add(1)
};
- let mut curr = buf.len();
+ let mut curr = buf.len() as isize;
let buf_ptr = buf.as_mut_ptr() as *mut u8;
- // Divide by 10^19 which is the highest power less than 2^64.
- let (n, rem) = udiv128::udivmod_1e19(n);
- let buf1 = unsafe {
- buf_ptr.add(curr - u64::MAX_STR_LEN) as *mut [MaybeUninit; u64::MAX_STR_LEN]
- };
- curr -= rem.write(unsafe { &mut *buf1 }).len();
-
- if n != 0 {
- // Memset the base10 leading zeros of rem.
- let target = buf.len() - 19;
- unsafe {
- ptr::write_bytes(buf_ptr.add(target), b'0', curr - target);
- }
- curr = target;
-
- // Divide by 10^19 again.
+ unsafe {
+ // Divide by 10^19 which is the highest power less than 2^64.
let (n, rem) = udiv128::udivmod_1e19(n);
- let buf2 = unsafe {
- buf_ptr.add(curr - u64::MAX_STR_LEN)
- as *mut [MaybeUninit; u64::MAX_STR_LEN]
- };
- curr -= rem.write(unsafe { &mut *buf2 }).len();
+ let buf1 = buf_ptr.offset(curr - U64_MAX_LEN as isize) as *mut [MaybeUninit; U64_MAX_LEN];
+ curr -= rem.write(&mut *buf1).len() as isize;
if n != 0 {
- // Memset the leading zeros.
- let target = buf.len() - 38;
- unsafe {
- ptr::write_bytes(buf_ptr.add(target), b'0', curr - target);
- }
+ // Memset the base10 leading zeros of rem.
+ let target = buf.len() as isize - 19;
+ ptr::write_bytes(buf_ptr.offset(target), b'0', (curr - target) as usize);
curr = target;
- // There is at most one digit left
- // because u128::MAX / 10^19 / 10^19 is 3.
- curr -= 1;
- unsafe {
- *buf_ptr.add(curr) = (n as u8) + b'0';
+ // Divide by 10^19 again.
+ let (n, rem) = udiv128::udivmod_1e19(n);
+ let buf2 = buf_ptr.offset(curr - U64_MAX_LEN as isize) as *mut [MaybeUninit; U64_MAX_LEN];
+ curr -= rem.write(&mut *buf2).len() as isize;
+
+ if n != 0 {
+ // Memset the leading zeros.
+ let target = buf.len() as isize - 38;
+ ptr::write_bytes(buf_ptr.offset(target), b'0', (curr - target) as usize);
+ curr = target;
+
+ // There is at most one digit left
+ // because u128::max / 10^19 / 10^19 is 3.
+ curr -= 1;
+ *buf_ptr.offset(curr) = (n as u8) + b'0';
}
}
- }
- if !is_nonnegative {
- curr -= 1;
- unsafe {
- *buf_ptr.add(curr) = b'-';
+ if !is_nonnegative {
+ curr -= 1;
+ *buf_ptr.offset(curr) = b'-';
}
- }
- let len = buf.len() - curr;
- let bytes = unsafe { slice::from_raw_parts(buf_ptr.add(curr), len) };
- unsafe { str::from_utf8_unchecked(bytes) }
+ let len = buf.len() - curr as usize;
+ let bytes = slice::from_raw_parts(buf_ptr.offset(curr), len);
+ str::from_utf8_unchecked(bytes)
+ }
}
}
- };
+ )*};
}
-impl_Integer128!(i128[len = 40]);
-impl_Integer128!(u128[len = 39]);
+const U128_MAX_LEN: usize = 39;
+const I128_MAX_LEN: usize = 40;
+
+impl_Integer128!(I128_MAX_LEN => i128, U128_MAX_LEN => u128);
diff --git a/third_party/rust/itoa/tests/test.rs b/third_party/rust/itoa/tests/test.rs
index f8275d6e7a17..1d7e8cb60055 100644
--- a/third_party/rust/itoa/tests/test.rs
+++ b/third_party/rust/itoa/tests/test.rs
@@ -26,5 +26,4 @@ test! {
test_u128_0(0u128, "0")
test_u128_max(u128::max_value(), "340282366920938463463374607431768211455")
test_i128_min(i128::min_value(), "-170141183460469231731687303715884105728")
- test_i128_max(i128::max_value(), "170141183460469231731687303715884105727")
}
diff --git a/third_party/rust/relevancy/.cargo-checksum.json b/third_party/rust/relevancy/.cargo-checksum.json
index bdcea0ea7761..1b5fc535ba5b 100644
--- a/third_party/rust/relevancy/.cargo-checksum.json
+++ b/third_party/rust/relevancy/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"317f4e90836bae2153c14c3e564337f9bbb8defb20c18b877a8122fc427f00c8","src/bin/generate-test-data.rs":"7cc80b56929091d02675b9dd9bf4c657a95cda502656cf2ec8d91f56d7a393c7","src/db.rs":"d9dd44501ee3b19c696d8830d3036f7bfe0e8ad7751d5a057f5d8295ebf0bd4f","src/error.rs":"3a1308e65440769d9435fc95528d4ef42994c84d88e1da04ba058491dea387c4","src/ingest.rs":"09ac45d68470512f13a58f832c64d2a3dd85230b6454b3fd8fbecc6c1c735a7b","src/interest.rs":"e4369a1280867438bca12746f71288a03b4d5e180e156f4bc0335046012565f7","src/lib.rs":"80e69c16d0b84ae4b7434cd7cf43ade1c2f556bfa166bfb72a250b1eca8de075","src/ranker.rs":"e71414fe79ade26f3c79dceb5211af4f37984a9cded8c938dc1da8d8d28c2ad3","src/rs.rs":"3ba6ad925e62bbce1790598cb429328191393ec89f2ebc3d1fbf26b0db5de955","src/schema.rs":"38ea82679da2729a571aad936f96469e732ec1c104d7c21fd869842f7a5f30a3","src/url_hash.rs":"2e908316fb70923644d1990dbf470d69ce2f5e99b0c5c3d95ec691590be8ffa5","test-data":"1ef2cd092d59e7e126cd4a514af983d449ed9f9c98708702fd237464a76c2b5e"},"package":null}
\ No newline at end of file
+{"files":{"Cargo.toml":"6765e7643f3e8ad46145166225fa93a08e8a5eb327eca1460340b29c29cd73f5","src/bin/generate-test-data.rs":"7cc80b56929091d02675b9dd9bf4c657a95cda502656cf2ec8d91f56d7a393c7","src/db.rs":"d9dd44501ee3b19c696d8830d3036f7bfe0e8ad7751d5a057f5d8295ebf0bd4f","src/error.rs":"3a1308e65440769d9435fc95528d4ef42994c84d88e1da04ba058491dea387c4","src/ingest.rs":"9f8f7584be5ed27dc962d9137eaa4730948356c724f687e03048a8370c9ed889","src/interest.rs":"e4369a1280867438bca12746f71288a03b4d5e180e156f4bc0335046012565f7","src/lib.rs":"1e57d2f7ca6452f6fe6e5f89a21e33292a86a2a5174b2e541473a69060fa4a32","src/ranker.rs":"e71414fe79ade26f3c79dceb5211af4f37984a9cded8c938dc1da8d8d28c2ad3","src/rs.rs":"fb12d29f75a59af1bfdd320ad01f9bb5a03cf5a3f84738ebdaccb67b84695eef","src/schema.rs":"38ea82679da2729a571aad936f96469e732ec1c104d7c21fd869842f7a5f30a3","src/url_hash.rs":"2e908316fb70923644d1990dbf470d69ce2f5e99b0c5c3d95ec691590be8ffa5","test-data":"1ef2cd092d59e7e126cd4a514af983d449ed9f9c98708702fd237464a76c2b5e"},"package":null}
\ No newline at end of file
diff --git a/third_party/rust/relevancy/Cargo.toml b/third_party/rust/relevancy/Cargo.toml
index 1010cbf556d5..30eb75af5b94 100644
--- a/third_party/rust/relevancy/Cargo.toml
+++ b/third_party/rust/relevancy/Cargo.toml
@@ -68,8 +68,8 @@ features = ["derive"]
path = "../support/sql"
[dependencies.uniffi]
-version = "0.29.0"
+version = "0.28.2"
[build-dependencies.uniffi]
-version = "0.29.0"
+version = "0.28.2"
features = ["build"]
diff --git a/third_party/rust/relevancy/src/ingest.rs b/third_party/rust/relevancy/src/ingest.rs
index aaf413e29e99..7f504de488bd 100644
--- a/third_party/rust/relevancy/src/ingest.rs
+++ b/third_party/rust/relevancy/src/ingest.rs
@@ -5,25 +5,24 @@
use crate::db::RelevancyDao;
use crate::rs::{
from_json, from_json_slice, RelevancyAttachmentData, RelevancyRecord,
- RelevancyRemoteSettingsClient,
+ RelevancyRemoteSettingsClient, REMOTE_SETTINGS_COLLECTION,
};
use crate::url_hash::UrlHash;
use crate::{Error, Interest, RelevancyDb, Result};
use base64::{engine::general_purpose::STANDARD, Engine};
-use remote_settings::RemoteSettingsRecord;
+use remote_settings::{
+ RemoteSettings, RemoteSettingsConfig, RemoteSettingsRecord, RemoteSettingsServer,
+};
// Number of rows to write when inserting interest data before checking for interruption
const WRITE_CHUNK_SIZE: usize = 100;
-pub fn ensure_interest_data_populated(
- db: &RelevancyDb,
- client: C,
-) -> Result<()> {
+pub fn ensure_interest_data_populated(db: &RelevancyDb) -> Result<()> {
if !db.read(|dao| dao.need_to_load_url_interests())? {
return Ok(());
}
- match fetch_interest_data_inner(client) {
+ match fetch_interest_data() {
Ok(data) => {
db.read_write(move |dao| insert_interest_data(data, dao))?;
}
@@ -35,17 +34,27 @@ pub fn ensure_interest_data_populated(
Ok(())
}
+fn fetch_interest_data() -> Result> {
+ let rs = RemoteSettings::new(RemoteSettingsConfig {
+ collection_name: REMOTE_SETTINGS_COLLECTION.to_string(),
+ server: Some(RemoteSettingsServer::Prod),
+ server_url: None,
+ bucket_name: None,
+ })?;
+ fetch_interest_data_inner(rs)
+}
+
/// Fetch the interest data
-fn fetch_interest_data_inner(
- client: C,
+fn fetch_interest_data_inner(
+ rs: impl RelevancyRemoteSettingsClient,
) -> Result> {
- let remote_settings_response = client.get_records()?;
+ let remote_settings_response = rs.get_records()?;
let mut result = vec![];
for record in remote_settings_response.records {
let attachment_data = match &record.attachment {
None => return Err(Error::FetchInterestDataError),
- Some(a) => client.get_attachment(&a.location)?,
+ Some(a) => rs.get_attachment(&a.location)?,
};
let interest = get_interest(&record)?;
let urls = get_hash_urls(attachment_data)?;
diff --git a/third_party/rust/relevancy/src/lib.rs b/third_party/rust/relevancy/src/lib.rs
index 68892190cd24..dae0bf36b702 100644
--- a/third_party/rust/relevancy/src/lib.rs
+++ b/third_party/rust/relevancy/src/lib.rs
@@ -29,13 +29,14 @@ pub use ranker::score;
use error_support::handle_error;
use db::BanditData;
-use std::collections::HashMap;
+use std::{collections::HashMap, sync::Arc};
uniffi::setup_scaffolding!();
#[derive(uniffi::Object)]
pub struct RelevancyStore {
- inner: RelevancyStoreInner,
+ db: RelevancyDb,
+ cache: Mutex,
}
/// Top-level API for the Relevancy component
@@ -45,113 +46,16 @@ impl RelevancyStore {
/// Construct a new RelevancyStore
///
/// This is non-blocking since databases and other resources are lazily opened.
- #[uniffi::constructor]
- #[handle_error(Error)]
- pub fn new(db_path: String) -> ApiResult {
- Ok(Self {
- inner: RelevancyStoreInner::new(db_path, rs::create_client()?),
- })
- }
-
- /// Close any open resources (for example databases)
- ///
- /// Calling `close` will interrupt any in-progress queries on other threads.
- pub fn close(&self) {
- self.inner.close()
- }
-
- /// Interrupt any current database queries
- pub fn interrupt(&self) {
- self.inner.interrupt()
- }
-
- /// Ingest top URLs to build the user's interest vector.
- ///
- /// Consumer should pass a list of the user's top URLs by frecency to this method. It will
- /// then:
- ///
- /// - Download the URL interest data from remote settings. Eventually this should be cached /
- /// stored in the database, but for now it would be fine to download fresh data each time.
- /// - Match the user's top URls against the interest data to build up their interest vector.
- /// - Store the user's interest vector in the database.
- ///
- /// This method may execute for a long time and should only be called from a worker thread.
- #[handle_error(Error)]
- pub fn ingest(&self, top_urls_by_frecency: Vec) -> ApiResult {
- self.inner.ingest(top_urls_by_frecency)
- }
-
- /// Get the user's interest vector directly.
- ///
- /// This runs after [Self::ingest]. It returns the interest vector directly so that the
- /// consumer can show it in an `about:` page.
- #[handle_error(Error)]
- pub fn user_interest_vector(&self) -> ApiResult {
- self.inner.user_interest_vector()
- }
-
- /// Initializes probability distributions for any uninitialized items (arms) within a bandit model.
- ///
- /// This method takes a `bandit` identifier and a list of `arms` (items) and ensures that each arm
- /// in the list has an initialized probability distribution in the database. For each arm, if the
- /// probability distribution does not already exist, it will be created, using Beta(1,1) as default,
- /// which represents uniform distribution.
- #[handle_error(Error)]
- pub fn bandit_init(&self, bandit: String, arms: &[String]) -> ApiResult<()> {
- self.inner.bandit_init(bandit, arms)
- }
-
- /// Selects the optimal item (arm) to display to the user based on a multi-armed bandit model.
- ///
- /// This method takes in a `bandit` identifier and a list of possible `arms` (items) and uses a
- /// Thompson sampling approach to select the arm with the highest probability of success.
- /// For each arm, it retrieves the Beta distribution parameters (alpha and beta) from the
- /// database, creates a Beta distribution, and samples from it to estimate the arm's probability
- /// of success. The arm with the highest sampled probability is selected and returned.
- #[handle_error(Error)]
- pub fn bandit_select(&self, bandit: String, arms: &[String]) -> ApiResult {
- self.inner.bandit_select(bandit, arms)
- }
-
- /// Updates the bandit model's arm data based on user interaction (selection or non-selection).
- ///
- /// This method takes in a `bandit` identifier, an `arm` identifier, and a `selected` flag.
- /// If `selected` is true, it updates the model to reflect a successful selection of the arm,
- /// reinforcing its positive reward probability. If `selected` is false, it updates the
- /// beta (failure) distribution of the arm, reflecting a lack of selection and reinforcing
- /// its likelihood of a negative outcome.
- #[handle_error(Error)]
- pub fn bandit_update(&self, bandit: String, arm: String, selected: bool) -> ApiResult<()> {
- self.inner.bandit_update(bandit, arm, selected)
- }
-
- /// Retrieves the data for a specific bandit and arm.
- #[handle_error(Error)]
- pub fn get_bandit_data(&self, bandit: String, arm: String) -> ApiResult {
- self.inner.get_bandit_data(bandit, arm)
- }
-
- /// Download the interest data from remote settings if needed
- #[handle_error(Error)]
- pub fn ensure_interest_data_populated(&self) -> ApiResult<()> {
- self.inner.ensure_interest_data_populated()
- }
-}
-
-pub(crate) struct RelevancyStoreInner {
- db: RelevancyDb,
- cache: Mutex,
- client: C,
-}
-
-/// Top-level API for the Relevancy component
-// Impl block to be exported via `UniFFI`.
-impl RelevancyStoreInner {
- pub fn new(db_path: String, client: C) -> Self {
+ #[uniffi::constructor(default(remote_settings_service=None))]
+ pub fn new(
+ db_path: String,
+ #[allow(unused)] remote_settings_service: Option<
+ Arc,
+ >,
+ ) -> Self {
Self {
db: RelevancyDb::new(db_path),
cache: Mutex::new(BanditCache::new()),
- client,
}
}
@@ -178,28 +82,30 @@ impl RelevancyStoreInner {
/// - Store the user's interest vector in the database.
///
/// This method may execute for a long time and should only be called from a worker thread.
- pub fn ingest(&self, top_urls_by_frecency: Vec) -> Result {
+ #[handle_error(Error)]
+ pub fn ingest(&self, top_urls_by_frecency: Vec) -> ApiResult {
+ ingest::ensure_interest_data_populated(&self.db)?;
let interest_vec = self.classify(top_urls_by_frecency)?;
self.db
.read_write(|dao| dao.update_frecency_user_interest_vector(&interest_vec))?;
Ok(interest_vec)
}
- pub fn classify(&self, top_urls_by_frecency: Vec) -> Result {
- let mut interest_vector = InterestVector::default();
- for url in top_urls_by_frecency {
- let interest_count = self.db.read(|dao| dao.get_url_interest_vector(&url))?;
- log::trace!("classified: {url} {}", interest_count.summary());
- interest_vector = interest_vector + interest_count;
- }
- Ok(interest_vector)
+ /// Calculate metrics for the validation phase
+ ///
+ /// This runs after [Self::ingest]. It takes the interest vector that ingest created and
+ /// calculates a set of metrics that we can report to glean.
+ #[handle_error(Error)]
+ pub fn calculate_metrics(&self) -> ApiResult {
+ todo!()
}
/// Get the user's interest vector directly.
///
/// This runs after [Self::ingest]. It returns the interest vector directly so that the
/// consumer can show it in an `about:` page.
- pub fn user_interest_vector(&self) -> Result {
+ #[handle_error(Error)]
+ pub fn user_interest_vector(&self) -> ApiResult {
self.db.read(|dao| dao.get_frecency_user_interest_vector())
}
@@ -209,7 +115,8 @@ impl RelevancyStoreInner {
/// in the list has an initialized probability distribution in the database. For each arm, if the
/// probability distribution does not already exist, it will be created, using Beta(1,1) as default,
/// which represents uniform distribution.
- pub fn bandit_init(&self, bandit: String, arms: &[String]) -> Result<()> {
+ #[handle_error(Error)]
+ pub fn bandit_init(&self, bandit: String, arms: &[String]) -> ApiResult<()> {
self.db.read_write(|dao| {
for arm in arms {
dao.initialize_multi_armed_bandit(&bandit, arm)?;
@@ -227,7 +134,8 @@ impl RelevancyStoreInner {
/// For each arm, it retrieves the Beta distribution parameters (alpha and beta) from the
/// database, creates a Beta distribution, and samples from it to estimate the arm's probability
/// of success. The arm with the highest sampled probability is selected and returned.
- pub fn bandit_select(&self, bandit: String, arms: &[String]) -> Result {
+ #[handle_error(Error)]
+ pub fn bandit_select(&self, bandit: String, arms: &[String]) -> ApiResult {
let mut cache = self.cache.lock();
let mut best_sample = f64::MIN;
let mut selected_arm = String::new();
@@ -247,7 +155,7 @@ impl RelevancyStoreInner {
}
}
- Ok(selected_arm)
+ return Ok(selected_arm);
}
/// Updates the bandit model's arm data based on user interaction (selection or non-selection).
@@ -257,7 +165,8 @@ impl RelevancyStoreInner {
/// reinforcing its positive reward probability. If `selected` is false, it updates the
/// beta (failure) distribution of the arm, reflecting a lack of selection and reinforcing
/// its likelihood of a negative outcome.
- pub fn bandit_update(&self, bandit: String, arm: String, selected: bool) -> Result<()> {
+ #[handle_error(Error)]
+ pub fn bandit_update(&self, bandit: String, arm: String, selected: bool) -> ApiResult<()> {
let mut cache = self.cache.lock();
cache.clear(&bandit, &arm);
@@ -269,17 +178,14 @@ impl RelevancyStoreInner {
}
/// Retrieves the data for a specific bandit and arm.
- pub fn get_bandit_data(&self, bandit: String, arm: String) -> Result {
+ #[handle_error(Error)]
+ pub fn get_bandit_data(&self, bandit: String, arm: String) -> ApiResult {
let bandit_data = self
.db
.read(|dao| dao.retrieve_bandit_data(&bandit, &arm))?;
Ok(bandit_data)
}
-
- pub fn ensure_interest_data_populated(&self) -> Result<()> {
- ingest::ensure_interest_data_populated(&self.db, &self.client)
- }
}
#[derive(Default)]
@@ -334,6 +240,25 @@ impl BanditCache {
}
}
+impl RelevancyStore {
+ /// Download the interest data from remote settings if needed
+ #[handle_error(Error)]
+ pub fn ensure_interest_data_populated(&self) -> ApiResult<()> {
+ ingest::ensure_interest_data_populated(&self.db)?;
+ Ok(())
+ }
+
+ pub fn classify(&self, top_urls_by_frecency: Vec) -> Result {
+ let mut interest_vector = InterestVector::default();
+ for url in top_urls_by_frecency {
+ let interest_count = self.db.read(|dao| dao.get_url_interest_vector(&url))?;
+ log::trace!("classified: {url} {}", interest_count.summary());
+ interest_vector = interest_vector + interest_count;
+ }
+ Ok(interest_vector)
+ }
+}
+
/// Interest metrics that we want to send to Glean as part of the validation process. These contain
/// the cosine similarity when comparing the user's interest against various interest vectors that
/// consumers may use.
@@ -365,7 +290,6 @@ mod test {
use crate::url_hash::hash_url;
use super::*;
- use crate::rs::test::NullRelavancyRemoteSettingsClient;
use rand::Rng;
use std::collections::HashMap;
@@ -387,12 +311,10 @@ mod test {
}
}
- fn setup_store(
- test_id: &'static str,
- ) -> RelevancyStoreInner {
- let relevancy_store = RelevancyStoreInner::new(
+ fn setup_store(test_id: &'static str) -> RelevancyStore {
+ let relevancy_store = RelevancyStore::new(
format!("file:test_{test_id}_data?mode=memory&cache=shared"),
- NullRelavancyRemoteSettingsClient,
+ None,
);
relevancy_store
.db
diff --git a/third_party/rust/relevancy/src/rs.rs b/third_party/rust/relevancy/src/rs.rs
index 1273ebd9cbee..aee4e17a3713 100644
--- a/third_party/rust/relevancy/src/rs.rs
+++ b/third_party/rust/relevancy/src/rs.rs
@@ -4,9 +4,7 @@
*/
use crate::{Error, Result};
-use remote_settings::{
- RemoteSettings, RemoteSettingsConfig, RemoteSettingsResponse, RemoteSettingsServer,
-};
+use remote_settings::RemoteSettingsResponse;
use serde::Deserialize;
/// The Remote Settings collection name.
pub(crate) const REMOTE_SETTINGS_COLLECTION: &str = "content-relevance";
@@ -35,25 +33,6 @@ impl RelevancyRemoteSettingsClient for remote_settings::RemoteSettings {
}
}
-impl RelevancyRemoteSettingsClient for &T {
- fn get_records(&self) -> Result {
- (*self).get_records()
- }
-
- fn get_attachment(&self, location: &str) -> Result> {
- (*self).get_attachment(location)
- }
-}
-
-pub fn create_client() -> Result {
- Ok(RemoteSettings::new(RemoteSettingsConfig {
- collection_name: REMOTE_SETTINGS_COLLECTION.to_string(),
- server: Some(RemoteSettingsServer::Prod),
- server_url: None,
- bucket_name: None,
- })?)
-}
-
/// A record in the Relevancy Remote Settings collection.
#[derive(Clone, Debug, Deserialize)]
pub struct RelevancyRecord {
@@ -104,23 +83,3 @@ pub fn from_json_slice(value: &[u8]) -> Result Result {
- panic!("NullRelavancyRemoteSettingsClient::get_records was called")
- }
-
- fn get_attachment(&self, _location: &str) -> Result> {
- panic!("NullRelavancyRemoteSettingsClient::get_records was called")
- }
- }
-}
diff --git a/third_party/rust/remote_settings/.cargo-checksum.json b/third_party/rust/remote_settings/.cargo-checksum.json
index 21ab0f972eb2..f705e6573504 100644
--- a/third_party/rust/remote_settings/.cargo-checksum.json
+++ b/third_party/rust/remote_settings/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"cb6d9b970b4d2ac267e218f666c2befdcb07030dbac2c428091f80dbc6f1455f","dumps/main/attachments/regions/world":"00b308033d44f61612b962f572765d14a3999586d92fc8b9fff2217a1ae070e8","dumps/main/attachments/regions/world-buffered":"1d3ed6954fac2a5b31302f5d3e8186c5fa08a20239afc0643ca5dfbb4d8a86fc","dumps/main/attachments/regions/world-buffered.meta.json":"914a71376a152036aceccb6877e079fbb9e3373c6219f24f00dd30e901a72cce","dumps/main/attachments/regions/world.meta.json":"2a47d77834997b98e563265d299723e7f7fd64c8c7a5731afc722862333d6fbd","dumps/main/regions.json":"e8990158373f82d3f89fed5089cf29e4177cc85904479128728e05025e9a0c0c","dumps/main/search-config-v2.json":"c33698dd66ed7f9dbbda857cad4f890455189e932e24c0d3b335e3e95b65239f","dumps/main/search-telemetry-v2.json":"140b3d322d6e317d97542725920be9f29c6b1d9c5f224e8c31995dddfec6bf1b","src/cache.rs":"c6179802017b43885136e7d64004890cc13e8c2d4742e04073cf404b578f63db","src/client.rs":"2399ec403771e2a4552d9ea513e3dcca449f45391eeb6bd838b272fc0085bc3b","src/config.rs":"603c7241483861a8c690464f4b50dd3dc281da7edf8aa522f90f175b85a7fa5f","src/error.rs":"20e40a0229842e12888bc43c4159e078f1d09272a43c51dae87989f76952f93b","src/jexl_filter.rs":"e4a9e29a80b216d777771434aaa6c58f627288e4b59ffa11c83dbd8e37889aa5","src/lib.rs":"464157ddf3b906c8f480c73dc3161890c8bc76cc95345c6857fee7a57385dc29","src/macros.rs":"6b06d0ba42ee95235bfd71bac1a0eed02f60c894775ebee64165648b10e932c4","src/schema.rs":"348e0d5ad1840aaae796b537d21381ef91bd75be262138bfec376d9f88d205b3","src/service.rs":"73da6cecc8c804b8e55d35ea3c71c1dd1e4099ad60532b7b0da153f9cde1eb21","src/signatures.rs":"baa2dae76abd8166158fea4676e67e17c17b65af6968de52768350409dbd7092","src/storage.rs":"5ae489964d82a0305a6b250d92f4c1925cc722e44890c24f681dd97b0258b9f4","uniffi.toml":"bd7cc0e7c1981f53938f429c4f2541ac454ed4160a8a0b4670659e38acd23ee5"},"package":null}
\ No newline at end of file
+{"files":{"Cargo.toml":"5d106662de7bd8f65f1c50edb46a11f54f38a91e08ccf57521978d42086fb53e","dumps/main/attachments/regions/world":"00b308033d44f61612b962f572765d14a3999586d92fc8b9fff2217a1ae070e8","dumps/main/attachments/regions/world-buffered":"1d3ed6954fac2a5b31302f5d3e8186c5fa08a20239afc0643ca5dfbb4d8a86fc","dumps/main/attachments/regions/world-buffered.meta.json":"914a71376a152036aceccb6877e079fbb9e3373c6219f24f00dd30e901a72cce","dumps/main/attachments/regions/world.meta.json":"2a47d77834997b98e563265d299723e7f7fd64c8c7a5731afc722862333d6fbd","dumps/main/regions.json":"e8990158373f82d3f89fed5089cf29e4177cc85904479128728e05025e9a0c0c","dumps/main/search-config-v2.json":"c33698dd66ed7f9dbbda857cad4f890455189e932e24c0d3b335e3e95b65239f","dumps/main/search-telemetry-v2.json":"140b3d322d6e317d97542725920be9f29c6b1d9c5f224e8c31995dddfec6bf1b","src/cache.rs":"c6179802017b43885136e7d64004890cc13e8c2d4742e04073cf404b578f63db","src/client.rs":"875a0bc5c8b3572ed86cd4b43277a715f95592c3d8ab1dd6528394ab6ad71ec3","src/config.rs":"603c7241483861a8c690464f4b50dd3dc281da7edf8aa522f90f175b85a7fa5f","src/error.rs":"20e40a0229842e12888bc43c4159e078f1d09272a43c51dae87989f76952f93b","src/jexl_filter.rs":"e4a9e29a80b216d777771434aaa6c58f627288e4b59ffa11c83dbd8e37889aa5","src/lib.rs":"464157ddf3b906c8f480c73dc3161890c8bc76cc95345c6857fee7a57385dc29","src/macros.rs":"6b06d0ba42ee95235bfd71bac1a0eed02f60c894775ebee64165648b10e932c4","src/schema.rs":"348e0d5ad1840aaae796b537d21381ef91bd75be262138bfec376d9f88d205b3","src/service.rs":"73da6cecc8c804b8e55d35ea3c71c1dd1e4099ad60532b7b0da153f9cde1eb21","src/signatures.rs":"baa2dae76abd8166158fea4676e67e17c17b65af6968de52768350409dbd7092","src/storage.rs":"5ae489964d82a0305a6b250d92f4c1925cc722e44890c24f681dd97b0258b9f4","uniffi.toml":"bd7cc0e7c1981f53938f429c4f2541ac454ed4160a8a0b4670659e38acd23ee5"},"package":null}
\ No newline at end of file
diff --git a/third_party/rust/remote_settings/Cargo.toml b/third_party/rust/remote_settings/Cargo.toml
index 5e0a4f9c7120..907fa47273be 100644
--- a/third_party/rust/remote_settings/Cargo.toml
+++ b/third_party/rust/remote_settings/Cargo.toml
@@ -82,7 +82,7 @@ features = ["derive"]
path = "../support/sql"
[dependencies.uniffi]
-version = "0.29.0"
+version = "0.28.2"
[dependencies.viaduct]
path = "../viaduct"
@@ -100,5 +100,5 @@ features = ["preserve_order"]
path = "../support/viaduct-reqwest"
[build-dependencies.uniffi]
-version = "0.29.0"
+version = "0.28.2"
features = ["build"]
diff --git a/third_party/rust/remote_settings/src/client.rs b/third_party/rust/remote_settings/src/client.rs
index c742fcefdb7a..663fabac300b 100644
--- a/third_party/rust/remote_settings/src/client.rs
+++ b/third_party/rust/remote_settings/src/client.rs
@@ -9,7 +9,9 @@ use crate::jexl_filter::JexlFilter;
use crate::signatures;
use crate::storage::Storage;
use crate::RemoteSettingsContext;
-use crate::{packaged_attachments, packaged_collections, RemoteSettingsServer};
+use crate::{
+ packaged_attachments, packaged_collections, RemoteSettingsServer, UniffiCustomTypeConverter,
+};
use parking_lot::Mutex;
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
@@ -933,9 +935,11 @@ pub struct Attachment {
// conflicted with the declaration in Nimbus. This shouldn't really impact Android, since the type
// is converted into the platform JsonObject thanks to the UniFFI binding.
pub type RsJsonObject = serde_json::Map;
-uniffi::custom_type!(RsJsonObject, String, {
- remote,
- try_lift: |val| {
+uniffi::custom_type!(RsJsonObject, String);
+
+impl UniffiCustomTypeConverter for RsJsonObject {
+ type Builtin = String;
+ fn into_custom(val: Self::Builtin) -> uniffi::Result {
let json: serde_json::Value = serde_json::from_str(&val)?;
match json {
@@ -944,9 +948,12 @@ uniffi::custom_type!(RsJsonObject, String, {
"Unexpected JSON-non-object in the bagging area"
)),
}
- },
- lower: |obj| serde_json::Value::Object(obj).to_string(),
-});
+ }
+
+ fn from_custom(obj: Self) -> Self::Builtin {
+ serde_json::Value::Object(obj).to_string()
+ }
+}
#[derive(Clone, Debug)]
pub(crate) struct RemoteState {
diff --git a/third_party/rust/rinja/.cargo-checksum.json b/third_party/rust/rinja/.cargo-checksum.json
deleted file mode 100644
index 32424bfbe975..000000000000
--- a/third_party/rust/rinja/.cargo-checksum.json
+++ /dev/null
@@ -1 +0,0 @@
-{"files":{"Cargo.toml":"1084eac24e33bf9001d7ef9a5e1ae7df52c746f2270c803c36efd33ede9484d8","LICENSE-APACHE":"87cb0d734c723c083e51c825930ff42bce28596b52dee15567f6b28f19c195e3","LICENSE-MIT":"df20e0180764bf5bd76f74d47bc9e8c0069a666401629c390003a1d5eba99c92","README.md":"61321f42c5a96fb5f612deb3b3ada1a3efdf3b3c829f1afe1e5098e799ad4697","benches/escape.rs":"d365d78696d0e3ec1f7a90dd62a46b94d6e86d691e71a4b5b98a0032677f663e","benches/strings.inc":"76b8ac1b79abeaced7831a03aaedf8dd85057eac78e0cecbe68f589d38f0592b","benches/to-json.rs":"219138903674e5e84fabff3c97ca034ffadb63dc9df90389fd258572e1dbfd57","src/error.rs":"f2689ae450957bae152f407533ef14dec137034143fe154193767b502bbeb023","src/filters/builtin.rs":"9eaa82fc1fd0a1520a07bc1a7e70441be168dd6b950cc867f05af7031ddd7103","src/filters/escape.rs":"c1cf1bfe53de2c775e945ff50142b31148d1684822f2d09a1516a5e4f8f709a6","src/filters/humansize.rs":"cee107916d8f209f2bbef6a04d3fee92c427f52b1b9b71fdd1c6f07ed068c368","src/filters/json.rs":"10113f41912d080881512669bb939723be262dde5f3376eea21eb17794b3a697","src/filters/mod.rs":"127ada7b3f91f97a2f53f70ec7000ac7b393641484b393d58bc8fe0c09a4d224","src/filters/urlencode.rs":"4c7ecae5a53c740aaa260e058ed43777060906d154dc18708adcd8d200e4533a","src/helpers.rs":"076eb47b132e9780d15d424540e50c58f19ff66ea01d10b2f3020fd0563702f7","src/html.rs":"ebf54b05aa1498e84692eb66d216a13a17e86b1589974dc69bac46b3c17fb4ce","src/lib.rs":"0b0c85d13ea4cf7d8c507b5255a745f896749f989894ee2e100de2026bb62046"},"package":"3dc4940d00595430b3d7d5a01f6222b5e5b51395d1120bdb28d854bb8abb17a5"}
\ No newline at end of file
diff --git a/third_party/rust/rinja/Cargo.toml b/third_party/rust/rinja/Cargo.toml
deleted file mode 100644
index 37908737cc4c..000000000000
--- a/third_party/rust/rinja/Cargo.toml
+++ /dev/null
@@ -1,119 +0,0 @@
-# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
-#
-# When uploading crates to the registry Cargo will automatically
-# "normalize" Cargo.toml files for maximal compatibility
-# with all versions of Cargo and also rewrite `path` dependencies
-# to registry (e.g., crates.io) dependencies.
-#
-# If you are reading this file be aware that the original Cargo.toml
-# will likely look very different (and much more reasonable).
-# See Cargo.toml.orig for the original contents.
-
-[package]
-edition = "2021"
-rust-version = "1.71"
-name = "rinja"
-version = "0.3.5"
-build = false
-autobins = false
-autoexamples = false
-autotests = false
-autobenches = false
-description = "Type-safe, compiled Jinja-like templates for Rust"
-homepage = "https://rinja.readthedocs.io/"
-documentation = "https://docs.rs/rinja"
-readme = "README.md"
-keywords = [
- "markup",
- "template",
- "jinja2",
- "html",
-]
-categories = ["template-engine"]
-license = "MIT OR Apache-2.0"
-repository = "https://github.com/rinja-rs/rinja"
-
-[package.metadata.docs.rs]
-features = ["full"]
-rustdoc-args = [
- "--generate-link-to-definition",
- "--cfg=docsrs",
-]
-
-[lib]
-name = "rinja"
-path = "src/lib.rs"
-
-[[bench]]
-name = "escape"
-path = "benches/escape.rs"
-harness = false
-
-[[bench]]
-name = "to-json"
-path = "benches/to-json.rs"
-harness = false
-required-features = ["serde_json"]
-
-[dependencies.humansize]
-version = "2"
-optional = true
-
-[dependencies.itoa]
-version = "1.0.11"
-
-[dependencies.num-traits]
-version = "0.2.6"
-optional = true
-
-[dependencies.percent-encoding]
-version = "2.1.0"
-optional = true
-
-[dependencies.rinja_derive]
-version = "=0.3.5"
-
-[dependencies.serde]
-version = "1.0"
-optional = true
-
-[dependencies.serde_json]
-version = "1.0"
-optional = true
-
-[dev-dependencies.criterion]
-version = "0.5"
-
-[features]
-code-in-doc = ["rinja_derive/code-in-doc"]
-config = ["rinja_derive/config"]
-default = [
- "config",
- "humansize",
- "urlencode",
-]
-full = [
- "default",
- "code-in-doc",
- "serde_json",
-]
-humansize = [
- "rinja_derive/humansize",
- "dep:humansize",
-]
-serde_json = [
- "rinja_derive/serde_json",
- "dep:serde",
- "dep:serde_json",
-]
-urlencode = [
- "rinja_derive/urlencode",
- "dep:percent-encoding",
-]
-with-actix-web = ["rinja_derive/with-actix-web"]
-with-axum = ["rinja_derive/with-axum"]
-with-rocket = ["rinja_derive/with-rocket"]
-with-warp = ["rinja_derive/with-warp"]
-
-[badges.maintenance]
-status = "actively-developed"
diff --git a/third_party/rust/rinja/README.md b/third_party/rust/rinja/README.md
deleted file mode 100644
index 1fc46c89f9d9..000000000000
--- a/third_party/rust/rinja/README.md
+++ /dev/null
@@ -1,81 +0,0 @@
-# rinja
-
-[](https://crates.io/crates/rinja)
-[](https://github.com/rinja-rs/rinja/actions/workflows/rust.yml)
-[](https://rinja.readthedocs.io/)
-[](https://docs.rs/rinja/)
-
-**Rinja** implements a template rendering engine based on [Jinja](https://jinja.palletsprojects.com/),
-and generates type-safe Rust code from your templates at compile time
-based on a user-defined `struct` to hold the template's context.
-See below for an example. It is a fork of [Askama](https://crates.io/crates/askama).
-
-All feedback welcome! Feel free to file bugs, requests for documentation and
-any other feedback to the [issue tracker][issues].
-
-You can find the documentation about our syntax, features, configuration in our book:
-[rinja.readthedocs.io](https://rinja.readthedocs.io/).
-
-Have a look at our [*Rinja Playground*](https://rinja-rs.github.io/play-rinja/),
-if you want to try out rinja's code generation online.
-
-### Feature highlights
-
-* Construct templates using a familiar, easy-to-use syntax
-* Benefit from the safety provided by Rust's type system
-* Template code is compiled into your crate for optimal performance
-* Optional built-in support for Actix, Axum, Rocket, and warp web frameworks
-* Debugging features to assist you in template development
-* Templates must be valid UTF-8 and produce UTF-8 when rendered
-* Works on stable Rust
-
-### Supported in templates
-
-* Template inheritance
-* Loops, if/else statements and include support
-* Macro support
-* Variables (no mutability allowed)
-* Some built-in filters, and the ability to use your own
-* Whitespace suppressing with '-' markers
-* Opt-out HTML escaping
-* Syntax customization
-
-[issues]: https://github.com/rinja-rs/rinja/issues
-
-
-How to get started
-------------------
-
-First, add the rinja dependency to your crate's `Cargo.toml`:
-
-```sh
-cargo add rinja
-```
-
-Now create a directory called `templates` in your crate root.
-In it, create a file called `hello.html`, containing the following:
-
-```jinja
-Hello, {{ name }}!
-```
-
-In any Rust file inside your crate, add the following:
-
-```rust
-use rinja::Template; // bring trait in scope
-
-#[derive(Template)] // this will generate the code...
-#[template(path = "hello.html")] // using the template in this path, relative
- // to the `templates` dir in the crate root
-struct HelloTemplate<'a> { // the name of the struct can be anything
- name: &'a str, // the field name should match the variable name
- // in your template
-}
-
-fn main() {
- let hello = HelloTemplate { name: "world" }; // instantiate your struct
- println!("{}", hello.render().unwrap()); // then render it.
-}
-```
-
-You should now be able to compile and run this code.
diff --git a/third_party/rust/rinja/benches/escape.rs b/third_party/rust/rinja/benches/escape.rs
deleted file mode 100644
index 74d73f5addf1..000000000000
--- a/third_party/rust/rinja/benches/escape.rs
+++ /dev/null
@@ -1,19 +0,0 @@
-use criterion::{Criterion, black_box, criterion_group, criterion_main};
-use rinja::filters::{Html, escape};
-
-criterion_main!(benches);
-criterion_group!(benches, functions);
-
-fn functions(c: &mut Criterion) {
- c.bench_function("Escaping", escaping);
-}
-
-fn escaping(b: &mut criterion::Bencher<'_>) {
- b.iter(|| {
- for &s in black_box(STRINGS) {
- let _ = black_box(format!("{}", escape(s, Html).unwrap()));
- }
- });
-}
-
-const STRINGS: &[&str] = include!("strings.inc");
diff --git a/third_party/rust/rinja/benches/to-json.rs b/third_party/rust/rinja/benches/to-json.rs
deleted file mode 100644
index 9d805ac60594..000000000000
--- a/third_party/rust/rinja/benches/to-json.rs
+++ /dev/null
@@ -1,70 +0,0 @@
-use criterion::{Criterion, black_box, criterion_group, criterion_main};
-use rinja::Template;
-
-criterion_main!(benches);
-criterion_group!(benches, functions);
-
-fn functions(c: &mut Criterion) {
- c.bench_function("escape JSON", escape_json);
- c.bench_function("escape JSON (pretty)", escape_json_pretty);
- c.bench_function("escape JSON for HTML", escape_json_for_html);
- c.bench_function("escape JSON for HTML (pretty)", escape_json_for_html_pretty);
-}
-
-fn escape_json(b: &mut criterion::Bencher<'_>) {
- #[derive(Template)]
- #[template(ext = "html", source = "{{self.0|json|safe}}")]
- struct Tmpl(&'static str);
-
- b.iter(|| {
- let mut len = 0;
- for &s in black_box(STRINGS) {
- len += Tmpl(s).to_string().len();
- }
- len
- });
-}
-
-fn escape_json_pretty(b: &mut criterion::Bencher<'_>) {
- #[derive(Template)]
- #[template(ext = "html", source = "{{self.0|json(2)|safe}}")]
- struct Tmpl(&'static str);
-
- b.iter(|| {
- let mut len = 0;
- for &s in black_box(STRINGS) {
- len += Tmpl(s).to_string().len();
- }
- len
- });
-}
-
-fn escape_json_for_html(b: &mut criterion::Bencher<'_>) {
- #[derive(Template)]
- #[template(ext = "html", source = "{{self.0|json}}")]
- struct Tmpl(&'static str);
-
- b.iter(|| {
- let mut len = 0;
- for &s in black_box(STRINGS) {
- len += Tmpl(s).to_string().len();
- }
- len
- });
-}
-
-fn escape_json_for_html_pretty(b: &mut criterion::Bencher<'_>) {
- #[derive(Template)]
- #[template(ext = "html", source = "{{self.0|json(2)}}")]
- struct Tmpl(&'static str);
-
- b.iter(|| {
- let mut len = 0;
- for &s in black_box(STRINGS) {
- len += Tmpl(s).to_string().len();
- }
- len
- });
-}
-
-const STRINGS: &[&str] = include!("strings.inc");
diff --git a/third_party/rust/rinja/src/filters/builtin.rs b/third_party/rust/rinja/src/filters/builtin.rs
deleted file mode 100644
index c8a2fa32ead6..000000000000
--- a/third_party/rust/rinja/src/filters/builtin.rs
+++ /dev/null
@@ -1,1144 +0,0 @@
-use std::cell::Cell;
-use std::convert::Infallible;
-use std::fmt::{self, Write};
-use std::ops::Deref;
-use std::pin::Pin;
-
-use super::escape::{FastWritable, HtmlSafeOutput};
-use crate::{Error, Result};
-
-// MAX_LEN is maximum allowed length for filters.
-const MAX_LEN: usize = 10_000;
-
-/// Formats arguments according to the specified format
-///
-/// The *second* argument to this filter must be a string literal (as in normal
-/// Rust). The two arguments are passed through to the `format!()`
-/// [macro](https://doc.rust-lang.org/stable/std/macro.format.html) by
-/// the Rinja code generator, but the order is swapped to support filter
-/// composition.
-///
-/// ```ignore
-/// {{ value|fmt("{:?}") }}
-/// ```
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// /// {{ value|fmt("{:?}") }}
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Example {
-/// value: (usize, usize),
-/// }
-///
-/// assert_eq!(
-/// Example { value: (3, 4) }.to_string(),
-/// "(3, 4)
"
-/// );
-/// # }
-/// ```
-///
-/// Compare with [format](./fn.format.html).
-pub fn fmt() {}
-
-/// Formats arguments according to the specified format
-///
-/// The first argument to this filter must be a string literal (as in normal
-/// Rust). All arguments are passed through to the `format!()`
-/// [macro](https://doc.rust-lang.org/stable/std/macro.format.html) by
-/// the Rinja code generator.
-///
-/// ```ignore
-/// {{ "{:?}{:?}"|format(value, other_value) }}
-/// ```
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// /// {{ "{:?}"|format(value) }}
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Example {
-/// value: (usize, usize),
-/// }
-///
-/// assert_eq!(
-/// Example { value: (3, 4) }.to_string(),
-/// "(3, 4)
"
-/// );
-/// # }
-/// ```
-///
-/// Compare with [fmt](./fn.fmt.html).
-pub fn format() {}
-
-/// Replaces line breaks in plain text with appropriate HTML
-///
-/// A single newline becomes an HTML line break ` ` and a new line
-/// followed by a blank line becomes a paragraph break ``.
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// ///
{{ example|linebreaks }}
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Example<'a> {
-/// example: &'a str,
-/// }
-///
-/// assert_eq!(
-/// Example { example: "Foo\nBar\n\nBaz" }.to_string(),
-/// ""
-/// );
-/// # }
-/// ```
-#[inline]
-pub fn linebreaks(s: impl fmt::Display) -> Result, fmt::Error> {
- fn linebreaks(s: String) -> String {
- let linebroken = s.replace("\n\n", "").replace('\n', " ");
- format!("
{linebroken}
")
- }
- Ok(HtmlSafeOutput(linebreaks(try_to_string(s)?)))
-}
-
-/// Converts all newlines in a piece of plain text to HTML line breaks
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// /// {{ lines|linebreaksbr }}
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Example<'a> {
-/// lines: &'a str,
-/// }
-///
-/// assert_eq!(
-/// Example { lines: "a\nb\nc" }.to_string(),
-/// "a b c
"
-/// );
-/// # }
-/// ```
-#[inline]
-pub fn linebreaksbr(s: impl fmt::Display) -> Result, fmt::Error> {
- fn linebreaksbr(s: String) -> String {
- s.replace('\n', " ")
- }
- Ok(HtmlSafeOutput(linebreaksbr(try_to_string(s)?)))
-}
-
-/// Replaces only paragraph breaks in plain text with appropriate HTML
-///
-/// A new line followed by a blank line becomes a paragraph break ``.
-/// Paragraph tags only wrap content; empty paragraphs are removed.
-/// No ` ` tags are added.
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// /// {{ lines|paragraphbreaks }}
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Example<'a> {
-/// lines: &'a str,
-/// }
-///
-/// assert_eq!(
-/// Example { lines: "Foo\nBar\n\nBaz" }.to_string(),
-/// "
Foo\nBar
Baz
"
-/// );
-/// # }
-/// ```
-#[inline]
-pub fn paragraphbreaks(s: impl fmt::Display) -> Result, fmt::Error> {
- fn paragraphbreaks(s: String) -> String {
- let linebroken = s.replace("\n\n", "").replace("
", "");
- format!("{linebroken}
")
- }
- Ok(HtmlSafeOutput(paragraphbreaks(try_to_string(s)?)))
-}
-
-/// Converts to lowercase
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// /// {{ word|lower }}
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Example<'a> {
-/// word: &'a str,
-/// }
-///
-/// assert_eq!(
-/// Example { word: "FOO" }.to_string(),
-/// "foo
"
-/// );
-///
-/// assert_eq!(
-/// Example { word: "FooBar" }.to_string(),
-/// "foobar
"
-/// );
-/// # }
-/// ```
-#[inline]
-pub fn lower(s: impl fmt::Display) -> Result {
- fn lower(s: String) -> Result {
- Ok(s.to_lowercase())
- }
- lower(try_to_string(s)?)
-}
-
-/// Converts to lowercase, alias for the `|lower` filter
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// /// {{ word|lowercase }}
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Example<'a> {
-/// word: &'a str,
-/// }
-///
-/// assert_eq!(
-/// Example { word: "FOO" }.to_string(),
-/// "foo
"
-/// );
-///
-/// assert_eq!(
-/// Example { word: "FooBar" }.to_string(),
-/// "foobar
"
-/// );
-/// # }
-/// ```
-#[inline]
-pub fn lowercase(s: impl fmt::Display) -> Result {
- lower(s)
-}
-
-/// Converts to uppercase
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// /// {{ word|upper }}
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Example<'a> {
-/// word: &'a str,
-/// }
-///
-/// assert_eq!(
-/// Example { word: "foo" }.to_string(),
-/// "FOO
"
-/// );
-///
-/// assert_eq!(
-/// Example { word: "FooBar" }.to_string(),
-/// "FOOBAR
"
-/// );
-/// # }
-/// ```
-#[inline]
-pub fn upper(s: impl fmt::Display) -> Result {
- fn upper(s: String) -> Result {
- Ok(s.to_uppercase())
- }
- upper(try_to_string(s)?)
-}
-
-/// Converts to uppercase, alias for the `|upper` filter
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// /// {{ word|uppercase }}
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Example<'a> {
-/// word: &'a str,
-/// }
-///
-/// assert_eq!(
-/// Example { word: "foo" }.to_string(),
-/// "FOO
"
-/// );
-///
-/// assert_eq!(
-/// Example { word: "FooBar" }.to_string(),
-/// "FOOBAR
"
-/// );
-/// # }
-/// ```
-#[inline]
-pub fn uppercase(s: impl fmt::Display) -> Result {
- upper(s)
-}
-
-/// Strip leading and trailing whitespace
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// /// {{ example|trim }}
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Example<'a> {
-/// example: &'a str,
-/// }
-///
-/// assert_eq!(
-/// Example { example: " Hello\tworld\t" }.to_string(),
-/// "Hello\tworld
"
-/// );
-/// # }
-/// ```
-pub fn trim(s: T) -> Result {
- struct Collector(String);
-
- impl fmt::Write for Collector {
- fn write_str(&mut self, s: &str) -> fmt::Result {
- match self.0.is_empty() {
- true => self.0.write_str(s.trim_start()),
- false => self.0.write_str(s),
- }
- }
- }
-
- let mut collector = Collector(String::new());
- write!(collector, "{s}")?;
- let Collector(mut s) = collector;
- s.truncate(s.trim_end().len());
- Ok(s)
-}
-
-/// Limit string length, appends '...' if truncated
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// /// {{ example|truncate(2) }}
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Example<'a> {
-/// example: &'a str,
-/// }
-///
-/// assert_eq!(
-/// Example { example: "hello" }.to_string(),
-/// "he...
"
-/// );
-/// # }
-/// ```
-#[inline]
-pub fn truncate(
- source: S,
- remaining: usize,
-) -> Result, Infallible> {
- Ok(TruncateFilter { source, remaining })
-}
-
-pub struct TruncateFilter {
- source: S,
- remaining: usize,
-}
-
-impl fmt::Display for TruncateFilter {
- #[inline]
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- write!(TruncateWriter::new(f, self.remaining), "{}", self.source)
- }
-}
-
-impl FastWritable for TruncateFilter {
- #[inline]
- fn write_into(&self, dest: &mut W) -> fmt::Result {
- self.source
- .write_into(&mut TruncateWriter::new(dest, self.remaining))
- }
-}
-
-struct TruncateWriter {
- dest: Option,
- remaining: usize,
-}
-
-impl TruncateWriter {
- fn new(dest: W, remaining: usize) -> Self {
- TruncateWriter {
- dest: Some(dest),
- remaining,
- }
- }
-}
-
-impl fmt::Write for TruncateWriter {
- fn write_str(&mut self, s: &str) -> fmt::Result {
- let Some(dest) = &mut self.dest else {
- return Ok(());
- };
- let mut rem = self.remaining;
- if rem >= s.len() {
- dest.write_str(s)?;
- self.remaining -= s.len();
- } else {
- if rem > 0 {
- while !s.is_char_boundary(rem) {
- rem += 1;
- }
- if rem == s.len() {
- // Don't write "..." if the char bound extends to the end of string.
- self.remaining = 0;
- return dest.write_str(s);
- }
- dest.write_str(&s[..rem])?;
- }
- dest.write_str("...")?;
- self.dest = None;
- }
- Ok(())
- }
-
- #[inline]
- fn write_char(&mut self, c: char) -> fmt::Result {
- match self.dest.is_some() {
- true => self.write_str(c.encode_utf8(&mut [0; 4])),
- false => Ok(()),
- }
- }
-
- #[inline]
- fn write_fmt(&mut self, args: fmt::Arguments<'_>) -> fmt::Result {
- match self.dest.is_some() {
- true => fmt::write(self, args),
- false => Ok(()),
- }
- }
-}
-
-/// Indent lines with `width` spaces
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// /// {{ example|indent(4) }}
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Example<'a> {
-/// example: &'a str,
-/// }
-///
-/// assert_eq!(
-/// Example { example: "hello\nfoo\nbar" }.to_string(),
-/// "hello\n foo\n bar
"
-/// );
-/// # }
-/// ```
-#[inline]
-pub fn indent(s: impl fmt::Display, width: usize) -> Result {
- fn indent(s: String, width: usize) -> Result {
- if width >= MAX_LEN || s.len() >= MAX_LEN {
- return Ok(s);
- }
- let mut indented = String::new();
- for (i, c) in s.char_indices() {
- indented.push(c);
-
- if c == '\n' && i < s.len() - 1 {
- for _ in 0..width {
- indented.push(' ');
- }
- }
- }
- Ok(indented)
- }
- indent(try_to_string(s)?, width)
-}
-
-/// Joins iterable into a string separated by provided argument
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// /// {{ example|join(", ") }}
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Example<'a> {
-/// example: &'a [&'a str],
-/// }
-///
-/// assert_eq!(
-/// Example { example: &["foo", "bar", "bazz"] }.to_string(),
-/// "foo, bar, bazz
"
-/// );
-/// # }
-/// ```
-#[inline]
-pub fn join(input: I, separator: S) -> Result, Infallible>
-where
- I: IntoIterator,
- I::Item: fmt::Display,
- S: fmt::Display,
-{
- Ok(JoinFilter(Cell::new(Some((input, separator)))))
-}
-
-/// Result of the filter [`join()`].
-///
-/// ## Note
-///
-/// This struct implements [`fmt::Display`], but only produces a string once.
-/// Any subsequent call to `.to_string()` will result in an empty string, because the iterator is
-/// already consumed.
-// The filter contains a [`Cell`], so we can modify iterator inside a method that takes `self` by
-// reference: [`fmt::Display::fmt()`] normally has the contract that it will produce the same result
-// in multiple invocations for the same object. We break this contract, because have to consume the
-// iterator, unless we want to enforce `I: Clone`, nor do we want to "memorize" the result of the
-// joined data.
-pub struct JoinFilter(Cell>);
-
-impl fmt::Display for JoinFilter
-where
- I: IntoIterator,
- I::Item: fmt::Display,
- S: fmt::Display,
-{
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let Some((iter, separator)) = self.0.take() else {
- return Ok(());
- };
- for (idx, token) in iter.into_iter().enumerate() {
- match idx {
- 0 => f.write_fmt(format_args!("{token}"))?,
- _ => f.write_fmt(format_args!("{separator}{token}"))?,
- }
- }
- Ok(())
- }
-}
-
-/// Capitalize a value. The first character will be uppercase, all others lowercase.
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// /// {{ example|capitalize }}
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Example<'a> {
-/// example: &'a str,
-/// }
-///
-/// assert_eq!(
-/// Example { example: "hello" }.to_string(),
-/// "Hello
"
-/// );
-///
-/// assert_eq!(
-/// Example { example: "hElLO" }.to_string(),
-/// "Hello
"
-/// );
-/// # }
-/// ```
-#[inline]
-pub fn capitalize(s: impl fmt::Display) -> Result {
- fn capitalize(s: String) -> Result {
- match s.chars().next() {
- Some(c) => {
- let mut replacement: String = c.to_uppercase().collect();
- replacement.push_str(&s[c.len_utf8()..].to_lowercase());
- Ok(replacement)
- }
- _ => Ok(s),
- }
- }
- capitalize(try_to_string(s)?)
-}
-
-/// Centers the value in a field of a given width
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// /// -{{ example|center(5) }}-
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Example<'a> {
-/// example: &'a str,
-/// }
-///
-/// assert_eq!(
-/// Example { example: "a" }.to_string(),
-/// "- a -
"
-/// );
-/// # }
-/// ```
-#[inline]
-pub fn center(src: T, width: usize) -> Result, Infallible> {
- Ok(Center { src, width })
-}
-
-pub struct Center {
- src: T,
- width: usize,
-}
-
-impl fmt::Display for Center {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- if self.width < MAX_LEN {
- write!(f, "{: ^1$}", self.src, self.width)
- } else {
- write!(f, "{}", self.src)
- }
- }
-}
-
-/// Count the words in that string.
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// /// {{ example|wordcount }}
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Example<'a> {
-/// example: &'a str,
-/// }
-///
-/// assert_eq!(
-/// Example { example: "rinja is sort of cool" }.to_string(),
-/// "5
"
-/// );
-/// # }
-/// ```
-#[inline]
-pub fn wordcount(s: impl fmt::Display) -> Result {
- fn wordcount(s: String) -> Result {
- Ok(s.split_whitespace().count())
- }
- wordcount(try_to_string(s)?)
-}
-
-/// Return a title cased version of the value. Words will start with uppercase letters, all
-/// remaining characters are lowercase.
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// /// {{ example|title }}
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Example<'a> {
-/// example: &'a str,
-/// }
-///
-/// assert_eq!(
-/// Example { example: "hello WORLD" }.to_string(),
-/// "Hello World
"
-/// );
-/// # }
-/// ```
-pub fn title(s: impl fmt::Display) -> Result {
- let s = try_to_string(s)?;
- let mut need_capitalization = true;
-
- // Sadly enough, we can't mutate a string when iterating over its chars, likely because it could
- // change the size of a char, "breaking" the char indices.
- let mut output = String::with_capacity(s.len());
- for c in s.chars() {
- if c.is_whitespace() {
- output.push(c);
- need_capitalization = true;
- } else if need_capitalization {
- match c.is_uppercase() {
- true => output.push(c),
- false => output.extend(c.to_uppercase()),
- }
- need_capitalization = false;
- } else {
- match c.is_lowercase() {
- true => output.push(c),
- false => output.extend(c.to_lowercase()),
- }
- }
- }
- Ok(output)
-}
-
-/// For a value of `±1` by default an empty string `""` is returned, otherwise `"s"`.
-///
-/// # Examples
-///
-/// ## With default arguments
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// /// I have {{dogs}} dog{{dogs|pluralize}} and {{cats}} cat{{cats|pluralize}}.
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Pets {
-/// dogs: i8,
-/// cats: i8,
-/// }
-///
-/// assert_eq!(
-/// Pets { dogs: 0, cats: 0 }.to_string(),
-/// "I have 0 dogs and 0 cats."
-/// );
-/// assert_eq!(
-/// Pets { dogs: 1, cats: 1 }.to_string(),
-/// "I have 1 dog and 1 cat."
-/// );
-/// assert_eq!(
-/// Pets { dogs: -1, cats: 99 }.to_string(),
-/// "I have -1 dog and 99 cats."
-/// );
-/// # }
-/// ```
-///
-/// ## Overriding the singular case
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// /// I have {{dogs}} dog{{ dogs|pluralize("go") }}.
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Dog {
-/// dogs: i8,
-/// }
-///
-/// assert_eq!(
-/// Dog { dogs: 0 }.to_string(),
-/// "I have 0 dogs."
-/// );
-/// assert_eq!(
-/// Dog { dogs: 1 }.to_string(),
-/// "I have 1 doggo."
-/// );
-/// # }
-/// ```
-///
-/// ## Overriding singular and plural cases
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// /// I have {{mice}} {{ mice|pluralize("mouse", "mice") }}.
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Mice {
-/// mice: i8,
-/// }
-///
-/// assert_eq!(
-/// Mice { mice: 42 }.to_string(),
-/// "I have 42 mice."
-/// );
-/// assert_eq!(
-/// Mice { mice: 1 }.to_string(),
-/// "I have 1 mouse."
-/// );
-/// # }
-/// ```
-///
-/// ## Arguments get escaped
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// /// You are number {{ number|pluralize("ONE ", number) }}!
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Number {
-/// number: usize
-/// }
-///
-/// assert_eq!(
-/// Number { number: 1 }.to_string(),
-/// "You are number <b>ONE</b>!",
-/// );
-/// assert_eq!(
-/// Number { number: 9000 }.to_string(),
-/// "You are number 9000!",
-/// );
-/// # }
-/// ```
-#[inline]
-pub fn pluralize(count: C, singular: S, plural: P) -> Result, C::Error>
-where
- C: PluralizeCount,
-{
- match count.is_singular()? {
- true => Ok(Pluralize::Singular(singular)),
- false => Ok(Pluralize::Plural(plural)),
- }
-}
-
-/// An integer that can have the value `+1` and maybe `-1`.
-pub trait PluralizeCount {
- /// A possible error that can occur while checking the value.
- type Error: Into;
-
- /// Returns `true` if and only if the value is `±1`.
- fn is_singular(&self) -> Result;
-}
-
-const _: () = {
- crate::impl_for_ref! {
- impl PluralizeCount for T {
- type Error = T::Error;
-
- #[inline]
- fn is_singular(&self) -> Result {
- ::is_singular(self)
- }
- }
- }
-
- impl PluralizeCount for Pin
- where
- T: Deref,
- ::Target: PluralizeCount,
- {
- type Error = <::Target as PluralizeCount>::Error;
-
- #[inline]
- fn is_singular(&self) -> Result {
- self.as_ref().get_ref().is_singular()
- }
- }
-
- /// implement `PluralizeCount` for unsigned integer types
- macro_rules! impl_pluralize_for_unsigned_int {
- ($($ty:ty)*) => { $(
- impl PluralizeCount for $ty {
- type Error = Infallible;
-
- #[inline]
- fn is_singular(&self) -> Result {
- Ok(*self == 1)
- }
- }
- )* };
- }
-
- impl_pluralize_for_unsigned_int!(u8 u16 u32 u64 u128 usize);
-
- /// implement `PluralizeCount` for signed integer types
- macro_rules! impl_pluralize_for_signed_int {
- ($($ty:ty)*) => { $(
- impl PluralizeCount for $ty {
- type Error = Infallible;
-
- #[inline]
- fn is_singular(&self) -> Result {
- Ok(*self == 1 || *self == -1)
- }
- }
- )* };
- }
-
- impl_pluralize_for_signed_int!(i8 i16 i32 i64 i128 isize);
-
- /// implement `PluralizeCount` for non-zero integer types
- macro_rules! impl_pluralize_for_non_zero {
- ($($ty:ident)*) => { $(
- impl PluralizeCount for std::num::$ty {
- type Error = Infallible;
-
- #[inline]
- fn is_singular(&self) -> Result {
- self.get().is_singular()
- }
- }
- )* };
- }
-
- impl_pluralize_for_non_zero! {
- NonZeroI8 NonZeroI16 NonZeroI32 NonZeroI64 NonZeroI128 NonZeroIsize
- NonZeroU8 NonZeroU16 NonZeroU32 NonZeroU64 NonZeroU128 NonZeroUsize
- }
-};
-
-pub enum Pluralize {
- Singular(S),
- Plural(P),
-}
-
-impl fmt::Display for Pluralize {
- #[inline]
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match self {
- Pluralize::Singular(value) => write!(f, "{value}"),
- Pluralize::Plural(value) => write!(f, "{value}"),
- }
- }
-}
-
-impl FastWritable for Pluralize {
- #[inline]
- fn write_into(&self, dest: &mut W) -> fmt::Result {
- match self {
- Pluralize::Singular(value) => value.write_into(dest),
- Pluralize::Plural(value) => value.write_into(dest),
- }
- }
-}
-
-fn try_to_string(s: impl fmt::Display) -> Result {
- let mut result = String::new();
- write!(result, "{s}")?;
- Ok(result)
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
-
- #[test]
- fn test_linebreaks() {
- assert_eq!(
- linebreaks("Foo\nBar Baz").unwrap().to_string(),
- "Foo Bar Baz
"
- );
- assert_eq!(
- linebreaks("Foo\nBar\n\nBaz").unwrap().to_string(),
- "Foo Bar
Baz
"
- );
- }
-
- #[test]
- fn test_linebreaksbr() {
- assert_eq!(linebreaksbr("Foo\nBar").unwrap().to_string(), "Foo Bar");
- assert_eq!(
- linebreaksbr("Foo\nBar\n\nBaz").unwrap().to_string(),
- "Foo Bar Baz"
- );
- }
-
- #[test]
- fn test_paragraphbreaks() {
- assert_eq!(
- paragraphbreaks("Foo\nBar Baz").unwrap().to_string(),
- "Foo\nBar Baz
"
- );
- assert_eq!(
- paragraphbreaks("Foo\nBar\n\nBaz").unwrap().to_string(),
- "Foo\nBar
Baz
"
- );
- assert_eq!(
- paragraphbreaks("Foo\n\n\n\n\nBar\n\nBaz")
- .unwrap()
- .to_string(),
- "Foo
\nBar
Baz
"
- );
- }
-
- #[test]
- fn test_lower() {
- assert_eq!(lower("Foo").unwrap().to_string(), "foo");
- assert_eq!(lower("FOO").unwrap().to_string(), "foo");
- assert_eq!(lower("FooBar").unwrap().to_string(), "foobar");
- assert_eq!(lower("foo").unwrap().to_string(), "foo");
- }
-
- #[test]
- fn test_upper() {
- assert_eq!(upper("Foo").unwrap().to_string(), "FOO");
- assert_eq!(upper("FOO").unwrap().to_string(), "FOO");
- assert_eq!(upper("FooBar").unwrap().to_string(), "FOOBAR");
- assert_eq!(upper("foo").unwrap().to_string(), "FOO");
- }
-
- #[test]
- fn test_trim() {
- assert_eq!(trim(" Hello\tworld\t").unwrap().to_string(), "Hello\tworld");
- }
-
- #[test]
- fn test_truncate() {
- assert_eq!(truncate("hello", 2).unwrap().to_string(), "he...");
- let a = String::from("您好");
- assert_eq!(a.len(), 6);
- assert_eq!(String::from("您").len(), 3);
- assert_eq!(truncate("您好", 1).unwrap().to_string(), "您...");
- assert_eq!(truncate("您好", 2).unwrap().to_string(), "您...");
- assert_eq!(truncate("您好", 3).unwrap().to_string(), "您...");
- assert_eq!(truncate("您好", 4).unwrap().to_string(), "您好");
- assert_eq!(truncate("您好", 5).unwrap().to_string(), "您好");
- assert_eq!(truncate("您好", 6).unwrap().to_string(), "您好");
- assert_eq!(truncate("您好", 7).unwrap().to_string(), "您好");
- let s = String::from("🤚a🤚");
- assert_eq!(s.len(), 9);
- assert_eq!(String::from("🤚").len(), 4);
- assert_eq!(truncate("🤚a🤚", 1).unwrap().to_string(), "🤚...");
- assert_eq!(truncate("🤚a🤚", 2).unwrap().to_string(), "🤚...");
- assert_eq!(truncate("🤚a🤚", 3).unwrap().to_string(), "🤚...");
- assert_eq!(truncate("🤚a🤚", 4).unwrap().to_string(), "🤚...");
- assert_eq!(truncate("🤚a🤚", 5).unwrap().to_string(), "🤚a...");
- assert_eq!(truncate("🤚a🤚", 6).unwrap().to_string(), "🤚a🤚");
- assert_eq!(truncate("🤚a🤚", 6).unwrap().to_string(), "🤚a🤚");
- assert_eq!(truncate("🤚a🤚", 7).unwrap().to_string(), "🤚a🤚");
- assert_eq!(truncate("🤚a🤚", 8).unwrap().to_string(), "🤚a🤚");
- assert_eq!(truncate("🤚a🤚", 9).unwrap().to_string(), "🤚a🤚");
- assert_eq!(truncate("🤚a🤚", 10).unwrap().to_string(), "🤚a🤚");
- }
-
- #[test]
- fn test_indent() {
- assert_eq!(indent("hello", 2).unwrap().to_string(), "hello");
- assert_eq!(indent("hello\n", 2).unwrap().to_string(), "hello\n");
- assert_eq!(indent("hello\nfoo", 2).unwrap().to_string(), "hello\n foo");
- assert_eq!(
- indent("hello\nfoo\n bar", 4).unwrap().to_string(),
- "hello\n foo\n bar"
- );
- assert_eq!(
- indent("hello", 267_332_238_858).unwrap().to_string(),
- "hello"
- );
- }
-
- #[allow(clippy::needless_borrow)]
- #[test]
- fn test_join() {
- assert_eq!(
- join((&["hello", "world"]).iter(), ", ")
- .unwrap()
- .to_string(),
- "hello, world"
- );
- assert_eq!(
- join((&["hello"]).iter(), ", ").unwrap().to_string(),
- "hello"
- );
-
- let empty: &[&str] = &[];
- assert_eq!(join(empty.iter(), ", ").unwrap().to_string(), "");
-
- let input: Vec = vec!["foo".into(), "bar".into(), "bazz".into()];
- assert_eq!(join(input.iter(), ":").unwrap().to_string(), "foo:bar:bazz");
-
- let input: &[String] = &["foo".into(), "bar".into()];
- assert_eq!(join(input.iter(), ":").unwrap().to_string(), "foo:bar");
-
- let real: String = "blah".into();
- let input: Vec<&str> = vec![&real];
- assert_eq!(join(input.iter(), ";").unwrap().to_string(), "blah");
-
- assert_eq!(
- join((&&&&&["foo", "bar"]).iter(), ", ")
- .unwrap()
- .to_string(),
- "foo, bar"
- );
- }
-
- #[test]
- fn test_capitalize() {
- assert_eq!(capitalize("foo").unwrap().to_string(), "Foo".to_string());
- assert_eq!(capitalize("f").unwrap().to_string(), "F".to_string());
- assert_eq!(capitalize("fO").unwrap().to_string(), "Fo".to_string());
- assert_eq!(capitalize("").unwrap().to_string(), String::new());
- assert_eq!(capitalize("FoO").unwrap().to_string(), "Foo".to_string());
- assert_eq!(
- capitalize("foO BAR").unwrap().to_string(),
- "Foo bar".to_string()
- );
- assert_eq!(
- capitalize("äØÄÅÖ").unwrap().to_string(),
- "Äøäåö".to_string()
- );
- assert_eq!(capitalize("ß").unwrap().to_string(), "SS".to_string());
- assert_eq!(capitalize("ßß").unwrap().to_string(), "SSß".to_string());
- }
-
- #[test]
- fn test_center() {
- assert_eq!(center("f", 3).unwrap().to_string(), " f ".to_string());
- assert_eq!(center("f", 4).unwrap().to_string(), " f ".to_string());
- assert_eq!(center("foo", 1).unwrap().to_string(), "foo".to_string());
- assert_eq!(
- center("foo bar", 8).unwrap().to_string(),
- "foo bar ".to_string()
- );
- assert_eq!(
- center("foo", 111_669_149_696).unwrap().to_string(),
- "foo".to_string()
- );
- }
-
- #[test]
- fn test_wordcount() {
- assert_eq!(wordcount("").unwrap(), 0);
- assert_eq!(wordcount(" \n\t").unwrap(), 0);
- assert_eq!(wordcount("foo").unwrap(), 1);
- assert_eq!(wordcount("foo bar").unwrap(), 2);
- assert_eq!(wordcount("foo bar").unwrap(), 2);
- }
-
- #[test]
- fn test_title() {
- assert_eq!(&title("").unwrap(), "");
- assert_eq!(&title(" \n\t").unwrap(), " \n\t");
- assert_eq!(&title("foo").unwrap(), "Foo");
- assert_eq!(&title(" foo").unwrap(), " Foo");
- assert_eq!(&title("foo bar").unwrap(), "Foo Bar");
- assert_eq!(&title("foo bar ").unwrap(), "Foo Bar ");
- assert_eq!(&title("fOO").unwrap(), "Foo");
- assert_eq!(&title("fOo BaR").unwrap(), "Foo Bar");
- }
-
- #[test]
- fn fuzzed_indent_filter() {
- let s = "hello\nfoo\nbar".to_string().repeat(1024);
- assert_eq!(indent(s.clone(), 4).unwrap().to_string(), s);
- }
-}
diff --git a/third_party/rust/rinja/src/filters/escape.rs b/third_party/rust/rinja/src/filters/escape.rs
deleted file mode 100644
index 0f5a45be92ad..000000000000
--- a/third_party/rust/rinja/src/filters/escape.rs
+++ /dev/null
@@ -1,760 +0,0 @@
-use std::convert::Infallible;
-use std::fmt::{self, Formatter, Write};
-use std::ops::Deref;
-use std::pin::Pin;
-use std::{borrow, str};
-
-/// Marks a string (or other `Display` type) as safe
-///
-/// Use this if you want to allow markup in an expression, or if you know
-/// that the expression's contents don't need to be escaped.
-///
-/// Rinja will automatically insert the first (`Escaper`) argument,
-/// so this filter only takes a single argument of any type that implements
-/// `Display`.
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// /// {{ example|safe }}
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Example<'a> {
-/// example: &'a str,
-/// }
-///
-/// assert_eq!(
-/// Example { example: "I'm Safe
" }.to_string(),
-/// ""
-/// );
-/// # }
-/// ```
-#[inline]
-pub fn safe(text: T, escaper: E) -> Result, Infallible> {
- let _ = escaper; // it should not be part of the interface that the `escaper` is unused
- Ok(Safe(text))
-}
-
-/// Escapes strings according to the escape mode.
-///
-/// Rinja will automatically insert the first (`Escaper`) argument,
-/// so this filter only takes a single argument of any type that implements
-/// `Display`.
-///
-/// It is possible to optionally specify an escaper other than the default for
-/// the template's extension, like `{{ val|escape("txt") }}`.
-///
-/// ```
-/// # #[cfg(feature = "code-in-doc")] {
-/// # use rinja::Template;
-/// /// ```jinja
-/// /// {{ example|escape }}
-/// /// ```
-/// #[derive(Template)]
-/// #[template(ext = "html", in_doc = true)]
-/// struct Example<'a> {
-/// example: &'a str,
-/// }
-///
-/// assert_eq!(
-/// Example { example: "Escape <>&" }.to_string(),
-/// "Escape <>&
"
-/// );
-/// # }
-/// ```
-#[inline]
-pub fn escape(text: T, escaper: E) -> Result>, Infallible> {
- Ok(Safe(EscapeDisplay(text, escaper)))
-}
-
-pub struct EscapeDisplay(T, E);
-
-impl fmt::Display for EscapeDisplay {
- #[inline]
- fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
- write!(EscapeWriter(fmt, self.1), "{}", &self.0)
- }
-}
-
-impl