Bug 1942046 - update vendored taskgraph to v12.2.0. r=taskgraph-reviewers,mach-reviewers,ahal

Differential Revision: https://phabricator.services.mozilla.com/D234493
Julien Cristau
2025-01-16 17:28:58 +00:00
parent 66a3c693d3
commit 9b4c98067a
22 changed files with 149 additions and 90 deletions


@@ -42,7 +42,7 @@ def build_image(name, tag, args=None):
tag = tag or docker_image(name, by_tag=True)
buf = BytesIO()
stream_context_tar(GECKO, image_dir, buf, name, args)
stream_context_tar(GECKO, image_dir, buf, args)
docker.post_to_docker(buf.getvalue(), "/build", nocache=1, t=tag)
print(f"Successfully built {name} and tagged with {tag}")


@@ -53,7 +53,7 @@ dependencies = [
"setuptools==74.0.0",
"six==1.16.0",
"slugid==2.0.0",
"taskcluster-taskgraph~=12.1",
"taskcluster-taskgraph~=12.2",
"taskcluster-urls==13.0.1",
"taskcluster==75.0.1",
"toml==0.10.2",


@@ -631,9 +631,9 @@ slugid==2.0.0 \
taskcluster==75.0.1 \
--hash=sha256:3100ce68e7a655701cd78290067f0df1b05e0a85c69df29f60ceaad6baf0fc53 \
--hash=sha256:6b16a0d8ffa3431a66a2ffe428f8e5c7874b9cbeddba7d5ce0e8d8783d4c95a3
taskcluster-taskgraph==12.1.0 \
--hash=sha256:c37b0ff65ab6ae3ae322bf1ecd4d3453db01b37f22164d9a33ab77e634a34d9a \
--hash=sha256:222ba9f729e6d970de8c177251e3a5f29010332d7cc6ca8967fd8c8b73fa2c1b
taskcluster-taskgraph==12.2.0 \
--hash=sha256:b62d10e4d7deba9721a0d0d3766825cdcb6a8ea0f3e7fcb7d718931647709d20 \
--hash=sha256:594cbfe1afd823b547d4ae9bfbb7700f43e03c856d6a02717764eb9bbb537f5f
taskcluster-urls==13.0.1 \
--hash=sha256:b25e122ecec249c4299ac7b20b08db76e3e2025bdaeb699a9d444556de5fd367 \
--hash=sha256:f66dcbd6572a6216ab65949f0fa0b91f2df647918028436c384e6af5cd12ae2b


@@ -1,10 +1,11 @@
Metadata-Version: 2.3
Metadata-Version: 2.4
Name: taskcluster-taskgraph
Version: 12.1.0
Version: 12.2.0
Summary: Build taskcluster taskgraphs
Project-URL: Repository, https://github.com/taskcluster/taskgraph
Project-URL: Issues, https://github.com/taskcluster/taskgraph/issues
Author-email: Mozilla Release Engineering <release+taskgraph@mozilla.com>
License-File: LICENSE
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Console
Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)


@@ -1,14 +1,14 @@
taskgraph/__init__.py,sha256=_H9_zoYEWGO9c2L_M63SN76YWczgIqzggz_fETwqurI,718
taskgraph/__init__.py,sha256=ZbH3Ffj6EU2ID7GicBYZ7G4a_Ih8EHLnq3tIryAHSsE,718
taskgraph/config.py,sha256=55xqo02311bi9SKpys5izMl8lQaLhRZKtW2AwHyMz40,5172
taskgraph/create.py,sha256=sUKEUAFp3X_1Bs99alh4jw7cFXtDFsbh4T2pAkp5mC0,5315
taskgraph/decision.py,sha256=lAHwcojZUWKIsyW1CgPHYyMtT9ULd0_fQNyUrWimC-w,13897
taskgraph/docker.py,sha256=tP2MRJqAPIUZsCuYAnwg1Qo8AB3N0RjcbYZx07wwMwo,8511
taskgraph/docker.py,sha256=psTY9h69moU95nCOLXqDLKwVd3YlXWRi9PfjUoA_4L0,8507
taskgraph/filter_tasks.py,sha256=R7tYXiaVPGIkQ6O1c9-QJrKZ59m9pFXCloUlPraVnZU,866
taskgraph/generator.py,sha256=GEgOuy3X_trmGC8wJJYVoTGjU3Mp2OsLcvUCcDqAs8U,15903
taskgraph/generator.py,sha256=S1JkWoi6q2dUd2jEzbuZJmjbVuUJv5TobPTRIuk0b7I,15845
taskgraph/graph.py,sha256=qvihwxxdc3C55ZJ5FLc2phFV0D7oZNXKXEHBIA_GI1U,4696
taskgraph/main.py,sha256=74h2sd3dbqWBdJQ7G0UfGyXHt-k9Bbi71lqgUyVMCQs,29275
taskgraph/main.py,sha256=htl6Mx9MzTrmPUJUxERshxn79HxCCGt5QrCKRoPanDM,30021
taskgraph/morph.py,sha256=kR8Yo-ZHguvPQvhY4p6EGXjjw77cw53e5jhFEolQrus,9255
taskgraph/parameters.py,sha256=UyblfjPaPuXYZR3X_zMZUFOvBancAh3xFeKP5WnC5Yg,12438
taskgraph/parameters.py,sha256=ZcMdWuIiqWuweWndtzvwRffgtnExTpBTDpBs-0OIoAY,12482
taskgraph/target_tasks.py,sha256=9_v66bzmQFELPsfIDGITXrqzsmEiLq1EeuJFhycKL0M,3356
taskgraph/task.py,sha256=H-t0-v8xoX6uzAT5bY30b7uYBM4taJTg-ft3_kVn9VA,3234
taskgraph/taskgraph.py,sha256=caU9adRz7MbHAXpXKIY2jxV04PaYogkofnMRQ0EBmZQ,2471
@@ -26,9 +26,9 @@ taskgraph/loader/transform.py,sha256=olUBPjxk3eEIg25sduxlcyqhjoig4ts5kPlT_zs6g9g
taskgraph/optimize/__init__.py,sha256=Oqpq1RW8QzOcu7zaMlNQ3BHT9ws9e_93FWfCqzNcQps,123
taskgraph/optimize/base.py,sha256=ltJBr3WR53DyNnEsvUm9fHjFAGh9gGncmekKD1Dlb-w,20221
taskgraph/optimize/strategies.py,sha256=tnyDw6-z5S2K7710Vww-EsiqtEqZl5rVYRqg8joJ1dk,3638
taskgraph/run-task/fetch-content,sha256=_bBs9T6YSNfKL338XqPWmzXbDBc7YzKwG5YMmnnSYvE,33643
taskgraph/run-task/fetch-content,sha256=5tytiNR0J1Wls6luPsJlnkfkDLXIXpXbSCAVD8t4wvM,35194
taskgraph/run-task/hgrc,sha256=BybWLDR89bWi3pE5T05UqmDHs02CbLypE-omLZWU6Uk,896
taskgraph/run-task/robustcheckout.py,sha256=OhW98v-VKajX55AToL5nHeUANEOPL5fIJgsvsagGfnU,30954
taskgraph/run-task/robustcheckout.py,sha256=DZ-MB68P7FmZY1vkj7o3Xp-d21aOiFnLMimv25S3FTs,30778
taskgraph/run-task/run-task,sha256=SS_NegHr87dWNXBx_zK2Tuq2XUMP_t__iUZ5N5rdjB4,46229
taskgraph/transforms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
taskgraph/transforms/base.py,sha256=OYIsK4-pPFPhuK2wV7qyeZSSsPaZdJQuI-ixHf4fbeQ,5155
@@ -36,8 +36,8 @@ taskgraph/transforms/cached_tasks.py,sha256=Omg7NGnig-ts3Gq_3oB9YQQ4lQW_KGCP0tL7
taskgraph/transforms/chunking.py,sha256=7z9oXiA2dDguYwJPaZYCi-fEzbc--O9avZAFS3vP_kg,2592
taskgraph/transforms/code_review.py,sha256=6z4Kc9e812PXwnDbCfDG_ZpUY6vrUpd4U59_5Ss0O9E,712
taskgraph/transforms/docker_image.py,sha256=ZZALcDXK2vfFbODiREIClG5i5wAQbeJk0i9Hb0jM2rY,7509
taskgraph/transforms/fetch.py,sha256=4zOz_GPWPcyTTTw3LvTWTycfEqJJdNEDhYJ7o6XvJq4,10639
taskgraph/transforms/from_deps.py,sha256=VOmu0nilCLrQCF8V1eCbmRUleVLTLsdhAJDE8rny0uY,8888
taskgraph/transforms/fetch.py,sha256=kGSmsoBFgLggLls0zMKqNqo35G57N80ScfeWV8DaF-Y,10635
taskgraph/transforms/from_deps.py,sha256=WD7Lim5tdRhtkyqobEM7aAxF9b1rN-lPru7VqZr8XWI,8911
taskgraph/transforms/matrix.py,sha256=lP9kwUsJtFd2uWGyxiDG94ZSWk3xTwtWZBtyl_CSavg,3445
taskgraph/transforms/notify.py,sha256=-SezY83_WLftI2iapYNzt2NEgEAws78wQUxxAJ-Zsmg,6020
taskgraph/transforms/task.py,sha256=zUNFiVIpDSq44CZqHuFR3pWaK8YrFqNV0l4rUbx9O0M,52967
@@ -48,12 +48,12 @@ taskgraph/transforms/run/index_search.py,sha256=qLXgbHszb3gEVtFDwLo39tvG3wODpgMV
taskgraph/transforms/run/run_task.py,sha256=3xYHJF1jphNDh84Rp5ELoLFcmK2SKNosUatH8ajDiiU,8294
taskgraph/transforms/run/toolchain.py,sha256=3dzNOA2GriihcLgoJLfBG2v1F3Mka8aJdQ2rCXC1lv4,6141
taskgraph/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
taskgraph/util/archive.py,sha256=qfqp00RmH6Fd1cM-7uW8_u3DEZSW2BedxAodg28-RoU,4700
taskgraph/util/archive.py,sha256=4XcH0FvUZIDs6MYzopFVlItM5J5JllFEXg9buZB3QVc,5104
taskgraph/util/attributes.py,sha256=pPOFmwkDQQ-IqfDpVghZ10YI_qXRY4Bi5JP3xr6XVvc,2964
taskgraph/util/cached_tasks.py,sha256=fV4fNiroV1OlWIQSbY48njK2k4Mj619CpVMhiDRVM9Y,4166
taskgraph/util/copy.py,sha256=-AgF3oPF3lfiUHvAOiGkhUzS68i9kcWRsyDSiBkAVCw,1577
taskgraph/util/dependencies.py,sha256=3Qba3zI87JYR5fk5FndGzEVW-5NIzzZrBf9rVYcnLD0,2734
taskgraph/util/docker.py,sha256=1F29PyNjYRb-bk9a-XMgyHCco3CySvOm__CTNh_hhg8,8120
taskgraph/util/docker.py,sha256=nY9A8BH5O4pkEOziJfpER87yvOpYh7LrSUGB1Q3ouYs,8183
taskgraph/util/hash.py,sha256=l3l_urIfUN0u1S9xa496_p_Te1Ab4rW_gr0h0JcRzEI,1694
taskgraph/util/keyed_by.py,sha256=EMWNRRqYB0AS7A4Y4lthYf2HB7G2ercGFf4hN9zwyaY,3348
taskgraph/util/parameterization.py,sha256=DiPE-4jappGMPljDhhZI52BP7dLBGZHu5EI1cW4aRYg,3392
@@ -62,7 +62,7 @@ taskgraph/util/python_path.py,sha256=O77PqZZTx6KIZc0B9FExFer2A1JU20u5hz7zexAJXYw
taskgraph/util/readonlydict.py,sha256=4D9ZV9nqvVO86Jjv12GCmrd5xiOeqJ0IrukjLsAAHFE,1223
taskgraph/util/schema.py,sha256=8HqMiFjrj6fAKIfATffrzgsace-uqF_35ADJoZhRNLo,8292
taskgraph/util/set_name.py,sha256=cha9awo2nMQ9jfSEcbyNkZkCq_1Yg_kKJTfvDzabHSc,1134
taskgraph/util/shell.py,sha256=nf__ly0Ikhj92AiEBCQtvyyckm8UfO_3DSgz0SU-7QA,1321
taskgraph/util/shell.py,sha256=dQML0222Svp0wW6HbLeXvuBXkEtkXnSlFEUwZW0SCYg,1319
taskgraph/util/taskcluster.py,sha256=zzL0tdIm5F05alFBYvuBcRtisU3-iHr51Is1s4oUdUU,17012
taskgraph/util/taskgraph.py,sha256=TcMDSffQcMwBQhcjmil8smqPYNNH8m2CQZCgU0URUkc,1968
taskgraph/util/templates.py,sha256=qRh1nFNCuRHTb7xcFlWqomV2J_Tk7s6-liNQXfoQTtM,2865
@@ -72,8 +72,8 @@ taskgraph/util/vcs.py,sha256=3PZd2fq7YkqP22OnZSpugzIHf6WPALm76CHk-YfK2us,18657
taskgraph/util/verify.py,sha256=RbjKw6aAux5skXf2BihCdH7qyLWAU_plTYQ4GlRLBr0,8983
taskgraph/util/workertypes.py,sha256=dR5NkwvmcY-no_I4l22mw93EKyvGbe_Xv7N9hzkiG1U,2570
taskgraph/util/yaml.py,sha256=29h6RE7JA4z0U2V3WCu-S39lPMBS9CEZEglv1delDvw,1075
taskcluster_taskgraph-12.1.0.dist-info/METADATA,sha256=hHjxzh9-muODo4PH68zt0MYV2KSvF06dBBOa-Gh5t-I,4925
taskcluster_taskgraph-12.1.0.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
taskcluster_taskgraph-12.1.0.dist-info/entry_points.txt,sha256=2hxDzE3qq_sHh-J3ROqwpxgQgxO-196phWAQREl2-XA,50
taskcluster_taskgraph-12.1.0.dist-info/licenses/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
taskcluster_taskgraph-12.1.0.dist-info/RECORD,,
taskcluster_taskgraph-12.2.0.dist-info/METADATA,sha256=X9x3yBP1UaVSwjKlP6cFTi0bLzXTLC5Pi1c5tyU07-E,4947
taskcluster_taskgraph-12.2.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
taskcluster_taskgraph-12.2.0.dist-info/entry_points.txt,sha256=2hxDzE3qq_sHh-J3ROqwpxgQgxO-196phWAQREl2-XA,50
taskcluster_taskgraph-12.2.0.dist-info/licenses/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
taskcluster_taskgraph-12.2.0.dist-info/RECORD,,


@@ -1,4 +1,4 @@
Wheel-Version: 1.0
Generator: hatchling 1.26.3
Generator: hatchling 1.27.0
Root-Is-Purelib: true
Tag: py3-none-any


@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
__version__ = "12.1.0"
__version__ = "12.2.0"
# Maximum number of dependencies a single task can have
# https://docs.taskcluster.net/docs/reference/platform/queue/api#createTask


@@ -122,7 +122,7 @@ def build_image(name, tag, args=None):
tag = tag or docker.docker_image(name, by_tag=True)
buf = BytesIO()
docker.stream_context_tar(".", image_dir, buf, "", args)
docker.stream_context_tar(".", image_dir, buf, args)
cmdargs = ["docker", "image", "build", "--no-cache", "-"]
if tag:
cmdargs.insert(-1, f"-t={tag}")


@@ -338,8 +338,7 @@ class TaskGraphGenerator:
all_tasks, Graph(frozenset(full_task_set.graph.nodes), frozenset(edges))
)
logger.info(
"Full task graph contains %d tasks and %d dependencies"
% (len(full_task_set.graph.nodes), len(edges))
f"Full task graph contains {len(full_task_set.graph.nodes)} tasks and {len(edges)} dependencies"
)
yield self.verify("full_task_graph", full_task_graph, graph_config, parameters)
@@ -356,8 +355,7 @@ class TaskGraphGenerator:
Graph(frozenset(target_tasks), frozenset()),
)
logger.info(
"Filter %s pruned %d tasks (%d remain)"
% (fltr.__name__, old_len - len(target_tasks), len(target_tasks))
f"Filter {fltr.__name__} pruned {old_len - len(target_tasks)} tasks ({len(target_tasks)} remain)"
)
yield self.verify("target_task_set", target_task_set, graph_config, parameters)
@@ -375,8 +373,7 @@ class TaskGraphGenerator:
else:
always_target_tasks = set()
logger.info(
"Adding %d tasks with `always_target` attribute"
% (len(always_target_tasks) - len(always_target_tasks & target_tasks)) # type: ignore
f"Adding {len(always_target_tasks) - len(always_target_tasks & target_tasks)} tasks with `always_target` attribute" # type: ignore
)
requested_tasks = target_tasks | always_target_tasks # type: ignore
target_graph = full_task_graph.graph.transitive_closure(requested_tasks)
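A note on these three hunks: the old code applied %-interpolation before calling logger.info, so the message was already formatted eagerly, and the f-string rewrite is a like-for-like cleanup rather than a behavior change. Truly lazy formatting would pass the arguments through to the logger instead, as in this sketch:

logger.info(
    "Full task graph contains %d tasks and %d dependencies",
    len(full_task_set.graph.nodes),
    len(edges),
)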


@@ -228,30 +228,32 @@ def generate_taskgraph(options, parameters, overrides, logdir):
@command(
"tasks",
help="Show all tasks in the taskgraph.",
help="Show the full task set in the task graph. The full task set includes all tasks defined by any kind, without edges (dependencies) between them.",
defaults={"graph_attr": "full_task_set"},
)
@command(
"full", help="Show the full taskgraph.", defaults={"graph_attr": "full_task_graph"}
"full",
help="Show the full task graph. The full task graph consists of the full task set, with edges (dependencies) between tasks.",
defaults={"graph_attr": "full_task_graph"},
)
@command(
"target",
help="Show the set of target tasks.",
help="Show the target task set in the task graph. The target task set includes the tasks which have indicated they should be run, without edges (dependencies) between them.",
defaults={"graph_attr": "target_task_set"},
)
@command(
"target-graph",
help="Show the target graph.",
help="Show the target task graph. The target task graph consists of the target task set, with edges (dependencies) between tasks.",
defaults={"graph_attr": "target_task_graph"},
)
@command(
"optimized",
help="Show the optimized graph.",
help="Show the optimized task graph, which is the target task set with tasks optimized out (filtered, omitted, or replaced) and edges representing dependencies.",
defaults={"graph_attr": "optimized_task_graph"},
)
@command(
"morphed",
help="Show the morphed graph.",
help="Show the morphed graph, which is the optimized task graph with additional morphs applied. It retains the same meaning as the optimized task graph but in a form more palatable to TaskCluster.",
defaults={"graph_attr": "morphed_task_graph"},
)
@argument("--root", "-r", help="root of the taskgraph definition relative to topsrcdir")


@@ -110,7 +110,7 @@ def _get_defaults(repo_root=None):
"do_not_optimize": [],
"enable_always_target": True,
"existing_tasks": {},
"files_changed": repo.get_changed_files("AM"),
"files_changed": lambda: repo.get_changed_files("AM"),
"filters": ["target_tasks_method"],
"head_ref": repo.branch or repo.head_rev,
"head_repository": repo_url,
@@ -210,7 +210,7 @@ class Parameters(ReadOnlyDict):
for name, default in defaults.items():
if name not in kwargs:
kwargs[name] = default
kwargs[name] = default() if callable(default) else default
return kwargs
def check(self):
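This change makes expensive parameter defaults lazy: files_changed is now a zero-argument callable, and Parameters only invokes it when the caller did not supply an explicit value. A minimal sketch of the pattern (expensive_vcs_query is a hypothetical stand-in):

def fill(defaults, **kwargs):
    for name, default in defaults.items():
        if name not in kwargs:
            kwargs[name] = default() if callable(default) else default
    return kwargs

fill({"files_changed": lambda: expensive_vcs_query()})                    # runs the query
fill({"files_changed": lambda: expensive_vcs_query()}, files_changed=[])  # skips it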


@@ -18,6 +18,7 @@ import os
import pathlib
import random
import re
import ssl
import stat
import subprocess
import sys
@@ -190,9 +191,11 @@ def stream_download(url, sha256=None, size=None, headers=None):
req_headers[key.strip()] = val.strip()
req = urllib.request.Request(url, None, req_headers)
with urllib.request.urlopen(
req, timeout=60, cafile=certifi.where()
) if certifi else urllib.request.urlopen(req, timeout=60) as fh:
kwargs = {}
if certifi:
ssl_context = ssl.create_default_context(cafile=certifi.where())
kwargs["context"] = context = ssl_context
with urllib.request.urlopen(req, timeout=60, **kwargs) as fh:
if not url.endswith(".gz") and fh.info().get("Content-Encoding") == "gzip":
fh = gzip.GzipFile(fileobj=fh)
else:
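Context for this hunk: the cafile parameter of urllib.request.urlopen was deprecated in Python 3.6 and removed in 3.12, so the certifi CA bundle is now supplied through an ssl.SSLContext instead. The standalone form of the new pattern looks like this (certifi is optional at runtime, hence the kwargs indirection in the diff):

import ssl
import urllib.request

import certifi

ctx = ssl.create_default_context(cafile=certifi.where())
with urllib.request.urlopen("https://example.com", timeout=60, context=ctx) as fh:
    data = fh.read()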
@@ -480,12 +483,34 @@ def should_repack_archive(
return True
EXECUTABLE_SIGNATURES = set([
b"\xFE\xED\xFA\xCE", # mach-o 32-bits big endian
b"\xCE\xFA\xED\xFE", # mach-o 32-bits little endian
b"\xFE\xED\xFA\xCF", # mach-o 64-bits big endian
b"\xCF\xFA\xED\xFE", # mach-o 64-bits little endian
b"\xCA\xFE\xBA\xBE", # mach-o FAT binary
b"\x7F\x45\x4C\x46", # Elf binary
])
def repack_archive(
orig: pathlib.Path, dest: pathlib.Path, strip_components=0, prefix=""
orig: pathlib.Path,
dest: pathlib.Path,
strip_components=0,
prefix="",
force_archive=False,
):
assert orig != dest
log(f"Repacking {orig} as {dest}")
orig_typ, ifh = open_stream(orig)
try:
orig_typ, ifh = open_stream(orig)
except ArchiveTypeNotSupported:
if force_archive:
ifh = io.BufferedReader(orig.open(mode="rb"))
signature = ifh.peek(4)[:4]
orig_typ = "exec" if signature in EXECUTABLE_SIGNATURES else None
else:
raise
typ = archive_type(dest)
if not typ:
raise Exception("Archive type not supported for %s" % dest.name)
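With force_archive, a download that is not a recognized archive is wrapped as-is, and the four-byte signature peek decides whether the resulting tar entry gets executable permissions (0o755 vs 0o644, per the next hunk). A hedged helper equivalent to that check, reusing the EXECUTABLE_SIGNATURES set defined above:

import pathlib

def looks_executable(path: pathlib.Path) -> bool:
    # Mach-O and ELF binaries are identified by their first four bytes.
    with path.open("rb") as fh:
        return fh.read(4) in EXECUTABLE_SIGNATURES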
@@ -510,7 +535,20 @@ def repack_archive(
with rename_after_close(dest, "wb") as fh:
ctx = ZstdCompressor()
if orig_typ == "zip":
if orig_typ in ("exec", None):
with ctx.stream_writer(fh) as compressor, tarfile.open(
fileobj=compressor,
mode="w:",
) as tar:
tarinfo = tarfile.TarInfo()
tarinfo.name = filter(orig.name) if filter else orig.name
st = orig.stat()
tarinfo.size = st.st_size
tarinfo.mtime = st.st_mtime
tarinfo.mode = 0o0755 if orig_typ == "exec" else 0o0644
tar.addfile(tarinfo, ifh)
elif orig_typ == "zip":
assert typ == "tar"
zip = zipfile.ZipFile(ifh)
# Convert the zip stream to a tar on the fly.
@@ -824,8 +862,12 @@ def command_static_url(args):
if gpg_sig_url:
gpg_verify_path(dl_dest, gpg_key, gpg_signature)
if should_repack_archive(dl_dest, dest, args.strip_components, args.add_prefix):
repack_archive(dl_dest, dest, args.strip_components, args.add_prefix)
if args.force_archive or should_repack_archive(
dl_dest, dest, args.strip_components, args.add_prefix
):
repack_archive(
dl_dest, dest, args.strip_components, args.add_prefix, args.force_archive
)
elif dl_dest != dest:
log(f"Renaming {dl_dest} to {dest}")
dl_dest.rename(dest)
@@ -960,6 +1002,11 @@ def main():
dest="headers",
help="Header to send as part of the request, can be passed " "multiple times",
)
url.add_argument(
"--force-archive",
action="store_true",
help="Create an archive even when the downloaded file is not an archive",
)
url.add_argument("url", help="URL to fetch")
url.add_argument("dest", help="Destination path")


@@ -20,9 +20,9 @@ import socket
import ssl
import time
from mercurial.i18n import _ # type: ignore
from mercurial.node import hex, nullid # type: ignore
from mercurial import ( # type: ignore
from mercurial.i18n import _
from mercurial.node import hex, nullid
from mercurial import (
commands,
configitems,
error,
@@ -57,7 +57,7 @@ configitem(b"robustcheckout", b"retryjittermax", default=configitems.dynamicdefault)
def getsparse():
from mercurial import sparse # type: ignore
from mercurial import sparse
return sparse
@@ -79,7 +79,7 @@ def peerlookup(remote, v):
b"",
b"networkattempts",
3,
b"Maximum number of attempts for network " b"operations",
b"Maximum number of attempts for network operations",
),
(b"", b"sparseprofile", b"", b"Sparse checkout profile to use (path in repo)"),
(
@@ -150,7 +150,7 @@ def robustcheckout(
or not re.match(b"^[a-f0-9]+$", revision)
):
raise error.Abort(
b"--revision must be a SHA-1 fragment 12-40 " b"characters long"
b"--revision must be a SHA-1 fragment 12-40 characters long"
)
sharebase = sharebase or ui.config(b"share", b"pool")
@@ -171,7 +171,7 @@ def robustcheckout(
extensions.find(b"sparse")
except KeyError:
raise error.Abort(
b"sparse extension must be enabled to use " b"--sparseprofile"
b"sparse extension must be enabled to use --sparseprofile"
)
ui.warn(b"(using Mercurial %s)\n" % util.version())
@@ -337,7 +337,6 @@ def _docheckout(
@contextlib.contextmanager
def timeit(op, behavior):
behaviors.add(behavior)
start = 0
errored = False
try:
start = time.time()
@@ -381,14 +380,14 @@ def _docheckout(
# enabled sparse, we would lock them out.
if destvfs.exists() and sparse_profile and not destvfs.exists(b".hg/sparse"):
raise error.Abort(
b"cannot enable sparse profile on existing " b"non-sparse checkout",
b"cannot enable sparse profile on existing non-sparse checkout",
hint=b"use a separate working directory to use sparse",
)
# And the other direction for symmetry.
if not sparse_profile and destvfs.exists(b".hg/sparse"):
raise error.Abort(
b"cannot use non-sparse checkout on existing sparse " b"checkout",
b"cannot use non-sparse checkout on existing sparse checkout",
hint=b"use a separate working directory to use sparse",
)
@@ -408,7 +407,7 @@ def _docheckout(
ui.warn(b"(shared store does not exist; deleting destination)\n")
with timeit("removed_missing_shared_store", "remove-wdir"):
destvfs.rmtree(forcibly=True)
elif not re.search(rb"[a-f0-9]{40}/\.hg$", storepath.replace(b"\\", b"/")):
elif not re.search(b"[a-f0-9]{40}/\\.hg$", storepath.replace(b"\\", b"/")):
ui.warn(
b"(shared store does not belong to pooled storage; "
b"deleting destination to improve efficiency)\n"
@@ -430,7 +429,7 @@ def _docheckout(
ui.warn(b"(abandoned transaction found; trying to recover)\n")
repo = hg.repository(ui, dest)
if not repo.recover():
ui.warn(b"(could not recover repo state; " b"deleting shared store)\n")
ui.warn(b"(could not recover repo state; deleting shared store)\n")
with timeit("remove_unrecovered_shared_store", "remove-store"):
deletesharedstore()
@@ -445,7 +444,7 @@ def _docheckout(
def handlenetworkfailure():
if networkattempts[0] >= networkattemptlimit:
raise error.Abort(
b"reached maximum number of network attempts; " b"giving up\n"
b"reached maximum number of network attempts; giving up\n"
)
ui.warn(
@@ -539,7 +538,7 @@ def _docheckout(
clonepeer = hg.peer(ui, {}, cloneurl)
rootnode = peerlookup(clonepeer, b"0")
except error.RepoLookupError:
raise error.Abort(b"unable to resolve root revision from clone " b"source")
raise error.Abort(b"unable to resolve root revision from clone source")
except (
error.Abort,
ssl.SSLError,
@@ -673,7 +672,6 @@ def _docheckout(
# We only pull if we are using symbolic names or the requested revision
# doesn't exist.
havewantedrev = False
checkoutrevision = None
if revision:
try:
@@ -685,7 +683,7 @@ def _docheckout(
if not ctx.hex().startswith(revision):
raise error.Abort(
b"--revision argument is ambiguous",
hint=b"must be the first 12+ characters of a " b"SHA-1 fragment",
hint=b"must be the first 12+ characters of a SHA-1 fragment",
)
checkoutrevision = ctx.hex()
@@ -750,7 +748,6 @@ def _docheckout(
# Mercurial 4.3 doesn't purge files outside the sparse checkout.
# See https://bz.mercurial-scm.org/show_bug.cgi?id=5626. Force
# purging by monkeypatching the sparse matcher.
old_sparse_fn = None
try:
old_sparse_fn = getattr(repo.dirstate, "_sparsematchfn", None)
if old_sparse_fn is not None:
@@ -764,7 +761,7 @@ def _docheckout(
abort_on_err=True,
# The function expects all arguments to be
# defined.
**{"print": None, "print0": None, "dirs": None, "files": None},
**{"print": None, "print0": None, "dirs": None, "files": None}
):
raise error.Abort(b"error purging")
finally:


@@ -231,11 +231,11 @@ def create_fetch_url_task(config, name, fetch):
"--sha256",
fetch["sha256"],
"--size",
"%d" % fetch["size"],
f"{fetch['size']}",
]
if fetch.get("strip-components"):
args.extend(["--strip-components", "%d" % fetch["strip-components"]])
args.extend(["--strip-components", f'{fetch["strip-components"]}'])
if fetch.get("add-prefix"):
args.extend(["--add-prefix", fetch["add-prefix"]])


@@ -48,6 +48,7 @@ FROM_DEPS_SCHEMA = Schema(
),
): Any(
None,
False,
*SET_NAME_MAP,
{Any(*SET_NAME_MAP): object},
),


@@ -7,6 +7,7 @@ import gzip
import os
import stat
import tarfile
from contextlib import contextmanager
# 2016-01-01T00:00:00+0000
DEFAULT_MTIME = 1451606400
@@ -104,14 +105,14 @@ def create_tar_from_files(fp, files):
tf.addfile(ti, f)
def create_tar_gz_from_files(fp, files, filename=None, compresslevel=9):
"""Create a tar.gz file deterministically from files.
@contextmanager
def gzip_compressor(fp, filename=None, compresslevel=9):
"""Create a deterministic GzipFile writer.
This is a glorified wrapper around ``create_tar_from_files`` that
adds gzip compression.
This is a glorified wrapper around ``GzipFile`` that adds some
determinism.
The passed file handle should be opened for writing in binary mode.
When the function returns, all data has been written to the handle.
"""
# Offset 3-7 in the gzip header contains an mtime. Pin it to a known
# value so output is deterministic.
@@ -123,4 +124,17 @@ def create_tar_gz_from_files(fp, files, filename=None, compresslevel=9):
mtime=DEFAULT_MTIME,
)
with gf:
yield gf
def create_tar_gz_from_files(fp, files, filename=None, compresslevel=9):
"""Create a tar.gz file deterministically from files.
This is a glorified wrapper around ``create_tar_from_files`` that
adds gzip compression.
The passed file handle should be opened for writing in binary mode.
When the function returns, all data has been written to the handle.
"""
with gzip_compressor(fp, filename, compresslevel) as gf:
create_tar_from_files(gf, files)
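The refactor pulls the deterministic gzip handling (pinned mtime, explicit filename) out of create_tar_gz_from_files into a reusable context manager, so other callers (see docker.py below) can compress arbitrary streams the same way. The two pieces compose exactly as the old entry point did:

with open("image.tar.gz", "wb") as fh:
    with gzip_compressor(fh, filename="image.tar.gz") as gf:
        create_tar_from_files(gf, files)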


@@ -10,7 +10,7 @@ import os
import re
from typing import Optional
from taskgraph.util.archive import create_tar_gz_from_files
from taskgraph.util.archive import create_tar_from_files, gzip_compressor
IMAGE_DIR = os.path.join(".", "taskcluster", "docker")
@@ -76,10 +76,15 @@ class HashingWriter:
def __init__(self, writer):
self._hash = hashlib.sha256()
self._writer = writer
self._written = 0
def write(self, buf):
self._hash.update(buf)
self._writer.write(buf)
self._written += len(buf)
def tell(self):
return self._written
def hexdigest(self):
return self._hash.hexdigest()
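The new tell() method is presumably what lets a HashingWriter be handed directly to tarfile (an assumption: tarfile.TarFile reads fileobj.tell() when it is constructed, so a write-only wrapper must track its own offset). A minimal sketch:

import io
import tarfile

writer = HashingWriter(io.BytesIO())
with tarfile.open(fileobj=writer, mode="w:") as tf:  # "w:" = uncompressed stream
    pass  # add members with tf.addfile(...)
print(writer.hexdigest())  # sha256 of everything written through the wrapper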
@@ -108,13 +113,8 @@ def create_context_tar(topsrcdir, context_dir, out_path, args=None):
Returns the SHA-256 hex digest of the created archive.
"""
with open(out_path, "wb") as fh:
return stream_context_tar(
topsrcdir,
context_dir,
fh,
image_name=os.path.basename(out_path),
args=args,
)
with gzip_compressor(fh, filename=os.path.basename(out_path)) as gf:
return stream_context_tar(topsrcdir, context_dir, gf, args=args)
RUN_TASK_ROOT = os.path.join(os.path.dirname(os.path.dirname(__file__)), "run-task")
@@ -135,7 +135,7 @@ RUN_TASK_SNIPPET = [
]
def stream_context_tar(topsrcdir, context_dir, out_file, image_name=None, args=None):
def stream_context_tar(topsrcdir, context_dir, out_file, args=None):
"""Like create_context_tar, but streams the tar file to the `out_file` file
object."""
archive_files = {}
@@ -201,7 +201,7 @@ def stream_context_tar(topsrcdir, context_dir, out_file, image_name=None, args=None):
archive_files["Dockerfile"] = io.BytesIO("".join(content).encode("utf-8"))
writer = HashingWriter(out_file)
create_tar_gz_from_files(writer, archive_files, image_name)
create_tar_from_files(writer, archive_files)
return writer.hexdigest()
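Net effect of the docker.py changes: stream_context_tar now emits an uncompressed tar and hashes exactly the bytes it writes, while compression becomes the caller's concern; create_context_tar wraps its output file in gzip_compressor, and the build-image call sites in the first hunks above post the raw tar. The new call shape, with names taken from the first hunk:

buf = BytesIO()
digest = stream_context_tar(GECKO, image_dir, buf, args=None)
# buf now holds an uncompressed tar; digest is the sha256 of its bytes.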


@@ -15,7 +15,7 @@ def _quote(s):
not enclosed in quotes.
"""
if isinstance(s, int):
return "%d" % s
return f"{s}"
# Empty strings need to be quoted to have any significance
if s and not SHELL_QUOTE_RE.search(s) and not s.startswith("~"):

third_party/python/uv.lock

@@ -877,7 +877,7 @@ requires-dist = [
{ name = "six", specifier = "==1.16.0" },
{ name = "slugid", specifier = "==2.0.0" },
{ name = "taskcluster", specifier = "==75.0.1" },
{ name = "taskcluster-taskgraph", specifier = "~=12.1" },
{ name = "taskcluster-taskgraph", specifier = "~=12.2" },
{ name = "taskcluster-urls", specifier = "==13.0.1" },
{ name = "toml", specifier = "==0.10.2" },
{ name = "tomlkit", specifier = "==0.12.3" },
@@ -1377,7 +1377,7 @@ wheels = [
[[package]]
name = "taskcluster-taskgraph"
version = "12.1.0"
version = "12.2.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "appdirs" },
@@ -1391,9 +1391,9 @@ dependencies = [
{ name = "taskcluster-urls" },
{ name = "voluptuous" },
]
sdist = { url = "https://files.pythonhosted.org/packages/2a/f0/d7beb4940c92a6ca0b163e3d5eb5f435586a67e5f6d8924810a06563eb04/taskcluster_taskgraph-12.1.0.tar.gz", hash = "sha256:c37b0ff65ab6ae3ae322bf1ecd4d3453db01b37f22164d9a33ab77e634a34d9a", size = 375550 }
sdist = { url = "https://files.pythonhosted.org/packages/de/6c/cb9f1a02b1689a83d172cb9aa487b08a4eaeb9116a4a324a0fe969183369/taskcluster_taskgraph-12.2.0.tar.gz", hash = "sha256:b62d10e4d7deba9721a0d0d3766825cdcb6a8ea0f3e7fcb7d718931647709d20", size = 408949 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/73/83/f5ba327be8c2482721029bddf2188450d82d86435a1c5cbc353f2ad5c76b/taskcluster_taskgraph-12.1.0-py3-none-any.whl", hash = "sha256:222ba9f729e6d970de8c177251e3a5f29010332d7cc6ca8967fd8c8b73fa2c1b", size = 193953 },
{ url = "https://files.pythonhosted.org/packages/7a/7d/20388dd8919d541575ff0a8005f13c2e5c37d72841f82d986e564766fbcd/taskcluster_taskgraph-12.2.0-py3-none-any.whl", hash = "sha256:594cbfe1afd823b547d4ae9bfbb7700f43e03c856d6a02717764eb9bbb537f5f", size = 194633 },
]
[[package]]


@@ -1 +1 @@
e7177b1ce1420e6ed494e8eeb66b09fc772ebdad5769f40561e4a1618eb3fc49
bf21ae5c163768025b5859d20f81d6a77266f999ab6ff009a8d5bf2a31c7356b