Bug 1715287 - [lint] Enable remaining pyupgrade rules and lint tree r=linter-reviewers,webdriver-reviewers,translations-reviewers,whimboo,sylvestre
Differential Revision: https://phabricator.services.mozilla.com/D245320
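The hunks below are mechanical rewrites produced by the pyupgrade rules (presumably applied via ./mach lint --fix; the rule codes cited here follow ruff's pyupgrade implementation and are an assumption, not part of this diff). A minimal sketch of the three rewrite families involved:

    # UP015: drop redundant open() mode arguments (assumed rule code)
    data = open(path, "r").read()      # before
    data = open(path).read()           # after

    # UP032: prefer f-strings over str.format (assumed rule code)
    msg = "{} = {}".format(key, val)   # before
    msg = f"{key} = {val}"             # after

    # UP004: drop useless inheritance from object (assumed rule code)
    class Writer(object): ...          # before
    class Writer: ...                  # after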
@@ -8,7 +8,7 @@ import re


 def generate(relH, relIdl):
-    input = open(relIdl, "rt").read()
+    input = open(relIdl).read()
     relations = re.findall(
         r"const unsigned long RELATION_([A-Z_]+) = ([x0-9a-f]+);", input
     )
@@ -8,7 +8,7 @@ import re


 def generate(roleH, roleIdl):
-    input = open(roleIdl, "rt").read()
+    input = open(roleIdl).read()
     roles = re.findall(r"const unsigned long ROLE_([A-Z_]+) = (\d+);", input)

     roleH.write(
@@ -27,9 +27,7 @@ if not options.version:
 # builds), but also so that newly-built older versions (e.g. beta build) aren't
 # considered "newer" than previously-built newer versions (e.g. a trunk nightly)

-define, MOZ_BUILDID, buildid = (
-    open(options.buildid, "r", encoding="utf-8").read().split()
-)
+define, MOZ_BUILDID, buildid = open(options.buildid, encoding="utf-8").read().split()

 # extract only the major version (i.e. "14" from "14.0b1")
 majorVersion = re.match(r"^(\d+)[^\d].*", options.version).group(1)
@@ -10,7 +10,7 @@ from marionette_harness import MarionetteTestCase, WindowManagerMixin


 def inline(doc):
-    return "data:text/html;charset=utf-8,{}".format(quote(doc))
+    return f"data:text/html;charset=utf-8,{quote(doc)}"


 # Each list element represents a window of tabs loaded at
@@ -441,9 +441,7 @@ class SessionStoreTestCase(WindowManagerMixin, MarionetteTestCase):
         self.assertEqual(
             current_windows_set,
             self.all_windows,
-            msg="Not all requested windows have been opened. Expected {}, got {}.".format(
-                self.all_windows, current_windows_set
-            ),
+            msg=f"Not all requested windows have been opened. Expected {self.all_windows}, got {current_windows_set}.",
         )

         self.marionette.quit(callback=lambda: self.simulate_os_shutdown())
@@ -13,9 +13,7 @@ from session_store_test_case import SessionStoreTestCase


 def inline(title):
-    return "data:text/html;charset=utf-8,<html><head><title>{}</title></head><body></body></html>".format(
-        title
-    )
+    return f"data:text/html;charset=utf-8,<html><head><title>{title}</title></head><body></body></html>"


 class TestSessionRestoreClosedTabs(SessionStoreTestCase):
@@ -8,7 +8,7 @@ from marionette_harness import MarionetteTestCase, WindowManagerMixin


 def inline(doc):
-    return "data:text/html;charset=utf-8,{}".format(quote(doc))
+    return f"data:text/html;charset=utf-8,{quote(doc)}"


 class TestRestoreLoadingPage(WindowManagerMixin, MarionetteTestCase):
@@ -12,9 +12,7 @@ from session_store_test_case import SessionStoreTestCase


 def inline(title):
-    return "data:text/html;charset=utf-8,<html><head><title>{}</title></head><body></body></html>".format(
-        title
-    )
+    return f"data:text/html;charset=utf-8,<html><head><title>{title}</title></head><body></body></html>"


 class TestSessionRestoreManually(SessionStoreTestCase):
@@ -14,7 +14,7 @@ from session_store_test_case import SessionStoreTestCase


 def inline(doc):
-    return "data:text/html;charset=utf-8,{}".format(quote(doc))
+    return f"data:text/html;charset=utf-8,{quote(doc)}"


 class TestSessionRestoreWithPinnedTabs(SessionStoreTestCase):
@@ -14,7 +14,7 @@ from session_store_test_case import SessionStoreTestCase


 def inline(doc):
-    return "data:text/html;charset=utf-8,{}".format(quote(doc))
+    return f"data:text/html;charset=utf-8,{quote(doc)}"


 class TestSessionRestoreWithTabGroups(SessionStoreTestCase):
@@ -12,9 +12,7 @@ from session_store_test_case import SessionStoreTestCase


 def inline(title):
-    return "data:text/html;charset=utf-8,<html><head><title>{}</title></head><body></body></html>".format(
-        title
-    )
+    return f"data:text/html;charset=utf-8,<html><head><title>{title}</title></head><body></body></html>"


 class TestSessionRestore(SessionStoreTestCase):
@@ -12,9 +12,7 @@ from session_store_test_case import SessionStoreTestCase


 def inline(title):
-    return "data:text/html;charset=utf-8,<html><head><title>{}</title></head><body></body></html>".format(
-        title
-    )
+    return f"data:text/html;charset=utf-8,<html><head><title>{title}</title></head><body></body></html>"


 class TestSessionRestore(SessionStoreTestCase):
@@ -12,9 +12,7 @@ from session_store_test_case import SessionStoreTestCase


 def inline(title):
-    return "data:text/html;charset=utf-8,<html><head><title>{}</title></head><body></body></html>".format(
-        title
-    )
+    return f"data:text/html;charset=utf-8,<html><head><title>{title}</title></head><body></body></html>"


 class TestSessionStoreEnabledAllWindows(SessionStoreTestCase):
@@ -14,9 +14,7 @@ from session_store_test_case import SessionStoreTestCase


 def inline(title):
-    return "data:text/html;charset=utf-8,<html><head><title>{}</title></head><body></body></html>".format(
-        title
-    )
+    return f"data:text/html;charset=utf-8,<html><head><title>{title}</title></head><body></body></html>"


 class TestManualRestoreWithTaskbarTabs(SessionStoreTestCase):
@@ -12,9 +12,7 @@ from session_store_test_case import SessionStoreTestCase


 def inline(title):
-    return "data:text/html;charset=utf-8,<html><head><title>{}</title></head><body></body></html>".format(
-        title
-    )
+    return f"data:text/html;charset=utf-8,<html><head><title>{title}</title></head><body></body></html>"


 class TestTaskbarTabSessionState(SessionStoreTestCase):
@@ -97,7 +97,7 @@ def build_storybook_manifest(command_context):
     config_environment = command_context.config_environment
     storybook_chrome_map_path = "browser/components/storybook/.storybook/chrome-map.js"
     chrome_map_path = mozpath.join(config_environment.topobjdir, "chrome-map.json")
-    with open(chrome_map_path, "r") as chrome_map_f:
+    with open(chrome_map_path) as chrome_map_f:
         with open(storybook_chrome_map_path, "w") as storybook_chrome_map_f:
             storybook_chrome_map_f.write("module.exports = ")
             storybook_chrome_map_f.write(chrome_map_f.read())
@@ -134,7 +134,7 @@ def updated_env(env):

 def build_tar_package(name, base, directory):
     name = os.path.realpath(name)
-    print("tarring {} from {}/{}".format(name, base, directory), file=sys.stderr)
+    print(f"tarring {name} from {base}/{directory}", file=sys.stderr)
     assert name.endswith(".tar.zst")

     cctx = zstandard.ZstdCompressor()
@@ -559,16 +559,14 @@ def main():
             del config[key]
         elif type(old_value) is not type(value):
             raise Exception(
-                "{} is overriding `{}` with a value of the wrong type".format(
-                    c.name, key
-                )
+                f"{c.name} is overriding `{key}` with a value of the wrong type"
             )
         elif isinstance(old_value, list):
             for v in value:
                 if v not in old_value:
                     old_value.append(v)
         elif isinstance(old_value, dict):
-            raise Exception("{} is setting `{}` to a dict?".format(c.name, key))
+            raise Exception(f"{c.name} is setting `{key}` to a dict?")
         else:
             config[key] = value
@@ -77,7 +77,7 @@ clang_target_link_libraries(clangTidyMozillaModule


 def add_moz_module(cmake_path):
-    with open(cmake_path, "r") as f:
+    with open(cmake_path) as f:
         lines = f.readlines()
         f.close()

@@ -89,7 +89,7 @@ def add_moz_module(cmake_path):
             for line in lines:
                 f.write(line)
     except ValueError:
-        raise Exception("Unable to find ALL_CLANG_TIDY_CHECKS in {}".format(cmake_path))
+        raise Exception(f"Unable to find ALL_CLANG_TIDY_CHECKS in {cmake_path}")


 def write_third_party_paths(mozilla_path, module_path):
@@ -30,7 +30,7 @@ generated_header = """
 """


-class MozbuildWriter(object):
+class MozbuildWriter:
     def __init__(self, fh):
         self._fh = fh
         self.indent = ""
@@ -745,7 +745,7 @@ def generate_gn_config(
     if preprocessor:
         preprocessor.main(gn_config_file)

-    with open(gn_config_file, "r") as fh:
+    with open(gn_config_file) as fh:
         gn_out = json.load(fh)
     gn_out = filter_gn_config(
         resolved_tempdir, gn_out, sandbox_variables, input_variables, gn_target
@@ -772,7 +772,7 @@ def main():
     if not gn_binary:
         raise Exception("The GN program must be present to generate GN configs.")

-    with open(args.config, "r") as fh:
+    with open(args.config) as fh:
         config = json.load(fh)

     topsrcdir = Path(__file__).parent.parent.resolve()
@@ -476,9 +476,7 @@ def _create_state_dir():
     if state_dir:
         if not os.path.exists(state_dir):
             print(
-                "Creating global state directory from environment variable: {}".format(
-                    state_dir
-                )
+                f"Creating global state directory from environment variable: {state_dir}"
             )
     else:
         state_dir = os.path.expanduser("~/.mozbuild")
@@ -486,7 +484,7 @@ def _create_state_dir():
         if not os.environ.get("MOZ_AUTOMATION"):
             print(STATE_DIR_FIRST_RUN.format(state_dir))

-        print("Creating default state directory: {}".format(state_dir))
+        print(f"Creating default state directory: {state_dir}")

     os.makedirs(state_dir, mode=0o770, exist_ok=True)
     return state_dir
@@ -114,7 +114,7 @@ def preprocess(base, input, flags):
     subprocess.run(command, stdout=open(preprocessed, "wb"), check=True)
     # Read the resulting file, and search for imports, that we'll want to
     # preprocess as well.
-    with open(preprocessed, "r") as fh:
+    with open(preprocessed) as fh:
         for line in fh:
             if not line.startswith("import"):
                 continue
@@ -211,7 +211,7 @@ def merge_dlldata(out, *inputs):
             # If for some reason, we don't get lines that are entirely different
             # from each other, we have some unexpected input.
             print(
-                "Error while merging dlldata. Last lines read: {}".format(lines),
+                f"Error while merging dlldata. Last lines read: {lines}",
                 file=sys.stderr,
             )
             return 1
@@ -91,7 +91,7 @@ def writeCertspecForServerLocations(fd):
     fd.write(
         "issuer:printableString/CN=Temporary Certificate Authority/O=Mozilla Testing/OU=Profile Guided Optimization\n"  # NOQA: E501
     )
-    fd.write("subject:{}\n".format(SAN[0]))
+    fd.write(f"subject:{SAN[0]}\n")
     fd.write("extension:subjectAlternativeName:{}\n".format(",".join(SAN)))

@@ -100,7 +100,7 @@ def constructCertDatabase(build, srcDir):
         certutil = build.get_binary_path(what="certutil")
         pk12util = build.get_binary_path(what="pk12util")
     except BinaryNotFoundException as e:
-        print("{}\n\n{}\n".format(e, e.help()))
+        print(f"{e}\n\n{e.help()}\n")
         return 1
     openssl = shutil.which("openssl")
     pycert = os.path.join(build.topsrcdir, "security", "manager", "tools", "pycert.py")
@@ -124,9 +124,7 @@ def constructCertDatabase(build, srcDir):
         # Write a certspec for the "server-locations.txt" file to that temporary directory
         pgoserver_certspec = os.path.join(pemfolder, "pgoserver.certspec")
         if os.path.exists(pgoserver_certspec):
-            raise Exception(
-                "{} already exists, which isn't allowed".format(pgoserver_certspec)
-            )
+            raise Exception(f"{pgoserver_certspec} already exists, which isn't allowed")
         with open(pgoserver_certspec, "w") as fd:
             writeCertspecForServerLocations(fd)
@@ -134,11 +132,11 @@ def constructCertDatabase(build, srcDir):
         for root, dirs, files in os.walk(pemfolder):
             for certspec in [i for i in files if i.endswith(".certspec")]:
                 name = certspec.split(".certspec")[0]
-                pem = os.path.join(pemfolder, "{}.cert.pem".format(name))
+                pem = os.path.join(pemfolder, f"{name}.cert.pem")

-                print("Generating public certificate {} (pem={})".format(name, pem))
+                print(f"Generating public certificate {name} (pem={pem})")

-                with open(os.path.join(root, certspec), "r") as certspec_file:
+                with open(os.path.join(root, certspec)) as certspec_file:
                     certspec_data = certspec_file.read()
                 with open(pem, "w") as pem_file:
                     status = runUtil(
@@ -172,14 +170,14 @@ def constructCertDatabase(build, srcDir):
                 key_type = parts[1]
                 if key_type not in ["ca", "client", "server"]:
                     raise Exception(
-                        "{}: keyspec filenames must be of the form XXX.client.keyspec "
-                        "or XXX.ca.keyspec (key_type={})".format(keyspec, key_type)
+                        f"{keyspec}: keyspec filenames must be of the form XXX.client.keyspec "
+                        f"or XXX.ca.keyspec (key_type={key_type})"
                     )
-                key_pem = os.path.join(pemfolder, "{}.key.pem".format(name))
+                key_pem = os.path.join(pemfolder, f"{name}.key.pem")

-                print("Generating private key {} (pem={})".format(name, key_pem))
+                print(f"Generating private key {name} (pem={key_pem})")

-                with open(os.path.join(root, keyspec), "r") as keyspec_file:
+                with open(os.path.join(root, keyspec)) as keyspec_file:
                     keyspec_data = keyspec_file.read()
                 with open(key_pem, "w") as pem_file:
                     status = runUtil(
@@ -188,17 +186,15 @@ def constructCertDatabase(build, srcDir):
                 if status:
                     return status

-                cert_pem = os.path.join(pemfolder, "{}.cert.pem".format(name))
+                cert_pem = os.path.join(pemfolder, f"{name}.cert.pem")
                 if not os.path.exists(cert_pem):
                     raise Exception(
-                        "There has to be a corresponding certificate named {} for "
-                        "the keyspec {}".format(cert_pem, keyspec)
+                        f"There has to be a corresponding certificate named {cert_pem} for "
+                        f"the keyspec {keyspec}"
                     )

-                p12 = os.path.join(pemfolder, "{}.key.p12".format(name))
-                print(
-                    "Converting private key {} to PKCS12 (p12={})".format(key_pem, p12)
-                )
+                p12 = os.path.join(pemfolder, f"{name}.key.p12")
+                print(f"Converting private key {key_pem} to PKCS12 (p12={p12})")
                 status = runUtil(
                     openssl,
                     [
@@ -219,7 +215,7 @@ def constructCertDatabase(build, srcDir):
                 if status:
                     return status

-                print("Importing private key {} to database".format(key_pem))
+                print(f"Importing private key {key_pem} to database")
                 status = runUtil(
                     pk12util,
                     ["-i", p12, "-d", srcDir, "-w", pwfile.name, "-k", pwfile.name],
@@ -228,16 +224,14 @@ def constructCertDatabase(build, srcDir):
                     return status

                 if key_type == "ca":
-                    shutil.copyfile(
-                        cert_pem, os.path.join(srcDir, "{}.ca".format(name))
-                    )
+                    shutil.copyfile(cert_pem, os.path.join(srcDir, f"{name}.ca"))
                 elif key_type == "client":
-                    shutil.copyfile(p12, os.path.join(srcDir, "{}.client".format(name)))
+                    shutil.copyfile(p12, os.path.join(srcDir, f"{name}.client"))
                 elif key_type == "server":
                     pass  # Nothing to do for server keys
                 else:
                     raise Exception(
-                        "State error: Unknown keyspec key_type: {}".format(key_type)
+                        f"State error: Unknown keyspec key_type: {key_type}"
                     )

     return 0
@@ -71,7 +71,7 @@ if __name__ == "__main__":
     try:
         binary = build.get_binary_path(where="staged-package")
     except BinaryNotFoundException as e:
-        print("{}\n\n{}\n".format(e, e.help()))
+        print(f"{e}\n\n{e.help()}\n")
         sys.exit(1)
     binary = os.path.normpath(os.path.abspath(binary))
@@ -104,7 +104,7 @@ if __name__ == "__main__":
     with TemporaryDirectory() as profilePath:
         # TODO: refactor this into mozprofile
         profile_data_dir = os.path.join(build.topsrcdir, "testing", "profiles")
-        with open(os.path.join(profile_data_dir, "profiles.json"), "r") as fh:
+        with open(os.path.join(profile_data_dir, "profiles.json")) as fh:
             base_profiles = json.load(fh)["profileserver"]

         prefpaths = [
@@ -215,7 +215,7 @@ if __name__ == "__main__":
         with open(log) as f:
             for line in f.readlines():
                 if "LLVM Profile Error" in line:
-                    print("Error [{}]: '{}'".format(log, line.strip()))
+                    print(f"Error [{log}]: '{line.strip()}'")
                     should_err = True

     if should_err:
@@ -6,5 +6,5 @@


 def copy(out_file, in_path):
-    with open(in_path, "r") as fh:
+    with open(in_path) as fh:
         out_file.write(fh.read())
@@ -77,14 +77,10 @@ def upload_worker(queue, event, bucket, session_args):
                 "ContentEncoding": "gzip",
                 "ContentType": "text/plain",
             }
-            log.info(
-                'Uploading "{}" ({} bytes)'.format(pathname, len(compressed.getvalue()))
-            )
+            log.info(f'Uploading "{pathname}" ({len(compressed.getvalue())} bytes)')
             with timed() as elapsed:
                 s3.upload_fileobj(compressed, bucket, pathname, ExtraArgs=extra_args)
-                log.info(
-                    'Finished uploading "{}" in {:0.3f}s'.format(pathname, elapsed())
-                )
+                log.info(f'Finished uploading "{pathname}" in {elapsed():0.3f}s')
             queue.task_done()
         except Exception:
             log.exception("Thread encountered exception:")
@@ -104,9 +100,7 @@ def do_work(artifact, region, bucket):
         secrets_url = "http://taskcluster/secrets/v1/secret/project/releng/gecko/build/level-{}/gecko-generated-sources-upload".format(  # noqa
             level
         )
-        log.info(
-            'Using AWS credentials from the secrets service: "{}"'.format(secrets_url)
-        )
+        log.info(f'Using AWS credentials from the secrets service: "{secrets_url}"')
         res = session.get(secrets_url)
         res.raise_for_status()
         secret = res.json()
@@ -118,19 +112,17 @@ def do_work(artifact, region, bucket):
         log.info("Trying to use your AWS credentials..")

     # First, fetch the artifact containing the sources.
-    log.info('Fetching generated sources artifact: "{}"'.format(artifact))
+    log.info(f'Fetching generated sources artifact: "{artifact}"')
     with timed() as elapsed:
         res = session.get(artifact)
     log.info(
-        "Fetch HTTP status: {}, {} bytes downloaded in {:0.3f}s".format(
-            res.status_code, len(res.content), elapsed()
-        )
+        f"Fetch HTTP status: {res.status_code}, {len(res.content)} bytes downloaded in {elapsed():0.3f}s"
     )
     res.raise_for_status()
     # Create a queue and worker threads for uploading.
     q = Queue()
     event = Event()
-    log.info("Creating {} worker threads".format(NUM_WORKER_THREADS))
+    log.info(f"Creating {NUM_WORKER_THREADS} worker threads")
     for i in range(NUM_WORKER_THREADS):
         t = Thread(target=upload_worker, args=(q, event, bucket, session_args))
         t.daemon = True
@@ -140,7 +132,7 @@ def do_work(artifact, region, bucket):
         for entry in tar:
             if event.is_set():
                 break
-            log.info('Queueing "{}"'.format(entry.name))
+            log.info(f'Queueing "{entry.name}"')
             q.put((entry.name, tar.extractfile(entry).read()))
     # Wait until all uploads are finished.
     # We don't use q.join() here because we want to also monitor event.
@@ -161,7 +153,7 @@ def main(argv):

     with timed() as elapsed:
         do_work(region=region, bucket=bucket, artifact=args.artifact)
-    log.info("Finished in {:.03f}s".format(elapsed()))
+    log.info(f"Finished in {elapsed():.03f}s")
     return 0
@@ -59,7 +59,7 @@ def valgrind_test(command_context, suppressions):
         profile_data_dir = os.path.join(
             command_context.topsrcdir, "testing", "profiles"
         )
-        with open(os.path.join(profile_data_dir, "profiles.json"), "r") as fh:
+        with open(os.path.join(profile_data_dir, "profiles.json")) as fh:
             base_profiles = json.load(fh)["valgrind"]

         prefpaths = [
@@ -6,7 +6,7 @@ import logging
 import re


-class OutputHandler(object):
+class OutputHandler:
     """
     A class for handling Valgrind output.
@@ -14,7 +14,7 @@ def get_buildid():
     import buildconfig

     path = os.path.join(buildconfig.topobjdir, "buildid.h")
-    _define, _MOZ_BUILDID, buildid = open(path, "r", encoding="utf-8").read().split()
+    _define, _MOZ_BUILDID, buildid = open(path, encoding="utf-8").read().split()
     return buildid
@@ -74,7 +74,7 @@ def toggle_trailing_blank_line(depname):


 def get_trailing_blank_line_state(depname):
-    lines = open(depname, "r").readlines()
+    lines = open(depname).readlines()
     if not lines:
         print("unexpected short file", file=sys.stderr)
         return "no blank line"
@@ -25,11 +25,11 @@ ignore_files = [


 def log_pass(filename, text):
-    print("TEST-PASS | {} | {} | {}".format(scriptname, filename, text))
+    print(f"TEST-PASS | {scriptname} | {filename} | {text}")


 def log_fail(filename, text):
-    print("TEST-UNEXPECTED-FAIL | {} | {} | {}".format(scriptname, filename, text))
+    print(f"TEST-UNEXPECTED-FAIL | {scriptname} | {filename} | {text}")


 def check_single_file(filename):
@@ -38,7 +38,7 @@ def check_single_file(filename):
     try:
         data.decode(expected_encoding)
     except Exception:
-        log_fail(filename, "not in {} encoding".format(expected_encoding))
+        log_fail(filename, f"not in {expected_encoding} encoding")

     log_pass(filename, "ok")
     return True
@@ -15,11 +15,11 @@ topsrcdir = os.path.dirname(os.path.dirname(__file__))


 def log_pass(text):
-    print("TEST-PASS | {} | {}".format(scriptname, text))
+    print(f"TEST-PASS | {scriptname} | {text}")


 def log_fail(text):
-    print("TEST-UNEXPECTED-FAIL | {} | {}".format(scriptname, text))
+    print(f"TEST-UNEXPECTED-FAIL | {scriptname} | {text}")


 def check_opcode():
@@ -30,34 +30,28 @@ for f in files:
     details[f] = num

 if count == expected_count:
-    print(
-        "TEST-PASS | check_source_count.py {0} | {1}".format(
-            search_string, expected_count
-        )
-    )
+    print(f"TEST-PASS | check_source_count.py {search_string} | {expected_count}")

 else:
     print(
-        "TEST-UNEXPECTED-FAIL | check_source_count.py {0} | ".format(search_string),
+        f"TEST-UNEXPECTED-FAIL | check_source_count.py {search_string} | ",
         end="",
     )
     if count < expected_count:
         print(
-            "There are fewer occurrences of /{0}/ than expected. "
+            f"There are fewer occurrences of /{search_string}/ than expected. "
             "This may mean that you have removed some, but forgotten to "
-            "account for it {1}.".format(search_string, error_location)
+            f"account for it {error_location}."
         )
     else:
         print(
-            "There are more occurrences of /{0}/ than expected. We're trying "
-            "to prevent an increase in the number of {1}'s, using {2} if "
+            f"There are more occurrences of /{search_string}/ than expected. We're trying "
+            f"to prevent an increase in the number of {search_string}'s, using {replacement} if "
             "possible. If it is unavoidable, you should update the expected "
-            "count {3}.".format(
-                search_string, search_string, replacement, error_location
-            )
+            f"count {error_location}."
         )

-    print("Expected: {0}; found: {1}".format(expected_count, count))
+    print(f"Expected: {expected_count}; found: {count}")
     for k in sorted(details):
-        print("Found {0} occurences in {1}".format(details[k], k))
+        print(f"Found {details[k]} occurences in {k}")
     sys.exit(-1)
@@ -254,7 +254,7 @@ def error(filename, linenum, *lines):
     out("")


-class FileKind(object):
+class FileKind:
     C = 1
     CPP = 2
     INL_H = 3
@@ -436,7 +436,7 @@ def is_module_header(enclosing_inclname, header_inclname):
     return False


-class Include(object):
+class Include:
     """Important information for a single #include statement."""

     def __init__(self, include_prefix, inclname, line_suffix, linenum, is_system):
@@ -503,7 +503,7 @@ class Include(object):
         return self.include_prefix + self.quote() + self.line_suffix + "\n"


-class CppBlock(object):
+class CppBlock:
     """C preprocessor block: a whole file or a single #if/#elif/#else block.

     A #if/#endif block is the contents of a #if/#endif (or similar) section.
@@ -620,7 +620,7 @@ class CppBlock(object):
         return self.start + "".join(kid.to_source() for kid in self.kids) + self.end


-class OrdinaryCode(object):
+class OrdinaryCode:
     """A list of lines of code that aren't #include/#if/#else/#endif lines."""

     def __init__(self, lines=None):
@@ -80,7 +80,7 @@ def preprocess(path, defines):
     pp.context.update(defines)
     pp.out = io.StringIO()
     pp.do_filter("substitution")
-    pp.do_include(open(path, "r", encoding="latin1"))
+    pp.do_include(open(path, encoding="latin1"))
     pp.out.seek(0)
     return pp.out
@@ -265,9 +265,7 @@ def generate_module_rc(binary="", rcinclude=None):
     overrides = {}

     if rcinclude:
-        include = "// From included resource {}\n{}".format(
-            rcinclude, preprocess(rcinclude, defines).read()
-        )
+        include = f"// From included resource {rcinclude}\n{preprocess(rcinclude, defines).read()}"
     else:
         include = ""
@@ -303,7 +301,7 @@ def generate_module_rc(binary="", rcinclude=None):
     manifest_path = os.path.join(srcdir, binary + ".manifest")
     if os.path.exists(manifest_path):
         manifest_path = manifest_path.replace("\\", "\\\\")
-        data += '\n{} RT_MANIFEST "{}"\n'.format(manifest_id, manifest_path)
+        data += f'\n{manifest_id} RT_MANIFEST "{manifest_path}"\n'

     with open("{}.rc".format(binary or "module"), "w", encoding="latin1") as fh:
         fh.write(data)
||||
@@ -60,7 +60,7 @@ def generate_precomplete(root_path):
|
||||
precomplete_file_path = os.path.join(root_path, rel_path_precomplete)
|
||||
# Open the file so it exists before building the list of files and open it
|
||||
# in binary mode to prevent OS specific line endings.
|
||||
precomplete_file = open(precomplete_file_path, mode="wt", newline="\n")
|
||||
precomplete_file = open(precomplete_file_path, mode="w", newline="\n")
|
||||
rel_file_path_list, rel_dir_path_list = get_build_entries(root_path)
|
||||
for rel_file_path in rel_file_path_list:
|
||||
precomplete_file.write('remove "' + rel_file_path + '"\n')
|
||||
|
||||
@@ -110,7 +110,7 @@ replaced_variables = """
|
||||
|
||||
|
||||
def generate_config(output, config_h_in):
|
||||
file_config_h_in = open(config_h_in, "r")
|
||||
file_config_h_in = open(config_h_in)
|
||||
lines = file_config_h_in.readlines()
|
||||
|
||||
# Remove the known cmake variables
|
||||
|
||||
@@ -11,7 +11,7 @@ from mozbuild.util import FileAvoidWrite


 def gen_wrappers(unused, template_file, outdir, compiler, *header_list):
-    template = open(template_file, "r").read()
+    template = open(template_file).read()

     for header in header_list:
         with FileAvoidWrite(os.path.join(outdir, header)) as f:
@@ -18,7 +18,7 @@ decl_re = re.compile(

 def read_decls(filename):
     """Parse & yield C-style decls from an input file"""
-    with open(filename, "r") as fd:
+    with open(filename) as fd:
         # Strip comments from the source text.
         text = comment_re.sub("", fd.read())
@@ -37,7 +37,7 @@ def read_decls(filename):

 def generate(fd, consts_path, unicodes_path, template_path, compiler):
     # Parse the template
-    with open(template_path, "r") as template_fd:
+    with open(template_path) as template_fd:
         template = string.Template(template_fd.read())

     decls = ""
@@ -49,15 +49,13 @@ def generate(fd, consts_path, unicodes_path, template_path, compiler):
         assert args is None, "parameters in const decl!"

         decls += textwrap.dedent(
-            """
+            f"""
             #ifdef {name}
             constexpr {ty} _tmp_{name} = {name};
             #undef {name}
             constexpr {ty} {name} = _tmp_{name};
             #endif
-            """.format(
-                ty=ty, name=name
-            )
+            """
         )
@@ -72,7 +70,7 @@ def generate(fd, consts_path, unicodes_path, template_path, compiler):
         args = ", ".join("a%d" % i for i in range(len(args)))

         decls += textwrap.dedent(
-            """
+            f"""
             #ifdef {name}
             #undef {name}
             static inline {ty} WINAPI
@@ -85,9 +83,7 @@ def generate(fd, consts_path, unicodes_path, template_path, compiler):
             = delete;
             #endif
             #endif
-            """.format(
-                ty=ty, name=name, params=params, args=args
-            )
+            """
         )

         # Write out the resulting file
@@ -80,13 +80,13 @@ class _MozTestResult(_TestResult):
     def addError(self, test, err):
         _TestResult.addError(self, test, err)
         self.printFail(test, err)
-        self.stream.writeln("ERROR: {0}".format(self.getDescription(test)))
+        self.stream.writeln(f"ERROR: {self.getDescription(test)}")
         self.stream.writeln(self.errors[-1][1])

     def addFailure(self, test, err):
         _TestResult.addFailure(self, test, err)
         self.printFail(test, err)
-        self.stream.writeln("FAIL: {0}".format(self.getDescription(test)))
+        self.stream.writeln(f"FAIL: {self.getDescription(test)}")
         self.stream.writeln(self.failures[-1][1])

     def printFail(self, test, err):
@@ -99,7 +99,7 @@ class _MozTestResult(_TestResult):
             tb = tb.tb_next
         if tb:
             _, ln, _ = inspect.getframeinfo(tb)[:3]
-            message = "line {0}: {1}".format(ln, message)
+            message = f"line {ln}: {message}"
         self.printStatus("TEST-UNEXPECTED-FAIL", test, message)
@@ -151,7 +151,7 @@ def normcase(path):
     return path


-class _MockBaseOpen(object):
+class _MockBaseOpen:
     """Callable that acts like the open() function; see MockedOpen for more
     info.
     """
@@ -221,7 +221,7 @@ class _MockOpen(_MockBaseOpen):
         return MockedStringFile(self, name, content)


-class MockedOpen(object):
+class MockedOpen:
     """
     Context manager diverting the open builtin such that opening files
     can open "virtual" file instances given when creating a MockedOpen.
@@ -42,7 +42,7 @@ def _nsinstall_internal(argv):
     # The remaining arguments are not used in our tree, thus they're not
     # implented.
     def BadArg(option, opt, value, parser):
-        parser.error("option not supported: {0}".format(opt))
+        parser.error(f"option not supported: {opt}")

     p.add_option(
         "-C", action="callback", metavar="CWD", callback=BadArg, help="NOT SUPPORTED"
@@ -69,7 +69,7 @@ def _nsinstall_internal(argv):
     try:
         options.m = int(options.m, 8)
     except Exception:
-        sys.stderr.write("nsinstall: {0} is not a valid mode\n".format(options.m))
+        sys.stderr.write(f"nsinstall: {options.m} is not a valid mode\n")
         return 1

     # just create one directory?
@@ -77,7 +77,7 @@ def _nsinstall_internal(argv):
         dir = os.path.abspath(dir)
         if os.path.exists(dir):
             if not os.path.isdir(dir):
-                print("nsinstall: {0} is not a directory".format(dir), file=sys.stderr)
+                print(f"nsinstall: {dir} is not a directory", file=sys.stderr)
                 return 1
             if mode:
                 os.chmod(dir, mode)
@@ -92,7 +92,7 @@ def _nsinstall_internal(argv):
             # We might have hit EEXIST due to a race condition (see bug 463411) -- try again once
             if try_again:
                 return maybe_create_dir(dir, mode, False)
-            print("nsinstall: failed to create directory {0}: {1}".format(dir, e))
+            print(f"nsinstall: failed to create directory {dir}: {e}")
             return 1
         else:
             return 0
@@ -25,26 +25,26 @@ class TestMozUnit(unittest.TestCase):
             self.assertFalse(os.path.exists("foo/file1"))

             # Check the contents of the files given at MockedOpen creation.
-            self.assertEqual(open("file1", "r").read(), "content1")
-            self.assertEqual(open("file2", "r").read(), "content2")
+            self.assertEqual(open("file1").read(), "content1")
+            self.assertEqual(open("file2").read(), "content2")

             # Check that overwriting these files alters their content.
             with open("file1", "w") as file:
                 file.write("foo")
             self.assertTrue(os.path.exists("file1"))
-            self.assertEqual(open("file1", "r").read(), "foo")
+            self.assertEqual(open("file1").read(), "foo")

             # ... but not until the file is closed.
             file = open("file2", "w")
             file.write("bar")
-            self.assertEqual(open("file2", "r").read(), "content2")
+            self.assertEqual(open("file2").read(), "content2")
             file.close()
-            self.assertEqual(open("file2", "r").read(), "bar")
+            self.assertEqual(open("file2").read(), "bar")

             # Check that appending to a file does append
             with open("file1", "a") as file:
                 file.write("bar")
-            self.assertEqual(open("file1", "r").read(), "foobar")
+            self.assertEqual(open("file1").read(), "foobar")

             self.assertFalse(os.path.exists("file3"))
@@ -55,27 +55,27 @@ class TestMozUnit(unittest.TestCase):
             # Check that writing a new file does create the file.
             with open("file3", "w") as file:
                 file.write("baz")
-            self.assertEqual(open("file3", "r").read(), "baz")
+            self.assertEqual(open("file3").read(), "baz")
             self.assertTrue(os.path.exists("file3"))

             # Check the content of the file created outside MockedOpen.
-            self.assertEqual(open(path, "r").read(), "foobar")
+            self.assertEqual(open(path).read(), "foobar")

             # Check that overwriting a file existing on the file system
             # does modify its content.
             with open(path, "w") as file:
                 file.write("bazqux")
-            self.assertEqual(open(path, "r").read(), "bazqux")
+            self.assertEqual(open(path).read(), "bazqux")

         with MockedOpen():
             # Check that appending to a file existing on the file system
             # does modify its content.
             with open(path, "a") as file:
                 file.write("bazqux")
-            self.assertEqual(open(path, "r").read(), "foobarbazqux")
+            self.assertEqual(open(path).read(), "foobarbazqux")

         # Check that the file was not actually modified on the file system.
-        self.assertEqual(open(path, "r").read(), "foobar")
+        self.assertEqual(open(path).read(), "foobar")
         os.remove(path)

         # Check that the file created inside MockedOpen wasn't actually
@@ -120,9 +120,7 @@ class TestNsinstall(unittest.TestCase):
         mode = 0o600
         os.chmod(testfile, mode)
         testdir = self.mkdirs("testdir")
-        self.assertEqual(
-            nsinstall(["-m", "{0:04o}".format(mode), testfile, testdir]), 0
-        )
+        self.assertEqual(nsinstall(["-m", f"{mode:04o}", testfile, testdir]), 0)
         destfile = os.path.join(testdir, "testfile")
         self.assertTrue(os.path.isfile(destfile))
         self.assertEqual(os.stat(testfile).st_mode, os.stat(destfile).st_mode)
@@ -197,7 +197,7 @@ allfiles = reduce(
 )

 for first in allfiles:
-    testbasename = "test{0}_".format(getid(first))
+    testbasename = f"test{getid(first)}_"
     test = [None, "_write" + getid(first), None]
     for second in atomics:
         test[0] = testbasename + getid([second])
@@ -45,7 +45,7 @@ else:

 def main(argv):
     # Check for CRLF line endings.
-    with open(__file__, "r") as fh:
+    with open(__file__) as fh:
         data = fh.read()
     if "\r" in data:
         print(
@@ -7,6 +7,6 @@

 # Put the content of `filenames[0]` file into `output` file pointer
 def main(output, *filenames):
-    with open(filenames[0], "r", encoding="utf-8") as f:
+    with open(filenames[0], encoding="utf-8") as f:
         content = f.read()
     output.write(content)
@@ -7,7 +7,7 @@ import re

 def read_conf(conf_filename):
     # Can't read/write from a single StringIO, so make a new one for reading.
-    stream = open(conf_filename, "r")
+    stream = open(conf_filename)

     def parse_counters(stream):
         for line_num, full_line in enumerate(stream):
@@ -67,7 +67,7 @@ class WebIDLPool:
         return WebIDLPool.GeneratorState._generate_build_files_for_webidl(filename)


-class BuildResult(object):
+class BuildResult:
     """Represents the result of processing WebIDL files.

     This holds a summary of output file generation during code generation.
@@ -283,7 +283,7 @@ class WebIDLCodegenManager(LoggingMixin):
         self._state = WebIDLCodegenManagerState()

         if os.path.exists(state_path):
-            with open(state_path, "r") as fh:
+            with open(state_path) as fh:
                 try:
                     self._state = WebIDLCodegenManagerState(fh=fh)
                 except Exception as e:
@@ -442,7 +442,7 @@ class WebIDLCodegenManager(LoggingMixin):
         parser = WebIDL.Parser(self._cache_dir, lexer=None)

         for path in sorted(self._input_paths):
-            with open(path, "r", encoding="utf-8") as fh:
+            with open(path, encoding="utf-8") as fh:
                 data = fh.read()
             hashes[path] = hashlib.sha1(data.encode()).hexdigest()
             parser.parse(data, path)
@@ -716,7 +716,7 @@ def create_build_system_manager(topsrcdir=None, topobjdir=None, dist_dir=None):
     obj_dir = os.path.join(topobjdir, "dom", "bindings")
     webidl_root = os.path.join(topsrcdir, "dom", "webidl")

-    with open(os.path.join(obj_dir, "file-lists.json"), "r") as fh:
+    with open(os.path.join(obj_dir, "file-lists.json")) as fh:
         files = json.load(fh)

     inputs = (
@@ -126,7 +126,7 @@ class TestWebIDLCodegenManager(unittest.TestCase):

         self.assertTrue(os.path.isfile(manager._state_path))

-        with open(manager._state_path, "r") as fh:
+        with open(manager._state_path) as fh:
             state = json.load(fh)
             self.assertEqual(state["version"], 3)
             self.assertIn("webidls", state)
@@ -192,7 +192,7 @@ class TestWebIDLCodegenManager(unittest.TestCase):
                 break

         self.assertIsNotNone(child_path)
-        child_content = open(child_path, "r").read()
+        child_content = open(child_path).read()

         with MockedOpen({child_path: child_content + "\n/* */"}):
             m2 = WebIDLCodegenManager(**args)
@@ -216,7 +216,7 @@ class TestWebIDLCodegenManager(unittest.TestCase):
                 child_path = p

         self.assertIsNotNone(parent_path)
-        parent_content = open(parent_path, "r").read()
+        parent_content = open(parent_path).read()

         with MockedOpen({parent_path: parent_content + "\n/* */"}):
             m2 = WebIDLCodegenManager(**args)
@@ -253,7 +253,7 @@ class TestWebIDLCodegenManager(unittest.TestCase):
         result = m1.generate_build_files(processes=1)
         l = len(result.inputs)

-        with open(fake_path, "wt", newline="\n") as fh:
+        with open(fake_path, "w", newline="\n") as fh:
             fh.write("# Modified content")

         m2 = WebIDLCodegenManager(**args)
@@ -12,7 +12,7 @@ import traceback
 import WebIDL


-class TestHarness(object):
+class TestHarness:
     def __init__(self, test, verbose):
         self.test = test
         self.verbose = verbose
@@ -66,7 +66,7 @@ def ChooseSubsuite(name):
     elif split[1] == "textures" and split[2] != "misc":
         category = "ext"

-    return "webgl{}-{}".format(version, category)
+    return f"webgl{version}-{category}"


 ########################################################################
@@ -133,7 +133,7 @@ def AccumTests(pathStr, listFile, allowWebGL1, allowWebGL2, out_testList):
     listPath = listPathStr.replace("/", os.sep)
     assert os.path.exists(listPath), "Bad `listPath`: " + listPath

-    with open(listPath, "r") as fIn:
+    with open(listPath) as fIn:
         lineNum = 0
         for line in fIn:
             lineNum += 1
@@ -164,9 +164,7 @@ def AccumTests(pathStr, listFile, allowWebGL1, allowWebGL2, out_testList):
             elif flag == "--slow":
                 continue  # TODO
             else:
-                text = "Unknown flag '{}': {}:{}: {}".format(
-                    flag, listPath, lineNum, line
-                )
+                text = f"Unknown flag '{flag}': {listPath}:{lineNum}: {line}"
                 assert False, text
             continue
@@ -210,7 +208,7 @@ def FillTemplate(inFilePath, templateDict, outFilePath):


 def ImportTemplate(inFilePath):
-    with open(inFilePath, "r") as f:
+    with open(inFilePath) as f:
         return TemplateShell(f)
@@ -379,7 +377,7 @@ def WriteWrappers(testEntryList):
             WriteWrapper(entry.path, True, templateShell, wrapperPathList)
             continue

-    print("{} wrappers written.\n".format(len(wrapperPathList)))
+    print(f"{len(wrapperPathList)} wrappers written.\n")
     return wrapperPathList
@@ -469,7 +467,7 @@ def LoadTOML(path):
     key = ""
     val = ""

-    with open(path, "r") as f:
+    with open(path) as f:
         for rawLine in f:
             lineNum += 1
             if multiLineVal:
@@ -484,11 +482,11 @@ def LoadTOML(path):
             if line[0] in [";", "#"]:
                 continue
             if line[0] == "[":
-                assert line[-1] == "]", "{}:{}".format(path, lineNum)
+                assert line[-1] == "]", f"{path}:{lineNum}"
                 curSectionName = line[1:-1].strip('"')
                 assert (
                     curSectionName not in ret
-                ), "Line {}: Duplicate section: {}".format(lineNum, line)
+                ), f"Line {lineNum}: Duplicate section: {line}"
                 curSectionMap = {}
                 ret[curSectionName] = (lineNum, curSectionMap)
                 continue
@@ -517,14 +515,14 @@ def LoadErrata():
             continue
         elif sectionName != "DEFAULT":
             path = sectionName.replace("/", os.sep)
-            assert os.path.exists(path), "Errata line {}: Invalid file: {}".format(
-                sectionLineNum, sectionName
-            )
+            assert os.path.exists(
+                path
+            ), f"Errata line {sectionLineNum}: Invalid file: {sectionName}"

         for key, (lineNum, val) in sectionMap.items():
-            assert key in ACCEPTABLE_ERRATA_KEYS, "Line {}: {}".format(lineNum, key)
+            assert key in ACCEPTABLE_ERRATA_KEYS, f"Line {lineNum}: {key}"

-            curLine = "{} = {}".format(key, val)
+            curLine = f"{key} = {val}"
             curLines.append(curLine)
         continue
@@ -214,7 +214,7 @@ for c in COMBOS:

 # -

-with open(DIR / "reftest.list", "r") as f:
+with open(DIR / "reftest.list") as f:
     reftest_list_text = f.read()

 for args in todo:
@@ -96,9 +96,9 @@ def fetch_balrog_xml(
     for channel in results:
         print(", ".join(matching_channels[channel]))
         for target in targets:
-            print("\t{}".format(target))
+            print(f"\t{target}")
             for url in results[channel][target]:
-                print("\t\t{}".format(url))
+                print(f"\t\t{url}")


 def main():
@@ -163,14 +163,14 @@ def calculate_gmpopenh264_json(version: str, version_hash: str, url_base: str) -> str:
     return (
         "{\n"
         + ' "hashFunction": "sha512",\n'
-        + ' "name": "OpenH264-{}",\n'.format(version)
+        + f' "name": "OpenH264-{version}",\n'
         + ' "schema_version": 1000,\n'
         + ' "vendors": {\n'
         + ' "gmp-gmpopenh264": {\n'
         + ' "platforms": {\n'
         + generate_json_for_cdms(cdms)
         + " },\n"
-        + ' "version": "{}"\n'.format(version)
+        + f' "version": "{version}"\n'
         + " }\n"
         + " }\n"
         + "}"
@@ -203,14 +203,14 @@ def calculate_widevinecdm_json(version: str, url_base: str) -> str:
     return (
         "{\n"
         + ' "hashFunction": "sha512",\n'
-        + ' "name": "Widevine-{}",\n'.format(version)
+        + f' "name": "Widevine-{version}",\n'
         + ' "schema_version": 1000,\n'
         + ' "vendors": {\n'
         + ' "gmp-widevinecdm": {\n'
         + ' "platforms": {\n'
         + generate_json_for_cdms(cdms)
         + " },\n"
-        + ' "version": "{}"\n'.format(version)
+        + f' "version": "{version}"\n'
         + " }\n"
         + " }\n"
         + "}"
@@ -232,14 +232,14 @@ def calculate_chrome_component_json(
     return (
         "{\n"
         + ' "hashFunction": "sha512",\n'
-        + ' "name": "{}-{}",\n'.format(name, version)
+        + f' "name": "{name}-{version}",\n'
         + ' "schema_version": 1000,\n'
         + ' "vendors": {\n'
-        + ' "gmp-{}": {{\n'.format(altname)
+        + f' "gmp-{altname}": {{\n'
         + ' "platforms": {\n'
         + generate_json_for_cdms(cdms)
         + " },\n"
-        + ' "version": "{}"\n'.format(version)
+        + f' "version": "{version}"\n'
         + " }\n"
         + " }\n"
         + "}"
@@ -33,10 +33,8 @@ formats = {

 for rate in rates:
     for channel_count in channels:
-        wav_filename = "{}-{}ch-{}.wav".format(name, channel_count, rate)
-        wav_command = "sox -V -r {} -n -b 16 -c {} {} synth {} sin {} vol {}".format(
-            rate, channel_count, wav_filename, duration, frequency, volume
-        )
+        wav_filename = f"{name}-{channel_count}ch-{rate}.wav"
+        wav_command = f"sox -V -r {rate} -n -b 16 -c {channel_count} {wav_filename} synth {duration} sin {frequency} vol {volume}"
         print(wav_command)
         os.system(wav_command)
 for container, codecs in formats.items():
@@ -6,7 +6,7 @@
 def main(gn_config_file):
     target_dir = "abseil-cpp"
     raw_file_contents = ""
-    with open(gn_config_file, "r") as fh:
+    with open(gn_config_file) as fh:
         raw_file_contents = fh.read()
     raw_file_contents = raw_file_contents.replace(f"{target_dir}/", "")
     raw_file_contents = raw_file_contents.replace(f"{target_dir}:", ":")
@@ -6,7 +6,7 @@
 def main(gn_config_file):
     target_dir = "libwebrtc"
     raw_file_contents = ""
-    with open(gn_config_file, "r") as fh:
+    with open(gn_config_file) as fh:
         raw_file_contents = fh.read()
     raw_file_contents = raw_file_contents.replace(f"{target_dir}/", "")
     raw_file_contents = raw_file_contents.replace(f"{target_dir}:", ":")
@@ -37,7 +37,7 @@ def fetch_branch_head_dict():

 def read_dict_from_cache(cache_path):
     if cache_path is not None and os.path.exists(cache_path):
-        with open(cache_path, "r") as ifile:
+        with open(cache_path) as ifile:
             return json.loads(ifile.read(), object_hook=jsonKeys2int)
     return {}
@@ -62,7 +62,7 @@ def getFunctionName(location):
     pieces = location.split("#l")
     src_url = pieces[0]
     line = int(pieces[1])
-    closest_name = "<Unknown {}>".format(line)
+    closest_name = f"<Unknown {line}>"
     closest_start = 0
     functions = getSourceFunctions(src_url)
     for fn in functions:
@@ -12,7 +12,7 @@ TELEMETRY_BASE_URL = "https://sql.telemetry.mozilla.org/api/"


 def query(key, query, p_params):
-    headers = {"Authorization": "Key {}".format(key)}
+    headers = {"Authorization": f"Key {key}"}
     start_url = TELEMETRY_BASE_URL + f"queries/{query}/refresh?{p_params}"
     info(f"Starting job using url {start_url}")
     resp = requests.post(url=start_url, headers=headers)
@@ -11,7 +11,7 @@ from qm_try_analysis.logging import error, info, warning


 def readJSONFile(FileName):
-    f = open(FileName, "r")
+    f = open(FileName)
     p = json.load(f)
     f.close()
     return p
@@ -57,7 +57,7 @@ def fetchBuildRevisions(buildids):


 def readExecutionFile(workdir):
-    exefile = "{}/qmexecutions.json".format(workdir)
+    exefile = f"{workdir}/qmexecutions.json"
     try:
         return readJSONFile(exefile)
     except OSError:
@@ -65,7 +65,7 @@ def readExecutionFile(workdir):


 def writeExecutionFile(workdir, executions):
-    exefile = "{}/qmexecutions.json".format(workdir)
+    exefile = f"{workdir}/qmexecutions.json"
     try:
         writeJSONFile(exefile, executions)
     except OSError:
@@ -3,7 +3,7 @@ import copy
 from qm_try_analysis.report import remove_duplicates


-class MockClient(object):
+class MockClient:
     def __init__(self, search_results, remote_bugs):
         self.search_results = copy.deepcopy(search_results)
         self.remote_bugs = copy.deepcopy(remote_bugs)
@@ -12,13 +12,11 @@ def web_socket_transfer_data(request):
         expected_message = expected_message.encode("latin-1")
         message = msgutil.receive_message(request)
         if message == expected_message:
-            msgutil.send_message(request, "PASS: Message #{:d}.".format(test_number))
+            msgutil.send_message(request, f"PASS: Message #{test_number:d}.")
         else:
             msgutil.send_message(
                 request,
-                "FAIL: Message #{:d}: Received unexpected message: {!r}".format(
-                    test_number, message
-                ),
+                f"FAIL: Message #{test_number:d}: Received unexpected message: {message!r}",
             )
@@ -78,10 +78,10 @@ def run_fxc(shader_model, shader_file, shader_name, output_fp):
     argv = [
         fxc_location,
         "-nologo",
-        "-T{0}".format(shader_model),
+        f"-T{shader_model}",
         os.path.relpath(shader_file),
-        "-E{0}".format(shader_name),
-        "-Vn{0}".format(shader_name),
+        f"-E{shader_name}",
+        f"-Vn{shader_name}",
         "-Vi",
     ]
     if "WINNT" not in buildconfig.substs["HOST_OS_ARCH"]:
|
||||
|
||||
deps = None
|
||||
with ScopedTempFilename() as temp_filename:
|
||||
argv += ["-Fh{0}".format(os.path.relpath(temp_filename))]
|
||||
argv += [f"-Fh{os.path.relpath(temp_filename)}"]
|
||||
|
||||
sys.stdout.write("{0}\n".format(" ".join(argv)))
|
||||
sys.stdout.flush()
|
||||
@@ -102,10 +102,12 @@ def run_fxc(shader_model, shader_file, shader_name, output_fp):
         deps = find_dependencies(proc_stdout)
         assert "fxc2" in fxc_location or len(deps) > 0

-        with open(temp_filename, "r") as temp_fp:
+        with open(temp_filename) as temp_fp:
             output_fp.write(temp_fp.read())

-    output_fp.write("ShaderBytes s{0} = {{ {0}, sizeof({0}) }};\n".format(shader_name))
+    output_fp.write(
+        f"ShaderBytes s{shader_name} = {{ {shader_name}, sizeof({shader_name}) }};\n"
+    )
     return deps
||||
@@ -154,7 +156,7 @@ def decode_console_text(pipe, text):
|
||||
# wrapper for this since TemporaryNamedFile holds the file open.
|
||||
|
||||
|
||||
class ScopedTempFilename(object):
|
||||
class ScopedTempFilename:
|
||||
def __init__(self):
|
||||
self.name = None
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@
 def main(header, propFile):
     mappings = {}

-    with open(propFile, "r") as f:
+    with open(propFile) as f:
         for line in f:
             line = line.strip()
             if not line.startswith("#"):
@@ -126,12 +126,7 @@ def main(header, fallback_table):
     header.write("\n")
     header.write("static const uint16_t BASE_CHAR_MAPPING_LIST[] = {\n")
     for char, base_char in mappings:
-        header.write(
-            " /* {:#06x}".format(char)
-            + " */ "
-            + "{:#06x}".format(base_char & 0xFFFF)
-            + ","
-        )
+        header.write(f" /* {char:#06x}" + " */ " + f"{base_char & 0xFFFF:#06x}" + ",")
         if char != base_char:
             header.write(" /* " + chr(char) + " → " + chr(base_char) + " */")
         header.write("\n")
@@ -143,20 +138,18 @@ def main(header, fallback_table):
     for block in blocks:
         header.write(
             " {"
-            + "{:#04x}".format(block.first)
+            + f"{block.first:#04x}"
             + ", "
-            + "{:#04x}".format(block.last)
+            + f"{block.last:#04x}"
             + ", "
             + str(block.offset).rjust(4)
             + "}, // "
-            + "{:#04x}".format(mappings[block.offset].char >> 8)
+            + f"{mappings[block.offset].char >> 8:#04x}"
             + "xx\n"
         )
     header.write("};\n")
     header.write("\n")
     header.write("static const uint8_t BASE_CHAR_MAPPING_BLOCK_INDEX[] = {\n")
     for i, index in enumerate(indexes):
-        header.write(
-            " " + str(index).rjust(3) + ", // " + "{:#04x}".format(i) + "xx\n"
-        )
+        header.write(" " + str(index).rjust(3) + ", // " + f"{i:#04x}" + "xx\n")
     header.write("};\n")
@@ -58,7 +58,7 @@ class ErrorTestCase(IPDLTestCase):
         IPDLTestCase.__init__(self, ipdlargv, filename)

         # Look for expected errors in the input file.
-        f = open(filename, "r")
+        f = open(filename)
         self.expectedErrorMessage = []
         for l in f:
             if l.startswith("//error:"):
@@ -57,7 +57,7 @@ def get_cpp_type(type):
         return "uint32_t"
     if type in ("int32_t", "RelaxedAtomicInt32"):
         return "int32_t"
-    raise Exception("Unexpected type: {}".format(type))
+    raise Exception(f"Unexpected type: {type}")


 # Returns a C++ expression for the default pref value. Booleans in the YAML file
@@ -117,10 +117,10 @@ def generate_prefs_header(c_out, yaml_path):
         # after startup.
         field_type = type
         if not is_startup_pref:
-            field_type = "mozilla::Atomic<{}, mozilla::Relaxed>".format(field_type)
-        class_fields.append("static {} {}_;".format(field_type, cpp_name))
+            field_type = f"mozilla::Atomic<{field_type}, mozilla::Relaxed>"
+        class_fields.append(f"static {field_type} {cpp_name}_;")
         class_fields_inits.append(
-            "{} JS::Prefs::{}_{{{}}};".format(field_type, cpp_name, init_value)
+            f"{field_type} JS::Prefs::{cpp_name}_{{{init_value}}};"
         )

         is_startup_pref_bool = "true" if is_startup_pref else "false"
@@ -128,9 +128,7 @@ def generate_prefs_header(c_out, yaml_path):
         # Generate a MACRO invocation like this:
         # MACRO("arraybuffer_transfer", arraybuffer_transfer, bool, setAtStartup_arraybuffer_transfer, true)
         macro_entries.append(
-            'MACRO("{}", {}, {}, {}, {})'.format(
-                name, cpp_name, type, setter_name, is_startup_pref_bool
-            )
+            f'MACRO("{name}", {cpp_name}, {type}, {setter_name}, {is_startup_pref_bool})'
         )

         # Generate a C++ statement to set the JS pref based on Gecko's StaticPrefs:
@@ -139,9 +137,7 @@ def generate_prefs_header(c_out, yaml_path):
|
||||
if pref.get("do_not_use_directly", False):
|
||||
browser_pref_cpp_name += "_DoNotUseDirectly"
|
||||
|
||||
statement = "JS::Prefs::{}(mozilla::StaticPrefs::{}());".format(
|
||||
setter_name, browser_pref_cpp_name
|
||||
)
|
||||
statement = f"JS::Prefs::{setter_name}(mozilla::StaticPrefs::{browser_pref_cpp_name}());"
|
||||
browser_set_statements.append(statement)
|
||||
|
||||
# For non-startup prefs, also generate code to update the pref after startup.
|
||||
@@ -151,25 +147,23 @@ def generate_prefs_header(c_out, yaml_path):
|
||||
contents = ""
|
||||
|
||||
contents += "#define JS_PREF_CLASS_FIELDS \\\n"
|
||||
contents += "".join(map(lambda s: " {}\\\n".format(s), class_fields))
|
||||
contents += "".join(map(lambda s: f" {s}\\\n", class_fields))
|
||||
contents += "\n\n"
|
||||
|
||||
contents += "#define JS_PREF_CLASS_FIELDS_INIT \\\n"
|
||||
contents += "".join(map(lambda s: " {}\\\n".format(s), class_fields_inits))
|
||||
contents += "".join(map(lambda s: f" {s}\\\n", class_fields_inits))
|
||||
contents += "\n\n"
|
||||
|
||||
contents += "#define FOR_EACH_JS_PREF(MACRO) \\\n"
|
||||
contents += "".join(map(lambda s: " {}\\\n".format(s), macro_entries))
|
||||
contents += "".join(map(lambda s: f" {s}\\\n", macro_entries))
|
||||
contents += "\n\n"
|
||||
|
||||
contents += "#define SET_JS_PREFS_FROM_BROWSER_PREFS \\\n"
|
||||
contents += "".join(map(lambda s: " {}\\\n".format(s), browser_set_statements))
|
||||
contents += "".join(map(lambda s: f" {s}\\\n", browser_set_statements))
|
||||
contents += "\n\n"
|
||||
|
||||
contents += "#define SET_NON_STARTUP_JS_PREFS_FROM_BROWSER_PREFS \\\n"
|
||||
contents += "".join(
|
||||
map(lambda s: " {}\\\n".format(s), browser_set_non_startup_statements)
|
||||
)
|
||||
contents += "".join(map(lambda s: f" {s}\\\n", browser_set_non_startup_statements))
|
||||
contents += "\n\n"
|
||||
|
||||
c_out.write(
|
||||
|
||||
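Worth noting in the JS_PREF hunks above: pyupgrade only rewrites the format call inside the lambda; the surrounding map() is left alone. Whether map(lambda ...) or a generator expression reads better is a separate style question the commit deliberately does not touch. A sketch of the equivalence:

    fields = ["int a_;", "bool b_;"]
    # What the commit produces: the lambda body becomes an f-string.
    lines = "".join(map(lambda s: f"  {s}\\\n", fields))
    # An equivalent generator expression, shown for comparison only.
    assert lines == "".join(f"  {s}\\\n" for s in fields)
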
@@ -148,7 +148,7 @@ def preprocess(cxx, preprocessorOption, source, args=[]):
result = subprocess.Popen(cxx + outputArg + args + [tmpIn]).wait()
if result != 0:
sys.exit(result)
with open(tmpOut, "r") as output:
with open(tmpOut) as output:
processed = output.read()
os.remove(tmpIn)
os.remove(tmpOut)

@@ -70,9 +70,9 @@ def writeMappingHeader(println, description, source, url):
if type(description) is not list:
description = [description]
for desc in description:
println("// {0}".format(desc))
println("// Derived from {0}.".format(source))
println("// {0}".format(url))
println(f"// {desc}")
println(f"// Derived from {source}.")
println(f"// {url}")


def writeMappingsVar(println, mapping, name, description, source, url):
@@ -84,9 +84,9 @@ def writeMappingsVar(println, mapping, name, description, source, url):
"""
println("")
writeMappingHeader(println, description, source, url)
println("var {0} = {{".format(name))
println(f"var {name} = {{")
for key, value in sorted(mapping.items(), key=itemgetter(0)):
println(' "{0}": "{1}",'.format(key, value))
println(f' "{key}": "{value}",')
println("};")


@@ -111,13 +111,11 @@ def writeMappingsBinarySearch(
println("")
writeMappingHeader(println, description, source, url)
println(
"""
bool mozilla::intl::Locale::{0}({1} {2}) {{
MOZ_ASSERT({3}({2}.Span()));
MOZ_ASSERT({4}({2}.Span()));
""".format(
fn_name, type_name, name, validate_fn, validate_case_fn
).strip()
f"""
bool mozilla::intl::Locale::{fn_name}({type_name} {name}) {{
MOZ_ASSERT({validate_fn}({name}.Span()));
MOZ_ASSERT({validate_case_fn}({name}.Span()));
""".strip()
)
writeMappingsBinarySearchBody(println, name, name, mappings, tag_maxlength)

@@ -134,20 +132,14 @@ def writeMappingsBinarySearchBody(
):
def write_array(subtags, name, length, fixed):
if fixed:
println(
" static const char {}[{}][{}] = {{".format(
name, len(subtags), length + 1
)
)
println(f" static const char {name}[{len(subtags)}][{length + 1}] = {{")
else:
println(" static const char* {}[{}] = {{".format(name, len(subtags)))
println(f" static const char* {name}[{len(subtags)}] = {{")

# Group in pairs of ten to not exceed the 80 line column limit.
for entries in grouper(subtags, 10):
entries = (
'"{}"'.format(tag).rjust(length + 2)
for tag in entries
if tag is not None
f'"{tag}"'.rjust(length + 2) for tag in entries if tag is not None
)
println(" {},".format(", ".join(entries)))

@@ -163,11 +155,9 @@ def writeMappingsBinarySearchBody(
# Omit the length check if the current length is the maximum length.
if length != tag_maxlength:
println(
"""
if ({}.Length() == {}) {{
""".format(
source_name, length
).rstrip(
f"""
if ({source_name}.Length() == {length}) {{
""".rstrip(
"\n"
)
)
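The recurring pattern in these generator hunks is a triple-quoted C++ template run through .format() and then .strip("\n") or .rstrip(); the conversion moves the values inline and leaves the strip call chained on the f-string literal. A reduced sketch (names illustrative); note the literal braces in the emitted C++ must stay doubled ({{ and }}) exactly as under str.format():

    fn_name, type_name = "CanonicalizeLanguage", "LanguageSubtag"
    decl = f"""
    bool mozilla::intl::Locale::{fn_name}({type_name} subtag) {{
    """.strip("\n")
    assert decl.rstrip().endswith("{")
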
@@ -185,31 +175,27 @@ def writeMappingsBinarySearchBody(
subtags = sorted(subtags)

def equals(subtag):
return """{}.EqualTo("{}")""".format(source_name, subtag)
return f"""{source_name}.EqualTo("{subtag}")"""

# Don't emit a binary search for short lists.
if len(subtags) == 1:
if type(mappings) is dict:
println(
"""
if ({}) {{
{}.Set(mozilla::MakeStringSpan("{}"));
f"""
if ({equals(subtags[0])}) {{
{target_name}.Set(mozilla::MakeStringSpan("{mappings[subtags[0]]}"));
return true;
}}
return false;
""".format(
equals(subtags[0]), target_name, mappings[subtags[0]]
).strip(
""".strip(
"\n"
)
)
else:
println(
"""
return {};
""".format(
equals(subtags[0])
).strip(
f"""
return {equals(subtags[0])};
""".strip(
"\n"
)
)
@@ -217,14 +203,12 @@ def writeMappingsBinarySearchBody(
if type(mappings) is dict:
for subtag in subtags:
println(
"""
if ({}) {{
{}.Set("{}");
f"""
if ({equals(subtag)}) {{
{target_name}.Set("{mappings[subtag]}");
return true;
}}
""".format(
equals(subtag), target_name, mappings[subtag]
).strip(
""".strip(
"\n"
)
)
@@ -240,11 +224,9 @@ def writeMappingsBinarySearchBody(
cond = (equals(subtag) for subtag in subtags)
cond = (" ||\n" + " " * (4 + len("return "))).join(cond)
println(
"""
return {};
""".format(
cond
).strip(
f"""
return {cond};
""".strip(
"\n"
)
)
@@ -255,23 +237,19 @@ def writeMappingsBinarySearchBody(
write_array([mappings[k] for k in subtags], "aliases", length, False)

println(
"""
if (const char* replacement = SearchReplacement({0}s, aliases, {0})) {{
{1}.Set(mozilla::MakeStringSpan(replacement));
f"""
if (const char* replacement = SearchReplacement({source_name}s, aliases, {source_name})) {{
{target_name}.Set(mozilla::MakeStringSpan(replacement));
return true;
}}
return false;
""".format(
source_name, target_name
).rstrip()
""".rstrip()
)
else:
println(
"""
return HasReplacement({0}s, {0});
""".format(
source_name
).rstrip()
f"""
return HasReplacement({source_name}s, {source_name});
""".rstrip()
)

println(
@@ -325,48 +303,40 @@ void mozilla::intl::Locale::PerformComplexLanguageMappings() {
first_language = False

cond = (
'Language().EqualTo("{}")'.format(lang)
f'Language().EqualTo("{lang}")'
for lang in [deprecated_language] + language_aliases[key]
)
cond = (" ||\n" + " " * (2 + len(if_kind) + 2)).join(cond)

println(
"""
{} ({}) {{""".format(
if_kind, cond
).strip(
f"""
{if_kind} ({cond}) {{""".strip(
"\n"
)
)

println(
"""
SetLanguage("{}");""".format(
language
).strip(
f"""
SetLanguage("{language}");""".strip(
"\n"
)
)

if script is not None:
println(
"""
f"""
if (Script().Missing()) {{
SetScript("{}");
}}""".format(
script
).strip(
SetScript("{script}");
}}""".strip(
"\n"
)
)
if region is not None:
println(
"""
f"""
if (Region().Missing()) {{
SetRegion("{}");
}}""".format(
region
).strip(
SetRegion("{region}");
}}""".strip(
"\n"
)
)
@@ -429,16 +399,14 @@ void mozilla::intl::Locale::PerformComplexRegionMappings() {
first_region = False

cond = (
'Region().EqualTo("{}")'.format(region)
f'Region().EqualTo("{region}")'
for region in [deprecated_region] + region_aliases[key]
)
cond = (" ||\n" + " " * (2 + len(if_kind) + 2)).join(cond)

println(
"""
{} ({}) {{""".format(
if_kind, cond
).strip(
f"""
{if_kind} ({cond}) {{""".strip(
"\n"
)
)
@@ -460,10 +428,8 @@ void mozilla::intl::Locale::PerformComplexRegionMappings() {

def compare_tags(language, script):
if script is None:
return 'Language().EqualTo("{}")'.format(language)
return '(Language().EqualTo("{}") && Script().EqualTo("{}"))'.format(
language, script
)
return f'Language().EqualTo("{language}")'
return f'(Language().EqualTo("{language}") && Script().EqualTo("{script}"))'

cond = (
compare_tags(language, script)
@@ -472,26 +438,22 @@ void mozilla::intl::Locale::PerformComplexRegionMappings() {
cond = (" ||\n" + " " * (4 + len(if_kind) + 2)).join(cond)

println(
"""
{} ({}) {{
SetRegion("{}");
}}""".format(
if_kind, cond, replacement_region
f"""
{if_kind} ({cond}) {{
SetRegion("{replacement_region}");
}}""".rstrip().strip(
"\n"
)
.rstrip()
.strip("\n")
)

println(
"""
f"""
else {{
SetRegion("{}");
SetRegion("{default}");
}}
}}""".format(
default
}}""".rstrip().strip(
"\n"
)
.rstrip()
.strip("\n")
)

println(
@@ -1485,7 +1447,7 @@ def readUnicodeExtensions(core_file):
extension = keyword.get("extension", "u")
assert (
extension == "u" or extension == "t"
), "unknown extension type: {}".format(extension)
), f"unknown extension type: {extension}"

extension_name = keyword.get("name")

@@ -1517,7 +1479,7 @@ def readUnicodeExtensions(core_file):
# All other names should match the 'type' production.
assert (
typeRE.match(name) is not None
), "{} matches the 'type' production".format(name)
), f"{name} matches the 'type' production"

# <https://unicode.org/reports/tr35/#Unicode_Locale_Extension_Data_Files>:
#
@@ -1593,7 +1555,7 @@ def readUnicodeExtensions(core_file):
type = alias.get("type")
assert (
typeRE.match(type) is not None
), "{} matches the 'type' production".format(type)
), f"{type} matches the 'type' production"

# Take the first replacement when multiple ones are present.
replacement = alias.get("replacement").split(" ")[0].lower()
@@ -1605,7 +1567,7 @@ def readUnicodeExtensions(core_file):
# Assert the replacement is syntactically correct.
assert (
typeRE.match(replacement) is not None
), "replacement {} matches the 'type' production".format(replacement)
), f"replacement {replacement} matches the 'type' production"

# 'subdivisionAlias' applies to 'rg' and 'sd' keys.
mapping["u"].setdefault("rg", {})[type] = replacement
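The assert hunks above are behavior-neutral: in `assert cond, message`, the message expression is only evaluated when the condition is false, and that holds for the f-string spelling just as it did for .format(). Sketch:

    extension = "u"
    # The f-string here is never built unless the assertion fails.
    assert extension in ("u", "t"), f"unknown extension type: {extension}"
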
@@ -1630,7 +1592,7 @@ def writeCLDRLanguageTagData(println, data, url):

println(generatedFileWarning)
println("// Version: CLDR-{}".format(data["version"]))
println("// URL: {}".format(url))
println(f"// URL: {url}")

println(
"""
@@ -1897,7 +1859,7 @@ def writeCLDRLanguageTagLikelySubtagsTest(println, data, url):
# Assume no complex region mappings are needed for now.
assert (
region not in complex_region_mappings
), "unexpected region with complex mappings: {}".format(region)
), f"unexpected region with complex mappings: {region}"

return (language, script, region)

@@ -2020,7 +1982,7 @@ if (typeof reportCompare === "function")
def readCLDRVersionFromICU():
icuDir = os.path.join(topsrcdir, "intl/icu/source")
if not os.path.isdir(icuDir):
raise RuntimeError("not a directory: {}".format(icuDir))
raise RuntimeError(f"not a directory: {icuDir}")

reVersion = re.compile(r'\s*cldrVersion\{"(\d+(?:\.\d+)?)"\}')

@@ -2098,13 +2060,13 @@ def updateCLDRLangTags(args):

def flines(filepath, encoding="utf-8"):
"""Open filepath and iterate over its content."""
with open(filepath, mode="r", encoding=encoding) as f:
with open(filepath, encoding=encoding) as f:
for line in f:
yield line


@total_ordering
class Zone(object):
class Zone:
"""Time zone with optional file name."""

def __init__(self, name, filename=""):
@@ -2127,7 +2089,7 @@ class Zone(object):
return self.name


class TzDataDir(object):
class TzDataDir:
"""tzdata source from a directory."""

def __init__(self, obj):
@@ -2139,7 +2101,7 @@ class TzDataDir(object):
self.readlines = flines


class TzDataFile(object):
class TzDataFile:
"""tzdata source from a file (tar or gzipped)."""

def __init__(self, obj):
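From here on many hunks apply pyupgrade's useless-object-inheritance rule (UP004): in Python 3 every class inherits from object implicitly, so class Zone(object) and class Zone: define identical types. Sketch:

    class Zone:
        """Time zone with optional file name."""

    # Implicit and explicit object inheritance are the same in Python 3.
    assert Zone.__mro__ == (Zone, object)
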
@@ -2667,7 +2629,6 @@ def availableNamedTimeZoneIdentifiers(tzdataDir, ignoreFactory):

with open(
os.path.join(js_src_builtin_intl_dir, "TimeZoneMapping.yaml"),
mode="r",
encoding="utf-8",
) as f:
time_zone_mapping = yaml.safe_load(f)
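The redundant-mode rule also fires when the mode is passed as a keyword: mode="r" is deleted while the meaningful encoding argument stays. A sketch of the resulting call shape (directory prefix elided):

    import yaml

    # Before: open(path, mode="r", encoding="utf-8")
    # After: the default mode stays implicit, encoding remains explicit.
    with open("TimeZoneMapping.yaml", encoding="utf-8") as f:
        time_zone_mapping = yaml.safe_load(f)
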
@@ -2930,7 +2891,7 @@ def generateTzDataTestVersion(tzdataDir, version, testDir):
println("")
println(generatedFileWarning)
println(tzdataVersionComment.format(version))
println("""const tzdata = "{0}";""".format(version))
println(f"""const tzdata = "{version}";""")

println(
"""
@@ -3192,7 +3153,7 @@ def writeCurrencyFile(published, currencies, out):
println = partial(print, file=f)

println(generatedFileWarning)
println("// Version: {}".format(published))
println(f"// Version: {published}")

println(
"""
@@ -3209,8 +3170,8 @@ def writeCurrencyFile(published, currencies, out):
sorted(currencies, key=itemgetter(0)), itemgetter(0)
):
for _, minorUnits, currencyName, countryName in entries:
println(" // {} ({})".format(currencyName, countryName))
println(" {}: {},".format(currency, minorUnits))
println(f" // {currencyName} ({countryName})")
println(f" {currency}: {minorUnits},")
println("};")


@@ -3261,24 +3222,22 @@ def updateCurrency(topsrcdir, args):

def writeUnicodeExtensionsMappings(println, mapping, extension):
println(
"""
f"""
template <size_t Length>
static inline bool Is{0}Key(mozilla::Span<const char> key, const char (&str)[Length]) {{
static_assert(Length == {0}KeyLength + 1,
"{0} extension key is two characters long");
static inline bool Is{extension}Key(mozilla::Span<const char> key, const char (&str)[Length]) {{
static_assert(Length == {extension}KeyLength + 1,
"{extension} extension key is two characters long");
return memcmp(key.data(), str, Length - 1) == 0;
}}

template <size_t Length>
static inline bool Is{0}Type(mozilla::Span<const char> type, const char (&str)[Length]) {{
static_assert(Length > {0}KeyLength + 1,
"{0} extension type contains more than two characters");
static inline bool Is{extension}Type(mozilla::Span<const char> type, const char (&str)[Length]) {{
static_assert(Length > {extension}KeyLength + 1,
"{extension} extension type contains more than two characters");
return type.size() == (Length - 1) &&
memcmp(type.data(), str, Length - 1) == 0;
}}
""".format(
extension
).rstrip(
""".rstrip(
"\n"
)
)
@@ -3292,8 +3251,8 @@ static inline bool Is{0}Type(mozilla::Span<const char> type, const char (&str)[L

if needs_binary_search:
println(
"""
static int32_t Compare{0}Type(const char* a, mozilla::Span<const char> b) {{
f"""
static int32_t Compare{extension}Type(const char* a, mozilla::Span<const char> b) {{
MOZ_ASSERT(!std::char_traits<char>::find(b.data(), b.size(), '\\0'),
"unexpected null-character in string");

@@ -3313,45 +3272,41 @@ static int32_t Compare{0}Type(const char* a, mozilla::Span<const char> b) {{
}}

template <size_t Length>
static inline const char* Search{0}Replacement(
static inline const char* Search{extension}Replacement(
const char* (&types)[Length], const char* (&aliases)[Length],
mozilla::Span<const char> type) {{

auto p = std::lower_bound(std::begin(types), std::end(types), type,
[](const auto& a, const auto& b) {{
return Compare{0}Type(a, b) < 0;
return Compare{extension}Type(a, b) < 0;
}});
if (p != std::end(types) && Compare{0}Type(*p, type) == 0) {{
if (p != std::end(types) && Compare{extension}Type(*p, type) == 0) {{
return aliases[std::distance(std::begin(types), p)];
}}
return nullptr;
}}
""".format(
extension
).rstrip(
""".rstrip(
"\n"
)
)

println(
"""
f"""
/**
* Mapping from deprecated BCP 47 {0} extension types to their preferred
* Mapping from deprecated BCP 47 {extension} extension types to their preferred
* values.
*
* Spec: https://www.unicode.org/reports/tr35/#Unicode_Locale_Extension_Data_Files
* Spec: https://www.unicode.org/reports/tr35/#t_Extension
*/
const char* mozilla::intl::Locale::Replace{0}ExtensionType(
const char* mozilla::intl::Locale::Replace{extension}ExtensionType(
mozilla::Span<const char> key, mozilla::Span<const char> type) {{
MOZ_ASSERT(key.size() == {0}KeyLength);
MOZ_ASSERT(IsCanonicallyCased{0}Key(key));
MOZ_ASSERT(key.size() == {extension}KeyLength);
MOZ_ASSERT(IsCanonicallyCased{extension}Key(key));

MOZ_ASSERT(type.size() > {0}KeyLength);
MOZ_ASSERT(IsCanonicallyCased{0}Type(type));
""".format(
extension
)
MOZ_ASSERT(type.size() > {extension}KeyLength);
MOZ_ASSERT(IsCanonicallyCased{extension}Type(type));
"""
)

def to_hash_key(replacements):
@@ -3360,13 +3315,11 @@ const char* mozilla::intl::Locale::Replace{0}ExtensionType(
def write_array(subtags, name, length):
max_entries = (80 - len(" ")) // (length + len('"", '))

println(" static const char* {}[{}] = {{".format(name, len(subtags)))
println(f" static const char* {name}[{len(subtags)}] = {{")

for entries in grouper(subtags, max_entries):
entries = (
'"{}"'.format(tag).center(length + 2)
for tag in entries
if tag is not None
f'"{tag}"'.center(length + 2) for tag in entries if tag is not None
)
println(" {},".format(", ".join(entries)))

@@ -3387,18 +3340,13 @@ const char* mozilla::intl::Locale::Replace{0}ExtensionType(
if key in key_aliases[hash_key]:
continue

cond = (
'Is{}Key(key, "{}")'.format(extension, k)
for k in [key] + key_aliases[hash_key]
)
cond = (f'Is{extension}Key(key, "{k}")' for k in [key] + key_aliases[hash_key])

if_kind = "if" if first_key else "else if"
cond = (" ||\n" + " " * (2 + len(if_kind) + 2)).join(cond)
println(
"""
{} ({}) {{""".format(
if_kind, cond
).strip(
f"""
{if_kind} ({cond}) {{""".strip(
"\n"
)
)
@@ -3414,23 +3362,19 @@ const char* mozilla::intl::Locale::Replace{0}ExtensionType(
write_array(types, "types", max_len)
write_array(preferred, "aliases", max_len)
println(
"""
return Search{}Replacement(types, aliases, type);
""".format(
extension
).strip(
f"""
return Search{extension}Replacement(types, aliases, type);
""".strip(
"\n"
)
)
else:
for type, replacement in replacements:
println(
"""
if (Is{}Type(type, "{}")) {{
return "{}";
}}""".format(
extension, type, replacement
).strip(
f"""
if (Is{extension}Type(type, "{type}")) {{
return "{replacement}";
}}""".strip(
"\n"
)
)
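A detail of the writeUnicodeExtensionsMappings hunks above: the old templates used positional placeholders ({0} repeated several times); f-strings have no positional indices, so each occurrence simply names the variable again. Sketch:

    extension = "Unicode"
    # "{0}" appeared repeatedly in the old template; the f-string just
    # references the variable at each site.
    s = f"Is{extension}Key and Is{extension}Type"
    assert s == "IsUnicodeKey and IsUnicodeType"
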
@@ -3540,7 +3484,7 @@ def readICUUnitResourceFile(filepath):
table[entry_key] = entry_value
continue

raise Exception("unexpected line: '{}' in {}".format(line, filepath))
raise Exception(f"unexpected line: '{line}' in {filepath}")

assert len(parents) == 0, "Not all tables closed"
assert len(table) == 1, "More than one root table"
@@ -3579,7 +3523,7 @@ def computeSupportedUnits(all_units, sanctioned_units):
def compound_unit_identifiers():
for numerator in sanctioned_units:
for denominator in sanctioned_units:
yield "{}-per-{}".format(numerator, denominator)
yield f"{numerator}-per-{denominator}"

supported_simple_units = {find_match(unit) for unit in sanctioned_units}
assert None not in supported_simple_units
@@ -3594,7 +3538,7 @@ def computeSupportedUnits(all_units, sanctioned_units):


def readICUDataFilterForUnits(data_filter_file):
with open(data_filter_file, mode="r", encoding="utf-8") as f:
with open(data_filter_file, encoding="utf-8") as f:
data_filter = json.load(f)

# Find the rule set for the "unit_tree".
@@ -3651,9 +3595,7 @@ def writeSanctionedSimpleUnitIdentifiersFiles(all_units, sanctioned_units):
)

println("// prettier-ignore")
println(
"var sanctionedSimpleUnitIdentifiers = {};".format(sanctioned_units_object)
)
println(f"var sanctionedSimpleUnitIdentifiers = {sanctioned_units_object};")

sanctioned_h_file = os.path.join(intl_components_src_dir, "MeasureUnitGenerated.h")
with open(sanctioned_h_file, mode="w", encoding="utf-8", newline="") as f:
@@ -3683,7 +3625,7 @@ inline constexpr SimpleMeasureUnit simpleMeasureUnits[] = {
)

for unit_name in sorted(sanctioned_units):
println(' {{"{}", "{}"}},'.format(find_unit_type(unit_name), unit_name))
println(f' {{"{find_unit_type(unit_name)}", "{unit_name}"}},')

println(
"""
@@ -3726,9 +3668,7 @@ def writeUnitTestFiles(all_units, sanctioned_units):
)

println(
"const sanctionedSimpleUnitIdentifiers = {};".format(
sanctioned_units_array
)
f"const sanctionedSimpleUnitIdentifiers = {sanctioned_units_array};"
)

println(test_content)
@@ -3762,11 +3702,9 @@ for (const numerator of sanctionedSimpleUnitIdentifiers) {

write_test(
"unit-well-formed.js",
"""
const allUnits = {};
""".format(
all_units_array
)
f"""
const allUnits = {all_units_array};
"""
+ r"""
// Test only sanctioned unit identifiers are allowed.

@@ -3841,7 +3779,6 @@ def updateUnits(topsrcdir, args):

with open(
os.path.join(js_src_builtin_intl_dir, "SanctionedSimpleUnitIdentifiers.yaml"),
mode="r",
encoding="utf-8",
) as f:
sanctioned_units = yaml.safe_load(f)
@@ -3865,13 +3802,13 @@ def updateUnits(topsrcdir, args):

missing = supported_units - filtered_units
if missing:
raise RuntimeError("Missing units: {}".format(units_to_string(missing)))
raise RuntimeError(f"Missing units: {units_to_string(missing)}")

# Not exactly an error, but we currently don't have a use case where we need to support
# more units than required by ECMA-402.
extra = filtered_units - supported_units
if extra:
raise RuntimeError("Unnecessary units: {}".format(units_to_string(extra)))
raise RuntimeError(f"Unnecessary units: {units_to_string(extra)}")

writeSanctionedSimpleUnitIdentifiersFiles(all_units, sanctioned_units)

@@ -3957,7 +3894,7 @@ def readICUNumberingSystemsResourceFile(filepath):
table[entry_key] = entry_value
continue

raise Exception("unexpected line: '{}' in {}".format(line, filepath))
raise Exception(f"unexpected line: '{line}' in {filepath}")

assert len(parents) == 0, "Not all tables closed"
assert len(table) == 1, "More than one root table"
@@ -4012,9 +3949,7 @@ def writeNumberingSystemFiles(numbering_systems):
println("#define NUMBERING_SYSTEMS_WITH_SIMPLE_DIGIT_MAPPINGS \\")
println(
"{}".format(
", \\\n".join(
' "{}"'.format(name) for name in simple_numbering_systems
)
", \\\n".join(f' "{name}"' for name in simple_numbering_systems)
)
)
println("// clang-format on")
@@ -4033,13 +3968,11 @@ def writeNumberingSystemFiles(numbering_systems):
println(generatedFileWarning)

println(
"""
// source: CLDR file common/bcp47/number.xml; version CLDR {}.
f"""
// source: CLDR file common/bcp47/number.xml; version CLDR {readCLDRVersionFromICU()}.
// https://github.com/unicode-org/cldr/blob/master/common/bcp47/number.xml
// https://github.com/unicode-org/cldr/blob/master/common/supplemental/numberingSystems.xml
""".format(
readCLDRVersionFromICU()
).rstrip()
""".rstrip()
)

numbering_systems_object = json.dumps(
@@ -4049,7 +3982,7 @@ def writeNumberingSystemFiles(numbering_systems):
sort_keys=True,
ensure_ascii=False,
)
println("const numberingSystems = {};".format(numbering_systems_object))
println(f"const numberingSystems = {numbering_systems_object};")


def updateNumberingSystems(topsrcdir, args):
@@ -4059,7 +3992,6 @@ def updateNumberingSystems(topsrcdir, args):

with open(
os.path.join(js_src_builtin_intl_dir, "NumberingSystems.yaml"),
mode="r",
encoding="utf-8",
) as f:
numbering_systems = yaml.safe_load(f)
@@ -4078,14 +4010,14 @@ def updateNumberingSystems(topsrcdir, args):
# something is broken in ICU.
assert all_numbering_systems_simple_digits.issuperset(
numbering_systems
), "{}".format(numbering_systems.difference(all_numbering_systems_simple_digits))
), f"{numbering_systems.difference(all_numbering_systems_simple_digits)}"

# Assert the spec requires support for all numbering systems with simple digit mappings. If
# this assertion fails, file a PR at <https://github.com/tc39/ecma402> to include any new
# numbering systems.
assert all_numbering_systems_simple_digits.issubset(numbering_systems), "{}".format(
all_numbering_systems_simple_digits.difference(numbering_systems)
)
assert all_numbering_systems_simple_digits.issubset(
numbering_systems
), f"{all_numbering_systems_simple_digits.difference(numbering_systems)}"

writeNumberingSystemFiles(all_numbering_systems)


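One constraint worth noting for conversions like ", \\\n".join(f' "{name}"' ...) above: before Python 3.12, an f-string cannot reuse its own quote character inside the replacement field or literal text, so mixing single and double quotes as these hunks do is required, not merely stylistic. Sketch:

    simple_numbering_systems = ["arab", "latn"]
    # Single-quoted f-string emitting double-quoted output: fine on any
    # Python 3; reusing the same quote inside would need 3.12+.
    out = ", \\\n".join(f'  "{name}"' for name in simple_numbering_systems)
    assert out == '  "arab", \\\n  "latn"'
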
@@ -183,7 +183,7 @@ MAKE = env.get("MAKE", "make")
PYTHON = sys.executable

for d in DIR._fields:
info("DIR.{name} = {dir}".format(name=d, dir=getattr(DIR, d)))
info(f"DIR.{d} = {getattr(DIR, d)}")


def ensure_dir_exists(
@@ -428,17 +428,17 @@ if use_minidump:
if injector_lib is None:
use_minidump = False

info("use_minidump is {}".format(use_minidump))
info(f"use_minidump is {use_minidump}")
info(" MINIDUMP_SAVE_PATH={}".format(env["MINIDUMP_SAVE_PATH"]))
info(" injector lib is {}".format(injector_lib))
info(f" injector lib is {injector_lib}")
info(" MINIDUMP_STACKWALK={}".format(env.get("MINIDUMP_STACKWALK")))


mozconfig = os.path.join(DIR.source, "mozconfig.autospider")
CONFIGURE_ARGS += " --prefix={OBJDIR}/dist".format(OBJDIR=quote(OBJDIR))
CONFIGURE_ARGS += f" --prefix={quote(OBJDIR)}/dist"

# Generate a mozconfig.
with open(mozconfig, "wt") as fh:
with open(mozconfig, "w") as fh:
if AUTOMATION and platform.system() == "Windows":
fh.write('. "$topsrcdir/build/mozconfig.clang-cl"\n')
fh.write("ac_add_options --enable-project=js\n")
@@ -497,7 +497,7 @@ def run_jsapitests(args):
" ".join(["jsapi-tests"] + args)
)
)
print("Return code: {}".format(st))
print(f"Return code: {st}")
return st


@@ -549,11 +549,11 @@ jstest_workers = worker_max
jittest_workers = worker_max
if platform.system() == "Windows":
jstest_workers = min(worker_max, 16)
env["JSTESTS_EXTRA_ARGS"] = "-j{} ".format(jstest_workers) + env.get(
env["JSTESTS_EXTRA_ARGS"] = f"-j{jstest_workers} " + env.get(
"JSTESTS_EXTRA_ARGS", ""
)
jittest_workers = min(worker_max, 8)
env["JITTEST_EXTRA_ARGS"] = "-j{} ".format(jittest_workers) + env.get(
env["JITTEST_EXTRA_ARGS"] = f"-j{jittest_workers} " + env.get(
"JITTEST_EXTRA_ARGS", ""
)
print(

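mozconfig above was opened with "wt"; the t is redundant because text mode is the default, so the rule reduces it to "w" (the same happens to the defaults.py writes later in this commit). Sketch:

    # "wt" and "w" are the same mode: write, text, truncate.
    with open("mozconfig.autospider", "w") as fh:
        fh.write("ac_add_options --enable-project=js\n")
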
@@ -185,7 +185,7 @@ if __name__ == "__main__":

if OPTIONS.baseline_path:
baseline_map = []
fh = open(OPTIONS.baseline_path, "r")
fh = open(OPTIONS.baseline_path)
baseline_map = json.load(fh)
fh.close()
compare(current=bench_map, baseline=baseline_map)

@@ -262,7 +262,7 @@ def run_job(name, config):
raise

if final_status != 0:
raise Exception("job {} returned status {}".format(name, final_status))
raise Exception(f"job {name} returned status {final_status}")


def spawn_command(cmdspec, job, name, config):

@@ -325,7 +325,7 @@ def gather_hazard_data(command_context, **kwargs):

work_dir = get_work_dir(command_context, project, kwargs["work_dir"])
ensure_dir_exists(work_dir)
with open(os.path.join(work_dir, "defaults.py"), "wt") as fh:
with open(os.path.join(work_dir, "defaults.py"), "w") as fh:
data = textwrap.dedent(
"""\
analysis_scriptdir = "{script_dir}"
@@ -545,7 +545,7 @@ def annotated_source(filename, query):
line0 = int(line0)
line1 = int(line1)

fh = open(filename, "rt")
fh = open(filename)

out = "<pre>"
for lineno, line in enumerate(fh, 1):

@@ -131,7 +131,7 @@ for path in tests:
os.chdir(outdir)
for xdb in glob("*.xdb"):
os.unlink(xdb)
print("START TEST {}".format(name), flush=True)
print(f"START TEST {name}", flush=True)
testpath = os.path.join(indir, "test.py")
testscript = open(testpath).read()
testcode = compile(testscript, testpath, "exec")

@@ -24,7 +24,7 @@ assert callgraph.calleeGraph[f][h]
assert callgraph.calleeGraph[g][f]
assert callgraph.calleeGraph[g][h]

node = ["void n{}(int32)".format(i) for i in range(10)]
node = [f"void n{i}(int32)" for i in range(10)]
mnode = [callgraph.unmangledToMangled.get(f) for f in node]
for src, dst in [
(1, 2),
@@ -51,4 +51,4 @@ rroots = set(
)
assert len(set([node[1], node[2]]) & rroots) == 1
assert len(set([node[4], node[5]]) & rroots) == 1
assert len(rroots) == 4, "rroots = {}".format(rroots) # n1, n4, f, self_recursive
assert len(rroots) == 4, f"rroots = {rroots}" # n1, n4, f, self_recursive

@@ -38,7 +38,7 @@ def extract_unmangled(func):
return func.split("$")[-1]


class Test(object):
class Test:
def __init__(self, indir, outdir, cfg, verbose=0):
self.indir = indir
self.outdir = outdir
@@ -56,12 +56,7 @@ class Test(object):
env["CCACHE_DISABLE"] = "1"
if "-fexceptions" not in options and "-fno-exceptions" not in options:
options += " -fno-exceptions"
cmd = "{CXX} -c {source} -O3 -std=c++17 -fplugin={sixgill} -fplugin-arg-xgill-mangle=1 {options}".format( # NOQA: E501
source=self.infile(source),
CXX=self.cfg.cxx,
sixgill=self.cfg.sixgill_plugin,
options=options,
)
cmd = f"{self.cfg.cxx} -c {self.infile(source)} -O3 -std=c++17 -fplugin={self.cfg.sixgill_plugin} -fplugin-arg-xgill-mangle=1 {options}"
if self.cfg.verbose > 0:
print("Running %s" % cmd)
subprocess.check_call(["sh", "-c", cmd])
@@ -89,12 +84,10 @@ class Test(object):

def run_analysis_script(self, startPhase="gcTypes", upto=None):
open("defaults.py", "w").write(
"""\
f"""\
analysis_scriptdir = '{scriptdir}'
sixgill_bin = '{bindir}'
""".format(
scriptdir=scriptdir, bindir=self.cfg.sixgill_bin
)
sixgill_bin = '{self.cfg.sixgill_bin}'
"""
)
cmd = [
sys.executable,
@@ -118,7 +111,7 @@ sixgill_bin = '{bindir}'

def load_text_file(self, filename, extract=lambda l: l):
fullpath = os.path.join(self.outdir, filename)
values = (extract(line.strip()) for line in open(fullpath, "r"))
values = (extract(line.strip()) for line in open(fullpath))
return list(filter(lambda _: _ is not None, values))

def load_json_file(self, filename, reviver=None):

@@ -13,7 +13,7 @@ def read_reserved_word_list(

reserved_word_list = []
index = 0
with open(filename, "r") as f:
with open(filename) as f:
for line in f:
m = macro_pat.search(line)
if m:
@@ -107,22 +107,22 @@ def generate_letter_switch(opt, unprocessed_columns, reserved_word_list, columns
index, word = reserved_word_list[0]

if unprocessed_columns == 0:
line(opt, "JSRW_GOT_MATCH({}) /* {} */".format(index, word))
line(opt, f"JSRW_GOT_MATCH({index}) /* {word} */")
return

if unprocessed_columns > opt["char_tail_test_threshold"]:
line(opt, "JSRW_TEST_GUESS({}) /* {} */".format(index, word))
line(opt, f"JSRW_TEST_GUESS({index}) /* {word} */")
return

conds = []
for column in columns[0:unprocessed_columns]:
quoted = repr(word[column])
conds.append("JSRW_AT({})=={}".format(column, quoted))
conds.append(f"JSRW_AT({column})=={quoted}")

line(opt, "if ({}) {{".format(" && ".join(conds)))

indent(opt)
line(opt, "JSRW_GOT_MATCH({}) /* {} */".format(index, word))
line(opt, f"JSRW_GOT_MATCH({index}) /* {word} */")
dedent(opt)

line(opt, "}")
@@ -143,14 +143,14 @@ def generate_letter_switch(opt, unprocessed_columns, reserved_word_list, columns
list_per_column = split_list_per_column(reserved_word_list, optimal_column)

if not use_if:
line(opt, "switch (JSRW_AT({})) {{".format(optimal_column))
line(opt, f"switch (JSRW_AT({optimal_column})) {{")

for char, reserved_word_list_per_column in list_per_column:
quoted = repr(char)
if use_if:
line(opt, "if (JSRW_AT({}) == {}) {{".format(optimal_column, quoted))
line(opt, f"if (JSRW_AT({optimal_column}) == {quoted}) {{")
else:
line(opt, " case {}:".format(quoted))
line(opt, f" case {quoted}:")

indent(opt)
generate_letter_switch(
@@ -185,12 +185,10 @@ def generate_switch(opt, reserved_word_list):
line(opt, "/*")
line(
opt,
" * Generating switch for the list of {} entries:".format(
len(reserved_word_list)
),
f" * Generating switch for the list of {len(reserved_word_list)} entries:",
)
for index, word in reserved_word_list:
line(opt, " * {}".format(word))
line(opt, f" * {word}")
line(opt, " */")

list_per_length = split_list_per_length(reserved_word_list)
@@ -204,9 +202,9 @@ def generate_switch(opt, reserved_word_list):

for length, reserved_word_list_per_length in list_per_length:
if use_if:
line(opt, "if (JSRW_LENGTH() == {}) {{".format(length))
line(opt, f"if (JSRW_LENGTH() == {length}) {{")
else:
line(opt, " case {}:".format(length))
line(opt, f" case {length}:")

indent(opt)
generate_letter_switch(opt, length, reserved_word_list_per_length)

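A convenience visible throughout the reserved-word hunks above: f-string placeholders accept full expressions, so calls like len(...) or getattr(...) move inline instead of being passed as arguments to .format(). Sketch:

    reserved_word_list = [(0, "break"), (1, "case")]
    line = f" * Generating switch for the list of {len(reserved_word_list)} entries:"
    assert line.endswith("of 2 entries:")
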
@@ -49,18 +49,14 @@ def align_stack_comment(path):

if head_len > ALIGNMENT_COLUMN:
print(
"Warning: line {} overflows from alignment column {}: {}".format(
line_num, ALIGNMENT_COLUMN, head_len
),
f"Warning: line {line_num} overflows from alignment column {ALIGNMENT_COLUMN}: {head_len}",
file=sys.stderr,
)

line_len = max(head_len, ALIGNMENT_COLUMN) + comment_len
if line_len > MAX_CHARS_PER_LINE:
print(
"Warning: line {} overflows from {} chars: {}".format(
line_num, MAX_CHARS_PER_LINE, line_len
),
f"Warning: line {line_num} overflows from {MAX_CHARS_PER_LINE} chars: {line_len}",
file=sys.stderr,
)

@@ -78,15 +74,15 @@ def align_stack_comment(path):
lines.append(line)

print(
"Info: Minimum column number for [stack]: {}".format(max_head_len),
f"Info: Minimum column number for [stack]: {max_head_len}",
file=sys.stderr,
)
print(
"Info: Alignment column number for [stack]: {}".format(ALIGNMENT_COLUMN),
f"Info: Alignment column number for [stack]: {ALIGNMENT_COLUMN}",
file=sys.stderr,
)
print(
"Info: Max length of stack transition comments: {}".format(max_comment_len),
f"Info: Max length of stack transition comments: {max_comment_len}",
file=sys.stderr,
)


@@ -17,7 +17,7 @@ from mozilla.prettyprinters import pretty_printer, ptr_pretty_printer
mozilla.prettyprinters.clear_module_printers(__name__)


class jsjitExecutableAllocatorCache(object):
class jsjitExecutableAllocatorCache:
"""Cache information about the ExecutableAllocator type for this objfile."""

def __init__(self):
@@ -36,7 +36,7 @@ class jsjitExecutableAllocatorCache(object):


@pretty_printer("js::jit::ExecutableAllocator")
class jsjitExecutableAllocator(object):
class jsjitExecutableAllocator:
def __init__(self, value, cache):
if not cache.mod_ExecutableAllocator:
cache.mod_ExecutableAllocator = jsjitExecutableAllocatorCache()
@@ -49,7 +49,7 @@ class jsjitExecutableAllocator(object):
def __iter__(self):
return self.PoolIterator(self)

class PoolIterator(object):
class PoolIterator:
def __init__(self, allocator):
self.allocator = allocator
self.entryType = allocator.cache.ExecutablePool.pointer()

@@ -15,7 +15,7 @@ mozilla.prettyprinters.clear_module_printers(__name__)
# Cache information about the types for this objfile.


class GCCellPtrTypeCache(object):
class GCCellPtrTypeCache:
def __init__(self, cache):
self.TraceKind_t = gdb.lookup_type("JS::TraceKind")
self.AllocKind_t = gdb.lookup_type("js::gc::AllocKind")
@@ -76,7 +76,7 @@ class GCCellPtrTypeCache(object):


@pretty_printer("JS::GCCellPtr")
class GCCellPtr(object):
class GCCellPtr:
def __init__(self, value, cache):
self.value = value
if not cache.mod_GCCellPtr:
@@ -120,6 +120,4 @@ class GCCellPtr(object):
# Map the AllocKind to a TraceKind.
kind = self.cache.mod_GCCellPtr.alloc_kind_to_trace_kind[alloc_idx]
type_name = self.cache.mod_GCCellPtr.trace_kind_to_type[int(kind)]
return "JS::GCCellPtr(({}*) {})".format(
type_name, ptr.cast(self.cache.void_ptr_t)
)
return f"JS::GCCellPtr(({type_name}*) {ptr.cast(self.cache.void_ptr_t)})"

@@ -13,7 +13,7 @@ prettyprinters.clear_module_printers(__name__)
from mozilla.prettyprinters import pretty_printer


class InterpreterTypeCache(object):
class InterpreterTypeCache:
# Cache information about the Interpreter types for this objfile.
def __init__(self):
self.tValue = gdb.lookup_type("JS::Value")
@@ -32,7 +32,7 @@ class InterpreterTypeCache(object):


@pretty_printer("js::InterpreterRegs")
class InterpreterRegs(object):
class InterpreterRegs:
def __init__(self, value, cache):
self.value = value
self.cache = cache
@@ -48,11 +48,11 @@ class InterpreterRegs(object):
slots = (self.value["fp_"] + 1).cast(self.itc.tValue.pointer())
sp = "sp = fp_.slots() + {}".format(self.value["sp"] - slots)
pc = "pc = {}".format(self.value["pc"])
return "{{ {}, {}, {} }}".format(fp_, sp, pc)
return f"{{ {fp_}, {sp}, {pc} }}"


@pretty_printer("js::AbstractFramePtr")
class AbstractFramePtr(object):
class AbstractFramePtr:
Tag_InterpreterFrame = 0x0
Tag_BaselineFrame = 0x1
Tag_RematerializedFrame = 0x2
@@ -82,7 +82,7 @@ class AbstractFramePtr(object):
if tag == AbstractFramePtr.Tag_WasmDebugFrame:
label = "js::wasm::DebugFrame"
ptr = ptr.cast(self.itc.tDebugFrame.pointer())
return "AbstractFramePtr (({} *) {})".format(label, ptr)
return f"AbstractFramePtr (({label} *) {ptr})"

# Provide the ptr_ field as a child, so it prints after the pretty string
# provided above.

@@ -27,7 +27,7 @@ mozilla.prettyprinters.clear_module_printers(__name__)
# Cache information about the JSString type for this objfile.


class jsvmPrinterCache(object):
class jsvmPrinterCache:
def __init__(self):
self.d = None

@@ -45,13 +45,13 @@ class jsvmPrinterCache(object):
# iongraph command, which uses the jsvmLSprinter.


class ModuleCache(object):
class ModuleCache:
def __init__(self):
self.mod_IonGraph = None


@pretty_printer("js::vm::LSprinter")
class jsvmLSprinter(object):
class jsvmLSprinter:
def __init__(self, value, cache):
self.value = value
if not cache.mod_IonGraph:

@@ -16,7 +16,7 @@ from mozilla.prettyprinters import ptr_pretty_printer, ref_pretty_printer
prettyprinters.clear_module_printers(__name__)


class JSObjectTypeCache(object):
class JSObjectTypeCache:
def __init__(self):
object_flag = gdb.lookup_type("js::ObjectFlag")
self.objectflag_IsUsedAsPrototype = prettyprinters.enum_value(
@@ -65,7 +65,7 @@ class JSObjectPtrOrRef(prettyprinters.Pointer):
class_name = m.group(1)

if non_native:
return "[object {}]".format(class_name)
return f"[object {class_name}]"
else:
flags = shape["objectFlags_"]["flags_"]
used_as_prototype = bool(flags & self.otc.objectflag_IsUsedAsPrototype)

@@ -19,7 +19,7 @@ except ValueError: # yuck, we are in Python 2.x, so chr() is 8-bit
mozilla.prettyprinters.clear_module_printers(__name__)


class JSStringTypeCache(object):
class JSStringTypeCache:
# Cache information about the JSString type for this objfile.
def __init__(self, cache):
dummy = gdb.Value(0).cast(cache.JSString_ptr_t)

@@ -27,11 +27,11 @@ class JSSymbolPtr(mozilla.prettyprinters.Pointer):
code = int(self.value["code_"]) & 0xFFFFFFFF
desc = str(get_header_ptr(self.value, self.cache.JSString_ptr_t))
if code == InSymbolRegistry:
return "Symbol.for({})".format(desc)
return f"Symbol.for({desc})"
elif code == UniqueSymbol:
return "Symbol({})".format(desc)
return f"Symbol({desc})"
elif code == PrivateNameSymbol:
return "#{}".format(desc)
return f"#{desc}"
else:
# Well-known symbol. Strip off the quotes added by the JSString *
# pretty-printer.

@@ -13,7 +13,7 @@ mozilla.prettyprinters.clear_module_printers(__name__)


@pretty_printer("JS::PropertyKey")
class PropertyKey(object):
class PropertyKey:
# Since people don't always build with macro debugging info, I can't
# think of any way to avoid copying these values here, short of using
# inferior calls for every operation (which, I hear, is broken from

@@ -12,7 +12,7 @@ from mozilla.prettyprinters import template_pretty_printer
mozilla.prettyprinters.clear_module_printers(__name__)


class Common(object):
class Common:
# Common base class for all the rooting template pretty-printers. All these
# templates have one member holding the referent (or a pointer to it), so
# there's not much to it.

@@ -14,7 +14,7 @@ from mozilla.prettyprinters import pretty_printer, ptr_pretty_printer
mozilla.prettyprinters.clear_module_printers(__name__)


class JSOpTypeCache(object):
class JSOpTypeCache:
# Cache information about the JSOp type for this objfile.
def __init__(self, cache):
self.tJSOp = gdb.lookup_type("JSOp")
@@ -27,7 +27,7 @@ class JSOpTypeCache(object):


@pretty_printer("JSOp")
class JSOp(object):
class JSOp:
def __init__(self, value, cache):
self.value = value
self.cache = cache
@@ -43,7 +43,7 @@ class JSOp(object):
fields = self.jotc.tJSOp.fields()
if idx < len(fields):
return fields[idx].name
return "(JSOp) {:d}".format(idx)
return f"(JSOp) {idx:d}"


@ptr_pretty_printer("jsbytecode")
@@ -57,4 +57,4 @@ class JSBytecodePtr(mozilla.prettyprinters.Pointer):
opcode = str(self.value.dereference().cast(self.jotc.tJSOp))
except Exception:
opcode = "bad pc"
return "{} ({})".format(self.value.cast(self.cache.void_ptr_t), opcode)
return f"{self.value.cast(self.cache.void_ptr_t)} ({opcode})"

@@ -19,7 +19,7 @@ mozilla.prettyprinters.clear_module_printers(__name__)
# formats handled below.


class Box(object):
class Box:
def __init__(self, asBits, jtc):
self.asBits = asBits
self.jtc = jtc
@@ -90,7 +90,7 @@ class Nunbox(Box):
return gdb.Value(self.asBits & Nunbox.PAYLOAD_MASK)


class JSValueTypeCache(object):
class JSValueTypeCache:
# Cache information about the Value type for this objfile.

def __init__(self, cache):
@@ -128,7 +128,7 @@ class JSValueTypeCache(object):


@pretty_printer("JS::Value")
class JSValue(object):
class JSValue:
def __init__(self, value, cache):
# Save the generic typecache, and create our own, if we haven't already.
self.cache = cache

@@ -204,7 +204,7 @@ class NotSpiderMonkeyObjfileError(TypeError):
# metadata in the TypeCache's mod_JSString attribute.


class TypeCache(object):
class TypeCache:
def __init__(self, objfile):
self.objfile = objfile

@@ -386,7 +386,7 @@ def lookup_for_objfile(objfile):
# including the type name and address, as string contents.


class Pointer(object):
class Pointer:
def __new__(cls, value, cache):
# Don't try to provide pretty-printers for NULL pointers.
if value.type.strip_typedefs().code == gdb.TYPE_CODE_PTR and value == 0:

@@ -59,7 +59,7 @@ SizeOfFramePrefix = {

# We cannot have semi-colon as identifier names, so use a colon instead,
# and forward the name resolution to the type cache class.
class UnwinderTypeCacheFrameType(object):
class UnwinderTypeCacheFrameType:
def __init__(self, tc):
self.tc = tc

@@ -144,7 +144,7 @@ class UnwinderTypeCache(TypeCache):
self.frame_class_types[enumval] = class_type.pointer()


class FrameSymbol(object):
class FrameSymbol:
"A symbol/value pair as expected from gdb frame decorators."

def __init__(self, sym, val):
@@ -253,7 +253,7 @@ class JitFrameDecorator(FrameDecorator):
return result


class SpiderMonkeyFrameFilter(object):
class SpiderMonkeyFrameFilter:
"A frame filter for SpiderMonkey."

# |state_holder| is either None, or an instance of
@@ -280,7 +280,7 @@ class SpiderMonkeyFrameFilter(object):
return imap(self.maybe_wrap_frame, frame_iter)


class SpiderMonkeyFrameId(object):
class SpiderMonkeyFrameId:
"A frame id class, as specified by the gdb unwinder API."

def __init__(self, sp, pc):
@@ -288,7 +288,7 @@ class SpiderMonkeyFrameId(object):
self.pc = pc


class UnwinderState(object):
class UnwinderState:
"""This holds all the state needed during a given unwind. Each time a
new unwind is done, a new instance of this class is created. It
keeps track of all the state needed to unwind JIT frames. Note that

@@ -9,7 +9,7 @@ import sys
import time


class ProgressBar(object):
class ProgressBar:
def __init__(self, label, limit, label_width=12):
self.label = label
self.limit = limit

@@ -58,7 +58,7 @@ def make_shell_cmd(l):

# An instance of this class collects the lists of passing, failing, and
# timing-out tests, runs the progress bar, and prints a summary at the end.
class Summary(object):
class Summary:
class SummaryBar(progressbar.ProgressBar):
def __init__(self, limit):
super(Summary.SummaryBar, self).__init__("", limit, 24)

@@ -11,7 +11,7 @@ import time
from subprocess import PIPE, Popen


class TaskPool(object):
class TaskPool:
# Run a series of subprocesses. Try to keep up to a certain number going in
# parallel at any given time. Enforce time limits.
#
@@ -25,7 +25,7 @@ class TaskPool(object):

# A task we should run in a subprocess. Users should subclass this and
# fill in the methods as given.
class Task(object):
class Task:
def __init__(self):
self.pipe = None
self.start_time = None
@@ -78,7 +78,7 @@ class TaskPool(object):
def run_all(self):
# The currently running tasks: a set of Task instances.
running = set()
with open(os.devnull, "r") as devnull:
with open(os.devnull) as devnull:
while True:
while len(running) < self.job_limit and self.next_pending:
task = self.next_pending

@@ -5,7 +5,7 @@ import mozilla.prettyprinters


@mozilla.prettyprinters.pretty_printer("unscoped_no_storage")
class UnscopedNoStoragePrinter(object):
class UnscopedNoStoragePrinter:
def __init__(self, value, cache):
pass

@@ -14,7 +14,7 @@ class UnscopedNoStoragePrinter(object):


@mozilla.prettyprinters.pretty_printer("unscoped_with_storage")
class UnscopedWithStoragePrinter(object):
class UnscopedWithStoragePrinter:
def __init__(self, value, cache):
pass

@@ -23,7 +23,7 @@ class UnscopedWithStoragePrinter(object):


@mozilla.prettyprinters.pretty_printer("scoped_no_storage")
class ScopedNoStoragePrinter(object):
class ScopedNoStoragePrinter:
def __init__(self, value, cache):
pass

@@ -32,7 +32,7 @@ class ScopedNoStoragePrinter(object):


@mozilla.prettyprinters.pretty_printer("scoped_with_storage")
class ScopedWithStoragePrinter(object):
class ScopedWithStoragePrinter:
def __init__(self, value, cache):
pass


@@ -6,7 +6,7 @@ import mozilla.prettyprinters


@mozilla.prettyprinters.pretty_printer("my_typedef")
class my_typedef(object):
class my_typedef:
def __init__(self, value, cache):
pass


@@ -48,7 +48,7 @@ def copy_and_update_includes(src_path, dst_path):
"special-case.h",
]

src = open(str(src_path), "r")
src = open(str(src_path))
dst = open(str(dst_path), "w")

# 1. Rewrite includes of V8 regexp headers:

@@ -61,16 +61,16 @@ def choose_item(jobs, max_items):
flags = ""
if len(job.jitflags) != 0:
flags = "({})".format(" ".join(job.jitflags))
return "{} {}".format(job.path, flags)
return f"{job.path} {flags}"

for i, job in enumerate(jobs, 1):
print("{}) {}".format(i, display_job(job)))
print(f"{i}) {display_job(job)}")

item = read_input("Which one:\n")
try:
item = int(item)
if item > job_count or item < 1:
raise Exception("Input isn't between 1 and {}".format(job_count))
raise Exception(f"Input isn't between 1 and {job_count}")
except ValueError:
raise Exception("Unrecognized input")

@@ -426,7 +426,7 @@ def main(argv):
else:
sys.stderr.write(
"Exception thrown trying to read test file"
" '{}'\n".format(options.read_tests)
f" '{options.read_tests}'\n"
)
traceback.print_exc()
sys.stderr.write("---\n")
@@ -589,7 +589,7 @@ def main(argv):
except OSError:
if not os.path.exists(prefix[0]):
print(
"JS shell argument: file does not exist:" " '{}'".format(prefix[0]),
"JS shell argument: file does not exist:" f" '{prefix[0]}'",
file=sys.stderr,
)
sys.exit(1)

@@ -133,17 +133,17 @@ def gen_writer_method(name, args, custom_writer):
        cpp_type, write_method = arg_writer_info[arg_type]
        if arg_name == "result":
            ret_type = cpp_type
            args_code += " {} result(newOperandId());\\\n".format(cpp_type)
            args_code += f" {cpp_type} result(newOperandId());\\\n"
            args_code += " writeOperandId(result);\\\n"
        else:
            method_args.append("{} {}".format(cpp_type, arg_name))
            args_code += " {}({});\\\n".format(write_method, arg_name)
            method_args.append(f"{cpp_type} {arg_name}")
            args_code += f" {write_method}({arg_name});\\\n"

    code = ""
    if custom_writer:
        code += "private:\\\n"
    code += "{} {}({}) {{\\\n".format(ret_type, method_name, ", ".join(method_args))
    code += " writeOp(CacheOp::{});\\\n".format(name)
    code += f" writeOp(CacheOp::{name});\\\n"
    code += args_code
    code += " assertLengthMatches();\\\n"
    if ret_type != "void":
@@ -236,14 +236,14 @@ def gen_compiler_method(name, args):
        cpp_type, suffix, readexpr = arg_reader_info[arg_type]
        cpp_name = arg_name + suffix
        cpp_args.append(cpp_name)
        method_args.append("{} {}".format(cpp_type, cpp_name))
        args_code += " {} {} = {};\\\n".format(cpp_type, cpp_name, readexpr)
        method_args.append(f"{cpp_type} {cpp_name}")
        args_code += f" {cpp_type} {cpp_name} = {readexpr};\\\n"

    # Generate signature.
    code = "[[nodiscard]] bool {}({});\\\n".format(method_name, ", ".join(method_args))

    # Generate the method forwarding to it.
    code += "[[nodiscard]] bool {}(CacheIRReader& reader) {{\\\n".format(method_name)
    code += f"[[nodiscard]] bool {method_name}(CacheIRReader& reader) {{\\\n"
    code += args_code
    code += " return {}({});\\\n".format(method_name, ", ".join(cpp_args))
    code += "}\\\n"
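These generators emit C++ macro lines, so the doubled braces matter: both str.format() and f-strings treat {{ and }} as escapes for literal braces, which is why the {{ survives the conversion unchanged. Spot-check with a stand-in method name:

method_name = "emitGuardShape"  # hypothetical stand-in
old = "[[nodiscard]] bool {}(CacheIRReader& reader) {{\\\n".format(method_name)
new = f"[[nodiscard]] bool {method_name}(CacheIRReader& reader) {{\\\n"
# Both render the doubled brace as a single literal "{".
assert old == new == "[[nodiscard]] bool emitGuardShape(CacheIRReader& reader) {\\\n"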
@@ -327,11 +327,11 @@ def gen_spewer_method(name, args):
        spew_method = arg_spewer_method[arg_type]
        if not is_first:
            args_code += " spewArgSeparator();\\\n"
        args_code += ' {}("{}", {});\\\n'.format(spew_method, arg_name, readexpr)
        args_code += f' {spew_method}("{arg_name}", {readexpr});\\\n'
        is_first = False

    code = "void {}(CacheIRReader& reader) {{\\\n".format(method_name)
    code += " spewOp(CacheOp::{});\\\n".format(name)
    code = f"void {method_name}(CacheIRReader& reader) {{\\\n"
    code += f" spewOp(CacheOp::{name});\\\n"
    code += args_code
    code += " spewOpEnd();\\\n"
    code += "}\\\n"
@@ -365,7 +365,7 @@ def gen_clone_method(name, args):
        read_type, suffix, readexpr = arg_reader_info[arg_type]
        read_name = arg_name + suffix
        value_name = read_name
        args_code += " {} {} = {};\\\n".format(read_type, read_name, readexpr)
        args_code += f" {read_type} {read_name} = {readexpr};\\\n"

        write_type, write_method = arg_writer_info[arg_type]
        if arg_name == "result":
@@ -376,14 +376,14 @@ def gen_clone_method(name, args):
        if write_type.endswith("&"):
            write_type = write_type[:-1]
            value_name = arg_name
        args_code += " {} {} = get{}({});\\\n".format(
            write_type, value_name, arg_type, read_name
        )
        args_code += (
            f" {write_type} {value_name} = get{arg_type}({read_name});\\\n"
        )
        args_code += " writer.{}({});\\\n".format(write_method, value_name)
        args_code += f" writer.{write_method}({value_name});\\\n"

    code = "void {}".format(method_name)
    code = f"void {method_name}"
    code += "(CacheIRReader& reader, CacheIRWriter& writer) {{\\\n"
    code += " writer.writeOp(CacheOp::{});\\\n".format(name)
    code += f" writer.writeOp(CacheOp::{name});\\\n"
    code += args_code
    code += " writer.assertLengthMatches();\\\n"
    code += "}}\\\n"
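When the one-line f-string would exceed the line-length limit, the rewrite keeps a parenthesized continuation instead, as in the get{...} line above. The two forms still produce identical text (operand names are stand-ins):

# Hypothetical values standing in for the generator's variables.
write_type, value_name, arg_type, read_name = "ValOperandId", "val", "ValId", "valId"
a = " {} {} = get{}({});\\\n".format(write_type, value_name, arg_type, read_name)
b = (
    f" {write_type} {value_name} = get{arg_type}({read_name});\\\n"
)
assert a == b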
@@ -501,9 +501,7 @@ def generate_cacheirops_header(c_out, yaml_path):
            args_length = "0"

        transpile_str = "true" if transpile else "false"
        ops_items.append(
            "_({}, {}, {}, {})".format(name, args_length, transpile_str, cost_estimate)
        )
        ops_items.append(f"_({name}, {args_length}, {transpile_str}, {cost_estimate})")

        writer_methods.append(gen_writer_method(name, args, custom_writer))
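Conversely, when the f-string does fit, the three-line append collapses to one, which is why this hunk shrinks from 9 lines to 7. Equivalence with stand-in values:

name, args_length, transpile_str, cost_estimate = "GuardShape", "1", "true", "4"
old = "_({}, {}, {}, {})".format(name, args_length, transpile_str, cost_estimate)
new = f"_({name}, {args_length}, {transpile_str}, {cost_estimate})"
assert old == new == "_(GuardShape, 1, true, 4)"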
@@ -514,7 +512,7 @@ def generate_cacheirops_header(c_out, yaml_path):

        if transpile:
            transpiler_methods.append(gen_compiler_method(name, args))
            transpiler_ops.append("_({})".format(name))
            transpiler_ops.append(f"_({name})")

        spewer_methods.append(gen_spewer_method(name, args))
@@ -462,7 +462,7 @@ def generate_lir_header(c_out, yaml_path, mir_yaml_path):
            )
        )

        ops.append("_({})".format(name))
        ops.append(f"_({name})")

    # Generate LIR instructions for MIR instructions with 'generate_lir': true
    mir_data = load_yaml(mir_yaml_path)
@@ -521,7 +521,7 @@ def generate_lir_header(c_out, yaml_path, mir_yaml_path):
            )
        )

        ops.append("_({})".format(name))
        ops.append(f"_({name})")

    contents = "#define LIR_OPCODE_LIST(_)\\\n"
    contents += "\\\n".join(ops)
@@ -67,13 +67,13 @@ def decide_type_policy(types, no_type_policy):
        return "public NoTypePolicy::Data"

    if len(types) == 1:
        return "public {}<0>::Data".format(type_policies[types[0]])
        return f"public {type_policies[types[0]]}<0>::Data"

    type_num = 0
    mixed_type_policies = []
    for mir_type in types:
        policy = type_policies[mir_type]
        mixed_type_policies.append("{}<{}>".format(policy, type_num))
        mixed_type_policies.append(f"{policy}<{type_num}>")
        type_num += 1

    return "public MixPolicy<{}>::Data".format(", ".join(mixed_type_policies))
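f-string placeholders accept arbitrary expressions, so even the subscripted lookup in the single-type branch converts directly. A check with a made-up policy table:

type_policies = {"Object": "ObjectPolicy", "Value": "BoxPolicy"}  # hypothetical
types = ["Object"]
expected = "public ObjectPolicy<0>::Data"
assert "public {}<0>::Data".format(type_policies[types[0]]) == expected
assert f"public {type_policies[types[0]]}<0>::Data" == expected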
@@ -167,7 +167,7 @@ def gen_mir_class(
            # ops type policy.
            mir_types.append(operands[oper_name])
            # Collecting named operands for defining accessors.
            named_operands.append("({}, {})".format(current_oper_num, oper_name))
            named_operands.append(f"({current_oper_num}, {oper_name})")
            current_oper_num += 1
    type_policy = decide_type_policy(mir_types, no_type_policy)
@@ -179,7 +179,7 @@ def gen_mir_class(
    if base_class != "MNullaryInstruction":
        assert type_policy
        type_policy = ", " + type_policy
    code = "class {} : public {}{} {{\\\n".format(class_name, base_class, type_policy)
    code = f"class {class_name} : public {base_class}{type_policy} {{\\\n"

    # Arguments to class constructor that require accessors.
    mir_args = []
@@ -208,13 +208,13 @@ def gen_mir_class(
    if movable:
        code += " setMovable();\\\n"
    if result:
        code += " setResultType(MIRType::{});\\\n".format(result)
        code += f" setResultType(MIRType::{result});\\\n"
    code += " }\\\n public:\\\n"
    if arguments:
        for arg_name in arguments:
            code += " " + arguments[arg_name] + " " + arg_name + "() const { "
            code += "return " + arg_name + "_; }\\\n"
    code += " INSTRUCTION_HEADER({})\\\n".format(name)
    code += f" INSTRUCTION_HEADER({name})\\\n"
    code += " TRIVIAL_NEW_WRAPPERS\\\n"
    if named_operands:
        code += " NAMED_OPERANDS({})\\\n".format(", ".join(named_operands))
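Note which calls were left alone: lines whose arguments themselves contain string literals, such as ", ".join(...), stay as .format(). Rewriting them would nest quotes inside the placeholder, which the tool appears to avoid here (that skip heuristic is my reading of the hunks, not stated in the patch). Both spellings would be equivalent:

named_operands = ["(0, object)", "(1, id)"]  # hypothetical accessor pairs
old = " NAMED_OPERANDS({})\\\n".format(", ".join(named_operands))
new = f" NAMED_OPERANDS({', '.join(named_operands)})\\\n"
assert old == new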
@@ -302,7 +302,7 @@ def generate_mir_header(c_out, yaml_path):
    for op in data:
        name = op["name"]

        ops_items.append("_({})".format(name))
        ops_items.append(f"_({name})")

        gen_boilerplate = op.get("gen_boilerplate", True)
        assert isinstance(gen_boilerplate, bool)
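For reference, the rewrites in this diff correspond to pyupgrade checks that ruff exposes as its UP rule family; the codes below are from ruff's documentation, not from this patch, so treat the mapping as a best-effort note:

# The three rewrite shapes seen throughout this diff:
#   UP015 redundant-open-modes:       open(p, "r")     -> open(p)
#   UP004 useless-object-inheritance: class C(object): -> class C:
#   UP032 f-string:                   "{}".format(x)   -> f"{x}"
x = 42
assert "{}".format(x) == f"{x}"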
Some files were not shown because too many files have changed in this diff.