Bug 1959849 - [lint] Remove six usages via auto fix on ruff==0.0.237 via pyupgrade r=linter-reviewers,webdriver-reviewers,ahal,whimboo

This was not done through `./mach lint`. Instead, `ruff` version `0.0.237` was
called directly on the root directory with `--fix`, using a modified
`pyproject.toml` that enabled only the `UP016` rule.
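For context, the override amounts to something like the following. This is a
minimal sketch, assuming ruff's standard `select` configuration key; it is
illustrative only and not the tree's actual `pyproject.toml`:

```toml
# Minimal, illustrative override: enable only the six-removal rule (UP016).
# The fix pass was then run from the repository root with something like:
#   ruff --fix .
[tool.ruff]
select = ["UP016"]
```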

Some manual corrections were made after the auto fix ran, to address cases
where the automated rewrite produced incorrect behavior.

No manual removals of `six` usages were done in this revision.

Differential Revision: https://phabricator.services.mozilla.com/D245270
Alex Hochheiden
2025-04-13 17:48:23 +00:00
parent c1e140fe43
commit 0bbbbc094d
112 changed files with 476 additions and 666 deletions

View File

@@ -25,36 +25,36 @@ def main(argv):
"Generate a file from a Python script", add_help=False
)
parser.add_argument(
"--locale", metavar="locale", type=six.text_type, help="The locale in use."
"--locale", metavar="locale", type=str, help="The locale in use."
)
parser.add_argument(
"python_script",
metavar="python-script",
type=six.text_type,
type=str,
help="The Python script to run",
)
parser.add_argument(
"method_name",
metavar="method-name",
type=six.text_type,
type=str,
help="The method of the script to invoke",
)
parser.add_argument(
"output_file",
metavar="output-file",
type=six.text_type,
type=str,
help="The file to generate",
)
parser.add_argument(
"dep_file",
metavar="dep-file",
type=six.text_type,
type=str,
help="File to write any additional make dependencies to",
)
parser.add_argument(
"dep_target",
metavar="dep-target",
type=six.text_type,
type=str,
help="Make target to use in the dependencies file",
)
parser.add_argument(

View File

@@ -122,7 +122,7 @@ def generate(output, node_script, *files):
sys.exit(1)
node_script = six.ensure_text(node_script)
if not isinstance(node_script, six.text_type):
if not isinstance(node_script, str):
print(
"moz.build file didn't pass a valid node script name to execute",
file=sys.stderr,

View File

@@ -16,7 +16,6 @@ from collections import OrderedDict
# (bringing in TASKCLUSTER_ROOT_URL) which is necessary.
import gecko_taskgraph.main # noqa: F401
import mozversioncontrol
import six
from mach.decorators import Command, CommandArgument, SubCommand
from mozbuild.artifact_builds import JOB_CHOICES
@@ -496,7 +495,7 @@ def artifact_toolchain(
record = ArtifactRecord(task_id, name)
records[record.filename] = record
for record in six.itervalues(records):
for record in records.values():
command_context.log(
logging.INFO,
"artifact",

View File

@@ -55,7 +55,6 @@ import mozinstall
import mozpack.path as mozpath
import pylru
import requests
import six
from mach.util import UserError
from mozpack import executables
from mozpack.files import JarFinder, TarFinder
@@ -270,7 +269,7 @@ class ArtifactJob(object):
with self.get_writer(file=processed_filename, compress_level=5) as writer:
reader = JarReader(filename)
for filename, entry in six.iteritems(reader.entries):
for filename, entry in reader.entries.items():
for pattern, (src_prefix, dest_prefix) in self.test_artifact_patterns:
if not mozpath.match(filename, pattern):
continue
@@ -1253,7 +1252,7 @@ class Artifacts(object):
candidate_pushheads = collections.defaultdict(list)
for tree, pushid in six.iteritems(found_pushids):
for tree, pushid in found_pushids.items():
end = pushid
start = pushid - NUM_PUSHHEADS_TO_QUERY_PER_PARENT

View File

@@ -9,7 +9,6 @@ from abc import ABCMeta, abstractmethod
from contextlib import contextmanager
import mozpack.path as mozpath
import six
from mach.mixin.logging import LoggingMixin
from mozbuild.base import ExecutionSummary
@@ -309,7 +308,7 @@ class BuildBackend(LoggingMixin):
pp.context.update(
{
k: " ".join(v) if isinstance(v, list) else v
for k, v in six.iteritems(obj.config.substs)
for k, v in obj.config.substs.items()
}
)
pp.context.update(

View File

@@ -9,7 +9,6 @@ from collections import defaultdict
from operator import itemgetter
import mozpack.path as mozpath
import six
from mozpack.chrome.manifest import parse_manifest_line
from mozbuild.backend.base import BuildBackend
@@ -90,7 +89,7 @@ class XPIDLManager(object):
The stem of an IDL file is the basename of the file with no .idl extension.
"""
return itertools.chain(*[m.stems() for m in six.itervalues(self.modules)])
return itertools.chain(*[m.stems() for m in self.modules.values()])
class BinariesCollection(object):

View File

@@ -11,7 +11,6 @@ from pathlib import Path
from types import ModuleType
import mozpack.path as mozpath
import six
from mozbuild.shellutil import quote as shell_quote
from mozbuild.util import (
@@ -159,7 +158,7 @@ class ConfigEnvironment(object):
)
def serialize(name, obj):
if isinstance(obj, six.string_types):
if isinstance(obj, (str,)):
return obj
if isinstance(obj, Iterable):
return " ".join(obj)
@@ -241,7 +240,7 @@ class PartialConfigDict(object):
existing_files = {Path(f) for f in existing_files}
new_files = set()
for k, v in six.iteritems(values):
for k, v in values.items():
new_files.add(Path(self._write_file(k, v)))
for filename in existing_files - new_files:

View File

@@ -6,7 +6,6 @@ from collections import defaultdict
from operator import itemgetter
import mozpack.path as mozpath
import six
from mozpack.manifests import InstallManifest
from mozbuild.backend.base import PartialBackend
@@ -189,7 +188,7 @@ class FasterMakeBackend(MakeBackend, PartialBackend):
# Add information for chrome manifest generation
manifest_targets = []
for target, entries in six.iteritems(self._manifest_entries):
for target, entries in self._manifest_entries.items():
manifest_targets.append(target)
install_target = mozpath.basedir(target, install_manifests_bases)
self._install_manifests[install_target].add_content(
@@ -203,7 +202,7 @@ class FasterMakeBackend(MakeBackend, PartialBackend):
)
# Add dependencies we inferred:
for target, deps in sorted(six.iteritems(self._dependencies)):
for target, deps in sorted(self._dependencies.items()):
mk.create_rule([target]).add_dependencies(
"$(TOPOBJDIR)/%s" % d for d in sorted(deps)
)
@@ -213,7 +212,7 @@ class FasterMakeBackend(MakeBackend, PartialBackend):
"$(TOPSRCDIR)/third_party/python/moz.l10n/moz/l10n/bin/build_file.py",
]
# Add l10n dependencies we inferred:
for target, deps in sorted(six.iteritems(self._l10n_dependencies)):
for target, deps in sorted(self._l10n_dependencies.items()):
mk.create_rule([target]).add_dependencies(
"%s" % d[0] for d in sorted(deps, key=itemgetter(0))
)
@@ -232,7 +231,7 @@ class FasterMakeBackend(MakeBackend, PartialBackend):
mk.add_statement("include $(TOPSRCDIR)/config/faster/rules.mk")
for base, install_manifest in six.iteritems(self._install_manifests):
for base, install_manifest in self._install_manifests.items():
with self._write_file(
mozpath.join(
self.environment.topobjdir,
@@ -245,7 +244,7 @@ class FasterMakeBackend(MakeBackend, PartialBackend):
# Write a single unified manifest for consumption by |mach watch|.
# Since this doesn't start 'install_', it's not processed by the build.
unified_manifest = InstallManifest()
for base, install_manifest in six.iteritems(self._install_manifests):
for base, install_manifest in self._install_manifests.items():
# Expect 'dist/bin/**', which includes 'dist/bin' with no trailing slash.
assert base.startswith("dist/bin")
base = base[len("dist/bin") :]

View File

@@ -774,7 +774,7 @@ class RecursiveMakeBackend(MakeBackend):
# - nodes that are rust targets.
compile_roots = [
t
for t, deps in six.iteritems(self._compile_graph)
for t, deps in self._compile_graph.items()
if t in self._rust_targets or t not in all_compile_deps
]
@@ -832,7 +832,7 @@ class RecursiveMakeBackend(MakeBackend):
self._no_skip["syms"].remove(dirname)
add_category_rules("compile", compile_roots, self._compile_graph)
for category, graph in sorted(six.iteritems(non_default_graphs)):
for category, graph in sorted(non_default_graphs.items()):
add_category_rules(category, non_default_roots[category], graph)
for relobjdir, tier, input in self._post_process_dependencies:
@@ -870,7 +870,7 @@ class RecursiveMakeBackend(MakeBackend):
"non_default_tiers := %s" % " ".join(sorted(non_default_roots.keys()))
)
for category, graphs in sorted(six.iteritems(non_default_graphs)):
for category, graphs in sorted(non_default_graphs.items()):
category_dirs = [mozpath.dirname(target) for target in graphs.keys()]
root_mk.add_statement("%s_dirs := %s" % (category, " ".join(category_dirs)))

View File

@@ -5,7 +5,6 @@
from collections import defaultdict
import mozpack.path as mozpath
import six
import six.moves.cPickle as pickle
from mozbuild.backend.base import PartialBackend
@@ -100,7 +99,7 @@ class TestManifestBackend(PartialBackend):
self.manifest_defaults[sub_manifest] = defaults
def add_installs(self, obj, topsrcdir):
for src, (dest, _) in six.iteritems(obj.installs):
for src, (dest, _) in obj.installs.items():
key = src[len(topsrcdir) + 1 :]
self.installs_by_path[key].append((src, dest))
for src, pat, dest in obj.pattern_installs:

View File

@@ -296,7 +296,7 @@ class MozbuildObject(ProcessExecutionMixin):
args = tuple(
(
a
if not isinstance(a, six.string_types) or a != "--help"
if not isinstance(a, (str,)) or a != "--help"
else self._always.sandboxed
)
for a in args
@@ -372,12 +372,9 @@ class MozbuildObject(ProcessExecutionMixin):
config_status
)
except ConfigStatusFailure as e:
six.raise_from(
BuildEnvironmentNotFoundException(
raise BuildEnvironmentNotFoundException(
"config.status is outdated or broken. Run configure."
),
e,
)
) from e
return self._config_environment

View File

@@ -7,7 +7,6 @@ import os
import re
import mozpack.path as mozpath
import six
from mach.config import ConfigSettings
from mach.logging import LoggingManager
from mozpack.copier import FileRegistry
@@ -161,7 +160,7 @@ class ChromeMapBackend(CommonBackend):
overrides = self.manifest_handler.overrides
json.dump(
[
{k: list(v) for k, v in six.iteritems(chrome_mapping)},
{k: list(v) for k, v in chrome_mapping.items()},
overrides,
self._install_mapping,
{

View File

@@ -14,7 +14,6 @@ except ImportError:
import mozpack.path as mozpath
from mozpack.chrome.manifest import parse_manifest
from six import viewitems
from .manifest_handler import ChromeManifestHandler
@@ -48,15 +47,15 @@ class LcovRecord(object):
self.test_name = other.test_name
self.functions.update(other.functions)
for name, count in viewitems(other.function_exec_counts):
for name, count in other.function_exec_counts.items():
self.function_exec_counts[name] = count + self.function_exec_counts.get(
name, 0
)
for key, taken in viewitems(other.branches):
for key, taken in other.branches.items():
self.branches[key] = taken + self.branches.get(key, 0)
for line, (exec_count, checksum) in viewitems(other.lines):
for line, (exec_count, checksum) in other.lines.items():
new_exec_count = exec_count
if line in self.lines:
old_exec_count, _ = self.lines[line]
@@ -72,7 +71,7 @@ class LcovRecord(object):
# Function records may have moved between files, so filter here.
self.function_exec_counts = {
fn_name: count
for fn_name, count in viewitems(self.function_exec_counts)
for fn_name, count in self.function_exec_counts.items()
if fn_name in self.functions.values()
}
self.covered_function_count = len(
@@ -115,7 +114,7 @@ class RecordRewriter(object):
def _rewrite_lines(self, record):
rewritten_lines = {}
for ln, line_info in viewitems(record.lines):
for ln, line_info in record.lines.items():
r = self._get_range(ln)
if r is None:
rewritten_lines[ln] = line_info
@@ -140,7 +139,7 @@ class RecordRewriter(object):
# instance). It's not clear the records that result are well-formed, but
# we act as though if a function has multiple FN's, the corresponding
# FNDA's are all the same.
for ln, fn_name in viewitems(record.functions):
for ln, fn_name in record.functions.items():
r = self._get_range(ln)
if r is None:
rewritten_fns[ln] = fn_name
@@ -160,7 +159,7 @@ class RecordRewriter(object):
def _rewrite_branches(self, record):
rewritten_branches = {}
for (ln, block_number, branch_number), taken in viewitems(record.branches):
for (ln, block_number, branch_number), taken in record.branches.items():
r = self._get_range(ln)
if r is None:
rewritten_branches[ln, block_number, branch_number] = taken
@@ -337,13 +336,13 @@ class LcovFile(object):
# Sorting results gives deterministic output (and is a lot faster than
# using OrderedDict).
fns = []
for start_lineno, fn_name in sorted(viewitems(record.functions)):
for start_lineno, fn_name in sorted(record.functions.items()):
fns.append("FN:%s,%s" % (start_lineno, fn_name))
return "\n".join(fns)
def format_function_exec_counts(self, record):
fndas = []
for name, exec_count in sorted(viewitems(record.function_exec_counts)):
for name, exec_count in sorted(record.function_exec_counts.items()):
fndas.append("FNDA:%s,%s" % (exec_count, name))
return "\n".join(fndas)
@@ -369,7 +368,7 @@ class LcovFile(object):
def format_lines(self, record):
das = []
for line_no, (exec_count, checksum) in sorted(viewitems(record.lines)):
for line_no, (exec_count, checksum) in sorted(record.lines.items()):
s = "DA:%s,%s" % (line_no, exec_count)
if checksum:
s += ",%s" % checksum
@@ -537,7 +536,7 @@ class UrlFinder(object):
source_path, pp_info = self._abs_objdir_install_info(term)
return source_path, pp_info
for prefix, dests in viewitems(self._url_prefixes):
for prefix, dests in self._url_prefixes.items():
if term.startswith(prefix):
for dest in dests:
if not dest.endswith("/"):

View File

@@ -225,7 +225,7 @@ class WarningsDatabase(object):
"""
# Need to calculate up front since we are mutating original object.
filenames = list(six.iterkeys(self._files))
filenames = list(self._files.keys())
for filename in filenames:
if not os.path.exists(filename):
del self._files[filename]
@@ -244,10 +244,10 @@ class WarningsDatabase(object):
obj = {"files": {}}
# All this hackery because JSON can't handle sets.
for k, v in six.iteritems(self._files):
for k, v in self._files.items():
obj["files"][k] = {}
for k2, v2 in six.iteritems(v):
for k2, v2 in v.items():
normalized = v2
if isinstance(v2, set):
normalized = list(v2)
@@ -263,7 +263,7 @@ class WarningsDatabase(object):
self._files = obj["files"]
# Normalize data types.
for filename, value in six.iteritems(self._files):
for filename, value in self._files.items():
if "warnings" in value:
normalized = set()
for d in value["warnings"]:

View File

@@ -328,7 +328,7 @@ class ConfigureSandbox(dict):
)
},
__import__=forbidden_import,
str=six.text_type,
str=str,
)
# Expose a limited set of functions from os.path
@@ -434,7 +434,7 @@ class ConfigureSandbox(dict):
out_args = [
(
six.ensure_text(arg, encoding=encoding or "utf-8")
if isinstance(arg, six.binary_type)
if isinstance(arg, bytes)
else arg
)
for arg in args
@@ -514,7 +514,7 @@ class ConfigureSandbox(dict):
if path:
self.include_file(path)
for option in six.itervalues(self._options):
for option in self._options.values():
# All options must be referenced by some @depends function
if option not in self._seen:
raise ConfigureError(
@@ -705,7 +705,7 @@ class ConfigureSandbox(dict):
return value
def _dependency(self, arg, callee_name, arg_name=None):
if isinstance(arg, six.string_types):
if isinstance(arg, (str,)):
prefix, name, values = Option.split_option(arg)
if values != ():
raise ConfigureError("Option must not contain an '='")
@@ -773,7 +773,7 @@ class ConfigureSandbox(dict):
"""
when = self._normalize_when(kwargs.get("when"), "option")
args = [self._resolve(arg) for arg in args]
kwargs = {k: self._resolve(v) for k, v in six.iteritems(kwargs) if k != "when"}
kwargs = {k: self._resolve(v) for k, v in kwargs.items() if k != "when"}
# The Option constructor needs to look up the stack to infer a category
# for the Option, since the category is based on the filename where the
# Option is defined. However, if the Option is defined in a template, we
@@ -870,7 +870,7 @@ class ConfigureSandbox(dict):
with self.only_when_impl(when):
what = self._resolve(what)
if what:
if not isinstance(what, six.string_types):
if not isinstance(what, (str,)):
raise TypeError("Unexpected type: '%s'" % type(what).__name__)
self.include_file(what)
@@ -890,7 +890,7 @@ class ConfigureSandbox(dict):
for k in dir(self)
if k.endswith("_impl") and k != "template_impl"
)
glob.update((k, v) for k, v in six.iteritems(self) if k not in glob)
glob.update((k, v) for k, v in self.items() if k not in glob)
template = self._prepare_function(func, update_globals)
@@ -951,9 +951,7 @@ class ConfigureSandbox(dict):
@imports(_from='mozpack', _import='path', _as='mozpath')
"""
for value, required in ((_import, True), (_from, False), (_as, False)):
if not isinstance(value, six.string_types) and (
required or value is not None
):
if not isinstance(value, (str,)) and (required or value is not None):
raise TypeError("Unexpected type: '%s'" % type(value).__name__)
if value is not None and not self.RE_MODULE.match(value):
raise ValueError("Invalid argument to @imports: '%s'" % value)
@@ -1122,7 +1120,7 @@ class ConfigureSandbox(dict):
name = self._resolve(name)
if name is None:
return
if not isinstance(name, six.string_types):
if not isinstance(name, (str,)):
raise TypeError("Unexpected type: '%s'" % type(name).__name__)
if name in data:
raise ConfigureError(
@@ -1233,7 +1231,7 @@ class ConfigureSandbox(dict):
line = frame.f_back.f_lineno
filename = frame.f_back.f_code.co_filename
if not reason and (
isinstance(value, (bool, tuple)) or isinstance(value, six.string_types)
isinstance(value, (bool, tuple)) or isinstance(value, (str,))
):
# A reason can be provided automatically when imply_option
# is called with an immediate value.
@@ -1272,7 +1270,7 @@ class ConfigureSandbox(dict):
glob = SandboxedGlobal(
(k, v)
for k, v in six.iteritems(func.__globals__)
for k, v in func.__globals__.items()
if (isinstance(v, types.FunctionType) and v not in self._templates)
or (isinstance(v, type) and issubclass(v, Exception))
)

View File

@@ -7,8 +7,6 @@ import os
import sys
from collections import OrderedDict
import six
HELP_OPTIONS_CATEGORY = "Help options"
# List of whitelisted option categories. If you want to add a new category,
# simply add it to this list; however, exercise discretion as
@@ -33,9 +31,7 @@ def _infer_option_category(define_depth):
def istupleofstrings(obj):
return (
isinstance(obj, tuple)
and len(obj)
and all(isinstance(o, six.string_types) for o in obj)
isinstance(obj, tuple) and len(obj) and all(isinstance(o, (str,)) for o in obj)
)
@@ -118,7 +114,7 @@ class OptionValue(tuple):
return PositiveOptionValue()
elif value is False or value == ():
return NegativeOptionValue()
elif isinstance(value, six.string_types):
elif isinstance(value, (str,)):
return PositiveOptionValue((value,))
elif isinstance(value, tuple):
return PositiveOptionValue(value)
@@ -161,7 +157,7 @@ class ConflictingOptionError(InvalidOptionError):
if format_data:
message = message.format(**format_data)
super(ConflictingOptionError, self).__init__(message)
for k, v in six.iteritems(format_data):
for k, v in format_data.items():
setattr(self, k, v)
@@ -230,7 +226,7 @@ class Option(object):
"be given"
)
if name:
if not isinstance(name, six.string_types):
if not isinstance(name, (str,)):
raise InvalidOptionError("Option must be a string")
if not name.startswith("--"):
raise InvalidOptionError("Option must start with `--`")
@@ -239,7 +235,7 @@ class Option(object):
if not name.islower():
raise InvalidOptionError("Option must be all lowercase")
if env:
if not isinstance(env, six.string_types):
if not isinstance(env, (str,)):
raise InvalidOptionError("Environment variable name must be a string")
if not env.isupper():
raise InvalidOptionError(
@@ -252,7 +248,7 @@ class Option(object):
"nargs must be a positive integer, '?', '*' or '+'"
)
if (
not isinstance(default, six.string_types)
not isinstance(default, (str,))
and not isinstance(default, (bool, type(None)))
and not istupleofstrings(default)
):
@@ -261,7 +257,7 @@ class Option(object):
)
if choices and not istupleofstrings(choices):
raise InvalidOptionError("choices must be a tuple of strings")
if category and not isinstance(category, six.string_types):
if category and not isinstance(category, (str,)):
raise InvalidOptionError("Category must be a string")
if category and category not in _ALL_CATEGORIES:
raise InvalidOptionError(
@@ -359,7 +355,7 @@ class Option(object):
`values_separator`. If `values_separator` is None, there is at
most one value.
"""
if not isinstance(option, six.string_types):
if not isinstance(option, (str,)):
raise InvalidOptionError("Option must be a string")
name, eq, values = option.partition("=")
@@ -632,5 +628,5 @@ class CommandLineHelper(object):
def __iter__(self):
for d in (self._args, self._extra_args):
for arg, pos in six.itervalues(d):
for arg, pos in d.values():
yield arg

View File

@@ -595,7 +595,7 @@ class BuildProgressFooter(Footer):
def __init__(self, terminal, monitor):
Footer.__init__(self, terminal)
self.tiers = six.viewitems(monitor.tiers.tier_status)
self.tiers = monitor.tiers.tier_status.items()
def draw(self):
"""Draws this footer in the terminal."""

View File

@@ -7,8 +7,6 @@
import codecs
import re
import six
class DotProperties:
r"""A thin representation of a key=value .properties file."""
@@ -46,7 +44,7 @@ class DotProperties:
if not prefix.endswith("."):
prefix = prefix + "."
indexes = []
for k, v in six.iteritems(self._properties):
for k, v in self._properties.items():
if not k.startswith(prefix):
continue
key = k[len(prefix) :]
@@ -69,7 +67,7 @@ class DotProperties:
D = dict(
(k[len(prefix) :], v)
for k, v in six.iteritems(self._properties)
for k, v in self._properties.items()
if k.startswith(prefix) and "." not in k[len(prefix) :]
)

View File

@@ -313,7 +313,7 @@ class BaseCompileFlags(ContextDerivedValue, dict):
klass_name = self.__class__.__name__
for k, v, build_vars in self.flag_variables:
if not isinstance(k, six.text_type):
if not isinstance(k, str):
raise ValueError("Flag %s for %s is not a string" % (k, klass_name))
if not isinstance(build_vars, tuple):
raise ValueError(
@@ -332,7 +332,7 @@ class BaseCompileFlags(ContextDerivedValue, dict):
dict.__init__(
self,
(
(k, v if v is None else TypedList(six.text_type)(v))
(k, v if v is None else TypedList(str)(v))
for k, v, _ in self.flag_variables
),
)
@@ -533,10 +533,7 @@ class TargetCompileFlags(BaseCompileFlags):
"`%s` may not be set in COMPILE_FLAGS from moz.build, this "
"value is resolved from the emitter." % key
)
if not (
isinstance(value, list)
and all(isinstance(v, six.string_types) for v in value)
):
if not (isinstance(value, list) and all(isinstance(v, (str,)) for v in value)):
raise ValueError(
"A list of strings must be provided as a value for a compile "
"flags category."
@@ -767,7 +764,7 @@ class WasmFlags(TargetCompileFlags):
return ["-Os"]
class FinalTargetValue(ContextDerivedValue, six.text_type):
class FinalTargetValue(ContextDerivedValue, str):
def __new__(cls, context, value=""):
if not value:
value = "dist/"
@@ -777,7 +774,7 @@ class FinalTargetValue(ContextDerivedValue, six.text_type):
value += "bin"
if context["DIST_SUBDIR"]:
value += "/" + context["DIST_SUBDIR"]
return six.text_type.__new__(cls, value)
return str.__new__(cls, value)
def Enum(*values):
@@ -830,7 +827,7 @@ class PathMeta(type):
return super(PathMeta, cls).__call__(context, value)
class Path(six.with_metaclass(PathMeta, ContextDerivedValue, six.text_type)):
class Path(six.with_metaclass(PathMeta, ContextDerivedValue, str)):
"""Stores and resolves a source path relative to a given context
This class is used as a backing type for some of the sandbox variables.
@@ -862,7 +859,7 @@ class Path(six.with_metaclass(PathMeta, ContextDerivedValue, six.text_type)):
def _cmp(self, other, op):
if isinstance(other, Path) and self.srcdir != other.srcdir:
return op(self.full_path, other.full_path)
return op(six.text_type(self), other)
return op(str(self), other)
def __eq__(self, other):
return self._cmp(other, operator.eq)
@@ -1167,15 +1164,15 @@ ManifestparserManifestList = OrderedPathListWithAction(read_manifestparser_manif
ReftestManifestList = OrderedPathListWithAction(read_reftest_manifest)
BugzillaComponent = TypedNamedTuple(
"BugzillaComponent", [("product", six.text_type), ("component", six.text_type)]
"BugzillaComponent", [("product", str), ("component", str)]
)
SchedulingComponents = ContextDerivedTypedRecord(
("inclusive", TypedList(six.text_type, StrictOrderingOnAppendList)),
("exclusive", TypedList(six.text_type, StrictOrderingOnAppendList)),
("inclusive", TypedList(str, StrictOrderingOnAppendList)),
("exclusive", TypedList(str, StrictOrderingOnAppendList)),
)
GeneratedFilesList = StrictOrderingOnAppendListWithFlagsFactory(
{"script": six.text_type, "inputs": list, "force": bool, "flags": list}
{"script": str, "inputs": list, "force": bool, "flags": list}
)
@@ -1450,13 +1447,13 @@ VARIABLES = {
""",
),
"RUST_TESTS": (
TypedList(six.text_type),
TypedList(str),
list,
"""Names of Rust tests to build and run via `cargo test`.
""",
),
"RUST_TEST_FEATURES": (
TypedList(six.text_type),
TypedList(str),
list,
"""Cargo features to activate for RUST_TESTS.
""",
@@ -1712,8 +1709,8 @@ VARIABLES = {
""",
),
"FINAL_LIBRARY": (
six.text_type,
six.text_type,
str,
str,
"""Library in which the objects of the current directory will be linked.
This variable contains the name of a library, defined elsewhere with
@@ -1771,8 +1768,8 @@ VARIABLES = {
""",
),
"HOST_LIBRARY_NAME": (
six.text_type,
six.text_type,
str,
str,
"""Name of target library generated when cross compiling.
""",
),
@@ -1786,8 +1783,8 @@ VARIABLES = {
""",
),
"LIBRARY_NAME": (
six.text_type,
six.text_type,
str,
str,
"""The code name of the library generated for a directory.
By default STATIC_LIBRARY_NAME and SHARED_LIBRARY_NAME take this name.
@@ -1800,8 +1797,8 @@ VARIABLES = {
""",
),
"SHARED_LIBRARY_NAME": (
six.text_type,
six.text_type,
str,
str,
"""The name of the static library generated for a directory, if it needs to
differ from the library code name.
@@ -1809,22 +1806,22 @@ VARIABLES = {
""",
),
"SANDBOXED_WASM_LIBRARY_NAME": (
six.text_type,
six.text_type,
str,
str,
"""The name of the static sandboxed wasm library generated for a directory.
""",
),
"SHARED_LIBRARY_OUTPUT_CATEGORY": (
six.text_type,
six.text_type,
str,
str,
"""The output category for this context's shared library. If set this will
correspond to the build command that will build this shared library, and
the library will not be built as part of the default build.
""",
),
"RUST_LIBRARY_OUTPUT_CATEGORY": (
six.text_type,
six.text_type,
str,
str,
"""The output category for this context's rust library. If set this will
correspond to the build command that will build this rust library, and
the library will not be built as part of the default build.
@@ -1840,8 +1837,8 @@ VARIABLES = {
""",
),
"STATIC_LIBRARY_NAME": (
six.text_type,
six.text_type,
str,
str,
"""The name of the static library generated for a directory, if it needs to
differ from the library code name.
@@ -1888,7 +1885,7 @@ VARIABLES = {
),
"RCFILE": (
Path,
six.text_type,
str,
"""The program .rc file.
This variable can only be used on Windows.
@@ -1896,7 +1893,7 @@ VARIABLES = {
),
"RCINCLUDE": (
Path,
six.text_type,
str,
"""The resource script file to be included in the default .res file.
This variable can only be used on Windows.
@@ -1904,7 +1901,7 @@ VARIABLES = {
),
"DEFFILE": (
Path,
six.text_type,
str,
"""The program .def (module definition) file.
This variable can only be used on Windows.
@@ -1912,7 +1909,7 @@ VARIABLES = {
),
"SYMBOLS_FILE": (
Path,
six.text_type,
str,
"""A file containing a list of symbols to export from a shared library.
The given file contains a list of symbols to be exported, and is
@@ -1936,8 +1933,8 @@ VARIABLES = {
""",
),
"SONAME": (
six.text_type,
six.text_type,
str,
str,
"""The soname of the shared object currently being linked
soname is the "logical name" of a shared object, often used to provide
@@ -2020,8 +2017,8 @@ VARIABLES = {
""",
),
"PROGRAM": (
six.text_type,
six.text_type,
str,
str,
"""Compiled executable name.
If the configuration token ``BIN_SUFFIX`` is set, its value will be
@@ -2030,8 +2027,8 @@ VARIABLES = {
""",
),
"HOST_PROGRAM": (
six.text_type,
six.text_type,
str,
str,
"""Compiled host executable name.
If the configuration token ``HOST_BIN_SUFFIX`` is set, its value will be
@@ -2076,8 +2073,8 @@ VARIABLES = {
""",
),
"XPIDL_MODULE": (
six.text_type,
six.text_type,
str,
str,
"""XPCOM Interface Definition Module Name.
This is the name of the ``.xpt`` file that is created by linking
@@ -2263,8 +2260,8 @@ VARIABLES = {
),
# The following variables are used to control the target of installed files.
"XPI_NAME": (
six.text_type,
six.text_type,
str,
str,
"""The name of an extension XPI to generate.
When this variable is present, the results of this directory will end up
@@ -2272,8 +2269,8 @@ VARIABLES = {
""",
),
"DIST_SUBDIR": (
six.text_type,
six.text_type,
str,
str,
"""The name of an alternate directory to install files to.
When this variable is present, the results of this directory will end up
@@ -2283,7 +2280,7 @@ VARIABLES = {
),
"FINAL_TARGET": (
FinalTargetValue,
six.text_type,
str,
"""The name of the directory to install targets to.
The directory is relative to the top of the object directory. The
@@ -2307,7 +2304,7 @@ VARIABLES = {
StrictOrderingOnAppendListWithFlagsFactory(
{
"variables": dict,
"input": six.text_type,
"input": str,
"sandbox_vars": dict,
"no_chromium": bool,
"no_unified": bool,

View File

@@ -18,7 +18,6 @@ structures.
from collections import OrderedDict, defaultdict
import mozpack.path as mozpath
import six
from mozpack.chrome.manifest import ManifestEntry
from mozbuild.frontend.context import ObjDirPath, SourcePath
@@ -204,7 +203,7 @@ class BaseDefines(ContextDerived):
self.defines = defines
def get_defines(self):
for define, value in six.iteritems(self.defines):
for define, value in self.defines.items():
if value is True:
yield ("-D%s" % define)
elif value is False:

View File

@@ -11,7 +11,6 @@ from collections import OrderedDict, defaultdict
import mozinfo
import mozpack.path as mozpath
import six
import toml
from mach.mixin.logging import LoggingMixin
from mozpack.chrome.manifest import Manifest
@@ -556,9 +555,9 @@ class TreeMetadataEmitter(LoggingMixin):
self, context, crate_dir, crate_name, dependencies, description="Dependency"
):
"""Verify that a crate's dependencies all specify local paths."""
for dep_crate_name, values in six.iteritems(dependencies):
for dep_crate_name, values in dependencies.items():
# A simple version number.
if isinstance(values, (six.binary_type, six.text_type)):
if isinstance(values, (bytes, str)):
raise SandboxValidationError(
"%s %s of crate %s does not list a path"
% (description, dep_crate_name, crate_name),
@@ -624,7 +623,7 @@ class TreeMetadataEmitter(LoggingMixin):
"crate-type %s is not permitted for %s" % (crate_type, libname), context
)
dependencies = set(six.iterkeys(config.get("dependencies", {})))
dependencies = set(config.get("dependencies", {}).keys())
features = context.get(cls.FEATURES_VAR, [])
unique_features = set(features)
@@ -1086,7 +1085,7 @@ class TreeMetadataEmitter(LoggingMixin):
)
no_pgo = context.get("NO_PGO")
no_pgo_sources = [f for f, flags in six.iteritems(all_flags) if flags.no_pgo]
no_pgo_sources = [f for f, flags in all_flags.items() if flags.no_pgo]
if no_pgo:
if no_pgo_sources:
raise SandboxValidationError(
@@ -1114,7 +1113,7 @@ class TreeMetadataEmitter(LoggingMixin):
# The inverse of the above, mapping suffixes to their canonical suffix.
canonicalized_suffix_map = {}
for suffix, alternatives in six.iteritems(suffix_map):
for suffix, alternatives in suffix_map.items():
alternatives.add(suffix)
for a in alternatives:
canonicalized_suffix_map[a] = suffix
@@ -1191,7 +1190,7 @@ class TreeMetadataEmitter(LoggingMixin):
for suffix, srcs in ctxt_sources["WASM_SOURCES"].items():
wasm_linkable.sources[suffix] += srcs
for f, flags in sorted(six.iteritems(all_flags)):
for f, flags in sorted(all_flags.items()):
if flags.flags:
ext = mozpath.splitext(f)[1]
yield PerSourceFlag(context, f, flags.flags)
@@ -1693,7 +1692,7 @@ class TreeMetadataEmitter(LoggingMixin):
context,
script,
"process_define_file",
six.text_type(path),
str(path),
[Path(context, path + ".in")],
)

View File

@@ -354,7 +354,7 @@ def process_gyp_result(
if not f:
continue
# the result may be a string or a list.
if isinstance(f, six.string_types):
if isinstance(f, (str,)):
context[var].append(f)
else:
context[var].extend(f)

View File

@@ -9,8 +9,6 @@ import json
import os
import re
import six
def build_dict(config, env=os.environ):
"""
@@ -166,7 +164,7 @@ def write_mozinfo(file, config, env=os.environ):
and what keys are produced.
"""
build_conf = build_dict(config, env)
if isinstance(file, six.text_type):
if isinstance(file, str):
file = open(file, "wt")
json.dump(build_conf, file, sort_keys=True, indent=4)

View File

@@ -5,7 +5,6 @@
import json
import os
import six
from mozunit import main
from mozbuild.backend.clangd import ClangdBackend
@@ -54,7 +53,7 @@ class TestCompileDBBackends(BackendTester):
]
# Verify item consistency against `expected_db`
six.assertCountEqual(self, compile_db, expected_db)
self.assertCountEqual(compile_db, expected_db)
def test_database(self):
"""Ensure we can generate a `compile_commands.json` and that is correct."""

View File

@@ -6,7 +6,6 @@ import os
import unittest
import mozpack.path as mozpath
import six
import six.moves.cPickle as pickle
from mozpack.manifests import InstallManifest
from mozunit import main
@@ -1065,7 +1064,7 @@ class TestRecursiveMakeBackend(BackendTester):
expected[mozpath.join(env.topobjdir, "final-target")] = [
"FINAL_TARGET = $(DEPTH)/random-final-target"
]
for key, expected_rules in six.iteritems(expected):
for key, expected_rules in expected.items():
backend_path = mozpath.join(key, "backend.mk")
lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]]
found = [

View File

@@ -92,9 +92,7 @@ class ConfigureTestSandbox(ConfigureSandbox):
def __init__(self, paths, config, environ, *args, **kwargs):
self._search_path = environ.get("PATH", "").split(os.pathsep)
self._subprocess_paths = {
mozpath.abspath(k): v for k, v in six.iteritems(paths) if v
}
self._subprocess_paths = {mozpath.abspath(k): v for k, v in paths.items() if v}
paths = list(paths)

View File

@@ -5,7 +5,6 @@
import logging
import os
import six
from mozboot.util import MINIMUM_RUST_VERSION
from mozpack import path as mozpath
from mozunit import main
@@ -651,7 +650,7 @@ class LinuxToolchainTest(BaseToolchainTest):
# count), find clang.
paths = {
k: v
for k, v in six.iteritems(self.PATHS)
for k, v in self.PATHS.items()
if os.path.basename(k) not in ("gcc", "g++")
}
self.do_toolchain_test(
@@ -696,7 +695,7 @@ class LinuxToolchainTest(BaseToolchainTest):
# don't try them. This could be considered something to improve.
paths = {
k: v
for k, v in six.iteritems(self.PATHS)
for k, v in self.PATHS.items()
if os.path.basename(k) not in ("gcc", "g++", "clang", "clang++")
}
self.do_toolchain_test(
@@ -917,7 +916,7 @@ class OSXToolchainTest(BaseToolchainTest):
# We won't pick GCC if it's the only thing available.
paths = {
k: v
for k, v in six.iteritems(self.PATHS)
for k, v in self.PATHS.items()
if os.path.basename(k) not in ("clang", "clang++")
}
self.do_toolchain_test(
@@ -1044,9 +1043,7 @@ class MingwToolchainTest(BaseToolchainTest):
def test_gcc(self):
# GCC is unsupported, if you try it should find clang.
paths = {
k: v
for k, v in six.iteritems(self.PATHS)
if os.path.basename(k) != "clang-cl"
k: v for k, v in self.PATHS.items() if os.path.basename(k) != "clang-cl"
}
self.do_toolchain_test(
paths,
@@ -1077,7 +1074,7 @@ class MingwToolchainTest(BaseToolchainTest):
# We'll pick clang if nothing else is found.
paths = {
k: v
for k, v in six.iteritems(self.PATHS)
for k, v in self.PATHS.items()
if os.path.basename(k) not in ("clang-cl", "gcc")
}
self.do_toolchain_test(
@@ -1160,9 +1157,7 @@ class WindowsToolchainTest(BaseToolchainTest):
def test_unsupported_gcc(self):
paths = {
k: v
for k, v in six.iteritems(self.PATHS)
if os.path.basename(k) != "clang-cl"
k: v for k, v in self.PATHS.items() if os.path.basename(k) != "clang-cl"
}
self.do_toolchain_test(
paths,
@@ -1179,7 +1174,7 @@ class WindowsToolchainTest(BaseToolchainTest):
def test_unsupported_clang(self):
paths = {
k: v
for k, v in six.iteritems(self.PATHS)
for k, v in self.PATHS.items()
if os.path.basename(k) not in ("clang-cl", "gcc")
}
self.do_toolchain_test(
@@ -1215,9 +1210,7 @@ class WindowsGnuToolchainTest(BaseToolchainTest):
def test_unsupported_clang_cl(self):
paths = {
k: v
for k, v in six.iteritems(self.PATHS)
if os.path.basename(k) == "clang-cl"
k: v for k, v in self.PATHS.items() if os.path.basename(k) == "clang-cl"
}
self.do_toolchain_test(
paths,
@@ -1232,9 +1225,7 @@ class WindowsGnuToolchainTest(BaseToolchainTest):
)
def test_unsupported_gcc(self):
paths = {
k: v for k, v in six.iteritems(self.PATHS) if os.path.basename(k) == "gcc"
}
paths = {k: v for k, v in self.PATHS.items() if os.path.basename(k) == "gcc"}
self.do_toolchain_test(
paths,
{"c_compiler": "Cannot find the target C compiler"},
@@ -1250,7 +1241,7 @@ class WindowsGnuToolchainTest(BaseToolchainTest):
def test_clang(self):
paths = {
k: v
for k, v in six.iteritems(self.PATHS)
for k, v in self.PATHS.items()
if os.path.basename(k) not in ("clang-cl", "gcc")
}
self.do_toolchain_test(

View File

@@ -8,7 +8,6 @@ import unittest
from fnmatch import fnmatch
from textwrap import dedent
import six
from mozpack import path as mozpath
from mozunit import MockedOpen, main
from six import StringIO
@@ -40,7 +39,7 @@ class CompilerPreprocessor(Preprocessor):
context = self.context
def normalize_numbers(value):
if isinstance(value, six.string_types):
if isinstance(value, (str,)):
if value[-1:] == "L" and value[:-1].isdigit():
value = int(value[:-1])
return value
@@ -57,7 +56,7 @@ class CompilerPreprocessor(Preprocessor):
self.context = self.Context(
(normalize_has_feature_or_builtin(k), normalize_numbers(v))
for k, v in six.iteritems(context)
for k, v in context.items()
)
try:
return Preprocessor.do_if(
@@ -74,7 +73,7 @@ class CompilerPreprocessor(Preprocessor):
def repl(matchobj):
varname = matchobj.group("VAR")
if varname in self.context:
result = six.text_type(self.context[varname])
result = str(self.context[varname])
# The C preprocessor inserts whitespaces around expanded
# symbols.
start, end = matchobj.span("VAR")
@@ -200,9 +199,9 @@ class FakeCompiler(dict):
def __init__(self, *definitions):
for definition in definitions:
if all(not isinstance(d, dict) for d in six.itervalues(definition)):
if all(not isinstance(d, dict) for d in definition.values()):
definition = {None: definition}
for key, value in six.iteritems(definition):
for key, value in definition.items():
self.setdefault(key, {}).update(value)
def __call__(self, stdin, args):
@@ -228,14 +227,14 @@ class FakeCompiler(dict):
pp = CompilerPreprocessor(self[None])
def apply_defn(defn):
for k, v in six.iteritems(defn):
for k, v in defn.items():
if v is False:
if k in pp.context:
del pp.context[k]
else:
pp.context[k] = v
for glob, defn in six.iteritems(self):
for glob, defn in self.items():
if glob and not glob.startswith("-") and fnmatch(file, glob):
apply_defn(defn)
@@ -350,7 +349,7 @@ class CompilerResult(ReadOnlyNamespace):
def __add__(self, other):
assert isinstance(other, dict)
result = copy.deepcopy(self.__dict__)
for k, v in six.iteritems(other):
for k, v in other.items():
if k == "flags":
flags = result.setdefault(k, [])
if isinstance(v, PrependFlags):

View File

@@ -5,7 +5,6 @@
import os
import unittest
import six
from mozpack import path as mozpath
from mozunit import main
@@ -617,7 +616,7 @@ class TestPaths(unittest.TestCase):
class TestTypedRecord(unittest.TestCase):
def test_fields(self):
T = ContextDerivedTypedRecord(("field1", six.text_type), ("field2", list))
T = ContextDerivedTypedRecord(("field1", str), ("field2", list))
inst = T(None)
self.assertEqual(inst.field1, "")
self.assertEqual(inst.field2, [])
@@ -632,7 +631,7 @@ class TestTypedRecord(unittest.TestCase):
inst.field3 = []
def test_coercion(self):
T = ContextDerivedTypedRecord(("field1", six.text_type), ("field2", list))
T = ContextDerivedTypedRecord(("field1", str), ("field2", list))
inst = T(None)
inst.field1 = 3
inst.field2 += ("bar",)

View File

@@ -6,7 +6,6 @@ import os
import unittest
import mozpack.path as mozpath
import six
from mozunit import main
from mozbuild.frontend.context import ObjDirPath, Path
@@ -381,12 +380,12 @@ class TestEmitterBasic(unittest.TestCase):
def test_compile_flags_validation(self):
reader = self.reader("compile-flags-field-validation")
with six.assertRaisesRegex(self, BuildReaderError, "Invalid value."):
with self.assertRaisesRegex(BuildReaderError, "Invalid value."):
self.read_topsrcdir(reader)
reader = self.reader("compile-flags-type-validation")
with six.assertRaisesRegex(
self, BuildReaderError, "A list of strings must be provided"
with self.assertRaisesRegex(
BuildReaderError, "A list of strings must be provided"
):
self.read_topsrcdir(reader)
@@ -441,10 +440,8 @@ class TestEmitterBasic(unittest.TestCase):
def test_resolved_flags_error(self):
reader = self.reader("resolved-flags-error")
with six.assertRaisesRegex(
self,
BuildReaderError,
"`DEFINES` may not be set in COMPILE_FLAGS from moz.build",
with self.assertRaisesRegex(
BuildReaderError, "`DEFINES` may not be set in COMPILE_FLAGS from moz.build"
):
self.read_topsrcdir(reader)
@@ -481,9 +478,7 @@ class TestEmitterBasic(unittest.TestCase):
def test_use_nasm(self):
# When nasm is not available, this should raise.
reader = self.reader("use-nasm")
with six.assertRaisesRegex(
self, SandboxValidationError, "nasm is not available"
):
with self.assertRaisesRegex(SandboxValidationError, "nasm is not available"):
self.read_topsrcdir(reader)
# When nasm is available, this should work.
@@ -578,8 +573,7 @@ class TestEmitterBasic(unittest.TestCase):
LOCALIZED_FILES as an objdir path produces an error.
"""
reader = self.reader("localized-files-not-localized-generated")
with six.assertRaisesRegex(
self,
with self.assertRaisesRegex(
SandboxValidationError,
"Objdir file listed in LOCALIZED_FILES not in LOCALIZED_GENERATED_FILES:",
):
@@ -590,8 +584,7 @@ class TestEmitterBasic(unittest.TestCase):
FINAL_TARGET_FILES as an objdir path produces an error.
"""
reader = self.reader("localized-generated-files-final-target-files")
with six.assertRaisesRegex(
self,
with self.assertRaisesRegex(
SandboxValidationError,
"Outputs of LOCALIZED_GENERATED_FILES cannot be used in FINAL_TARGET_FILES:",
):
@@ -629,24 +622,22 @@ class TestEmitterBasic(unittest.TestCase):
def test_generated_files_no_script(self):
reader = self.reader("generated-files-no-script")
with six.assertRaisesRegex(
self, SandboxValidationError, "Script for generating bar.c does not exist"
with self.assertRaisesRegex(
SandboxValidationError, "Script for generating bar.c does not exist"
):
self.read_topsrcdir(reader)
def test_generated_files_no_inputs(self):
reader = self.reader("generated-files-no-inputs")
with six.assertRaisesRegex(
self, SandboxValidationError, "Input for generating foo.c does not exist"
with self.assertRaisesRegex(
SandboxValidationError, "Input for generating foo.c does not exist"
):
self.read_topsrcdir(reader)
def test_generated_files_no_python_script(self):
reader = self.reader("generated-files-no-python-script")
with six.assertRaisesRegex(
self,
SandboxValidationError,
"Script for generating bar.c does not end in .py",
with self.assertRaisesRegex(
SandboxValidationError, "Script for generating bar.c does not end in .py"
):
self.read_topsrcdir(reader)
@@ -676,8 +667,8 @@ class TestEmitterBasic(unittest.TestCase):
Missing files in EXPORTS is an error.
"""
reader = self.reader("exports-missing")
with six.assertRaisesRegex(
self, SandboxValidationError, "File listed in EXPORTS does not exist:"
with self.assertRaisesRegex(
SandboxValidationError, "File listed in EXPORTS does not exist:"
):
self.read_topsrcdir(reader)
@@ -686,8 +677,7 @@ class TestEmitterBasic(unittest.TestCase):
An objdir file in EXPORTS that is not in GENERATED_FILES is an error.
"""
reader = self.reader("exports-missing-generated")
with six.assertRaisesRegex(
self,
with self.assertRaisesRegex(
SandboxValidationError,
"Objdir file listed in EXPORTS not in GENERATED_FILES:",
):
@@ -726,8 +716,7 @@ class TestEmitterBasic(unittest.TestCase):
def test_test_harness_files_root(self):
reader = self.reader("test-harness-files-root")
with six.assertRaisesRegex(
self,
with self.assertRaisesRegex(
SandboxValidationError,
"Cannot install files to the root of TEST_HARNESS_FILES",
):
@@ -823,21 +812,21 @@ class TestEmitterBasic(unittest.TestCase):
"""A missing manifest file should result in an error."""
reader = self.reader("test-manifest-missing-manifest")
with six.assertRaisesRegex(self, BuildReaderError, "Missing files"):
with self.assertRaisesRegex(BuildReaderError, "Missing files"):
self.read_topsrcdir(reader)
def test_empty_test_manifest_rejected(self):
"""A test manifest without any entries is rejected."""
reader = self.reader("test-manifest-empty")
with six.assertRaisesRegex(self, SandboxValidationError, "Empty test manifest"):
with self.assertRaisesRegex(SandboxValidationError, "Empty test manifest"):
self.read_topsrcdir(reader)
def test_test_manifest_just_support_files(self):
"""A test manifest with no tests but support-files is not supported."""
reader = self.reader("test-manifest-just-support")
with six.assertRaisesRegex(self, SandboxValidationError, "Empty test manifest"):
with self.assertRaisesRegex(SandboxValidationError, "Empty test manifest"):
self.read_topsrcdir(reader)
def test_test_manifest_dupe_support_files(self):
@@ -846,11 +835,9 @@ class TestEmitterBasic(unittest.TestCase):
"""
reader = self.reader("test-manifest-dupes")
with six.assertRaisesRegex(
self,
with self.assertRaisesRegex(
SandboxValidationError,
"bar.js appears multiple times "
"in a test manifest under a support-files field, please omit the duplicate entry.",
"bar.js appears multiple times in a test manifest under a support-files field, please omit the duplicate entry.",
):
self.read_topsrcdir(reader)
@@ -893,10 +880,8 @@ class TestEmitterBasic(unittest.TestCase):
"""A non-existent shared support file reference produces an error."""
reader = self.reader("test-manifest-shared-missing")
with six.assertRaisesRegex(
self,
SandboxValidationError,
"entry in support-files not present in the srcdir",
with self.assertRaisesRegex(
SandboxValidationError, "entry in support-files not present in the srcdir"
):
self.read_topsrcdir(reader)
@@ -1005,10 +990,8 @@ class TestEmitterBasic(unittest.TestCase):
def test_test_manifest_unmatched_generated(self):
reader = self.reader("test-manifest-unmatched-generated")
with six.assertRaisesRegex(
self,
SandboxValidationError,
"entry in generated-files not present elsewhere",
with self.assertRaisesRegex(
SandboxValidationError, "entry in generated-files not present elsewhere"
):
self.read_topsrcdir(reader),
@@ -1033,10 +1016,8 @@ class TestEmitterBasic(unittest.TestCase):
"""Missing test files should result in error."""
reader = self.reader("test-manifest-missing-test-file")
with six.assertRaisesRegex(
self,
SandboxValidationError,
"lists test that does not exist: test_missing.html",
with self.assertRaisesRegex(
SandboxValidationError, "lists test that does not exist: test_missing.html"
):
self.read_topsrcdir(reader)
@@ -1044,8 +1025,8 @@ class TestEmitterBasic(unittest.TestCase):
"""Missing test files should result in error, even when the test list is not filtered."""
reader = self.reader("test-manifest-missing-test-file-unfiltered")
with six.assertRaisesRegex(
self, SandboxValidationError, "lists test that does not exist: missing.js"
with self.assertRaisesRegex(
SandboxValidationError, "lists test that does not exist: missing.js"
):
self.read_topsrcdir(reader)
@@ -1097,21 +1078,17 @@ class TestEmitterBasic(unittest.TestCase):
"""Test that invalid LOCAL_INCLUDES are properly detected."""
reader = self.reader("local_includes-invalid/srcdir")
with six.assertRaisesRegex(
self,
with self.assertRaisesRegex(
SandboxValidationError,
"Path specified in LOCAL_INCLUDES.*resolves to the "
"topsrcdir or topobjdir",
"Path specified in LOCAL_INCLUDES.*resolves to the topsrcdir or topobjdir",
):
self.read_topsrcdir(reader)
reader = self.reader("local_includes-invalid/objdir")
with six.assertRaisesRegex(
self,
with self.assertRaisesRegex(
SandboxValidationError,
"Path specified in LOCAL_INCLUDES.*resolves to the "
"topsrcdir or topobjdir",
"Path specified in LOCAL_INCLUDES.*resolves to the topsrcdir or topobjdir",
):
self.read_topsrcdir(reader)
@@ -1119,10 +1096,8 @@ class TestEmitterBasic(unittest.TestCase):
"""Test that a filename can't be used in LOCAL_INCLUDES."""
reader = self.reader("local_includes-filename")
with six.assertRaisesRegex(
self,
SandboxValidationError,
"Path specified in LOCAL_INCLUDES is a filename",
with self.assertRaisesRegex(
SandboxValidationError, "Path specified in LOCAL_INCLUDES is a filename"
):
self.read_topsrcdir(reader)
@@ -1175,34 +1150,30 @@ class TestEmitterBasic(unittest.TestCase):
self.assertIsInstance(obj.path, Path)
def test_jar_manifests_multiple_files(self):
with six.assertRaisesRegex(
self, SandboxValidationError, "limited to one value"
):
with self.assertRaisesRegex(SandboxValidationError, "limited to one value"):
reader = self.reader("jar-manifests-multiple-files")
self.read_topsrcdir(reader)
def test_xpidl_module_no_sources(self):
"""XPIDL_MODULE without XPIDL_SOURCES should be rejected."""
with six.assertRaisesRegex(
self, SandboxValidationError, "XPIDL_MODULE " "cannot be defined"
with self.assertRaisesRegex(
SandboxValidationError, "XPIDL_MODULE cannot be defined"
):
reader = self.reader("xpidl-module-no-sources")
self.read_topsrcdir(reader)
def test_xpidl_module_missing_sources(self):
"""Missing XPIDL_SOURCES should be rejected."""
with six.assertRaisesRegex(
self, SandboxValidationError, "File .* " "from XPIDL_SOURCES does not exist"
with self.assertRaisesRegex(
SandboxValidationError, "File .* from XPIDL_SOURCES does not exist"
):
reader = self.reader("missing-xpidl")
self.read_topsrcdir(reader)
def test_missing_local_includes(self):
"""LOCAL_INCLUDES containing non-existent directories should be rejected."""
with six.assertRaisesRegex(
self,
SandboxValidationError,
"Path specified in " "LOCAL_INCLUDES does not exist",
with self.assertRaisesRegex(
SandboxValidationError, "Path specified in LOCAL_INCLUDES does not exist"
):
reader = self.reader("missing-local-includes")
self.read_topsrcdir(reader)
@@ -1565,14 +1536,13 @@ class TestEmitterBasic(unittest.TestCase):
expected = {"install.rdf", "main.js"}
for f in files:
self.assertTrue(six.text_type(f) in expected)
self.assertTrue(str(f) in expected)
def test_missing_final_target_pp_files(self):
"""Test that FINAL_TARGET_PP_FILES with missing files throws errors."""
with six.assertRaisesRegex(
self,
with self.assertRaisesRegex(
SandboxValidationError,
"File listed in " "FINAL_TARGET_PP_FILES does not exist",
"File listed in FINAL_TARGET_PP_FILES does not exist",
):
reader = self.reader("dist-files-missing")
self.read_topsrcdir(reader)
@@ -1580,8 +1550,7 @@ class TestEmitterBasic(unittest.TestCase):
def test_final_target_pp_files_non_srcdir(self):
"""Test that non-srcdir paths in FINAL_TARGET_PP_FILES throws errors."""
reader = self.reader("final-target-pp-files-non-srcdir")
with six.assertRaisesRegex(
self,
with self.assertRaisesRegex(
SandboxValidationError,
"Only source directory paths allowed in FINAL_TARGET_PP_FILES:",
):
@@ -1601,17 +1570,15 @@ class TestEmitterBasic(unittest.TestCase):
expected = {"en-US/bar.ini", "en-US/code/*.js", "en-US/foo.js"}
for f in files:
self.assertTrue(six.text_type(f) in expected)
self.assertTrue(str(f) in expected)
def test_localized_files_no_en_us(self):
"""Test that LOCALIZED_FILES errors if a path does not start with
`en-US/` or contain `locales/en-US/`."""
reader = self.reader("localized-files-no-en-us")
with six.assertRaisesRegex(
self,
with self.assertRaisesRegex(
SandboxValidationError,
"LOCALIZED_FILES paths must start with `en-US/` or contain `locales/en-US/`: "
"foo.js",
"LOCALIZED_FILES paths must start with `en-US/` or contain `locales/en-US/`: foo.js",
):
self.read_topsrcdir(reader)
@@ -1629,7 +1596,7 @@ class TestEmitterBasic(unittest.TestCase):
expected = {"en-US/bar.ini", "en-US/foo.js"}
for f in files:
self.assertTrue(six.text_type(f) in expected)
self.assertTrue(str(f) in expected)
def test_mozsrc_files(self):
"""Test that MOZ_SRC_FILES automatically match objdir folders with the
@@ -1651,34 +1618,30 @@ class TestEmitterBasic(unittest.TestCase):
def test_rust_library_no_cargo_toml(self):
"""Test that defining a RustLibrary without a Cargo.toml fails."""
reader = self.reader("rust-library-no-cargo-toml")
with six.assertRaisesRegex(
self, SandboxValidationError, "No Cargo.toml file found"
):
with self.assertRaisesRegex(SandboxValidationError, "No Cargo.toml file found"):
self.read_topsrcdir(reader)
def test_rust_library_name_mismatch(self):
"""Test that defining a RustLibrary that doesn't match Cargo.toml fails."""
reader = self.reader("rust-library-name-mismatch")
with six.assertRaisesRegex(
self,
SandboxValidationError,
"library.*does not match Cargo.toml-defined package",
with self.assertRaisesRegex(
SandboxValidationError, "library.*does not match Cargo.toml-defined package"
):
self.read_topsrcdir(reader)
def test_rust_library_no_lib_section(self):
"""Test that a RustLibrary Cargo.toml with no [lib] section fails."""
reader = self.reader("rust-library-no-lib-section")
with six.assertRaisesRegex(
self, SandboxValidationError, "Cargo.toml for.* has no \\[lib\\] section"
with self.assertRaisesRegex(
SandboxValidationError, "Cargo.toml for.* has no \\[lib\\] section"
):
self.read_topsrcdir(reader)
def test_rust_library_invalid_crate_type(self):
"""Test that a RustLibrary Cargo.toml has a permitted crate-type."""
reader = self.reader("rust-library-invalid-crate-type")
with six.assertRaisesRegex(
self, SandboxValidationError, "crate-type.* is not permitted"
with self.assertRaisesRegex(
SandboxValidationError, "crate-type.* is not permitted"
):
self.read_topsrcdir(reader)
@@ -1706,8 +1669,8 @@ class TestEmitterBasic(unittest.TestCase):
"multiple-rust-libraries",
extra_substs=dict(RUST_TARGET="i686-pc-windows-msvc"),
)
with six.assertRaisesRegex(
self, SandboxValidationError, "Cannot link the following Rust libraries"
with self.assertRaisesRegex(
SandboxValidationError, "Cannot link the following Rust libraries"
):
self.read_topsrcdir(reader)
@@ -1730,35 +1693,29 @@ class TestEmitterBasic(unittest.TestCase):
def test_rust_library_duplicate_features(self):
"""Test that duplicate RustLibrary features are rejected."""
reader = self.reader("rust-library-duplicate-features")
with six.assertRaisesRegex(
self,
SandboxValidationError,
"features for .* should not contain duplicates",
with self.assertRaisesRegex(
SandboxValidationError, "features for .* should not contain duplicates"
):
self.read_topsrcdir(reader)
def test_rust_program_no_cargo_toml(self):
"""Test that specifying RUST_PROGRAMS without a Cargo.toml fails."""
reader = self.reader("rust-program-no-cargo-toml")
with six.assertRaisesRegex(
self, SandboxValidationError, "No Cargo.toml file found"
):
with self.assertRaisesRegex(SandboxValidationError, "No Cargo.toml file found"):
self.read_topsrcdir(reader)
def test_host_rust_program_no_cargo_toml(self):
"""Test that specifying HOST_RUST_PROGRAMS without a Cargo.toml fails."""
reader = self.reader("host-rust-program-no-cargo-toml")
with six.assertRaisesRegex(
self, SandboxValidationError, "No Cargo.toml file found"
):
with self.assertRaisesRegex(SandboxValidationError, "No Cargo.toml file found"):
self.read_topsrcdir(reader)
def test_rust_program_nonexistent_name(self):
"""Test that specifying RUST_PROGRAMS that don't exist in Cargo.toml
correctly throws an error."""
reader = self.reader("rust-program-nonexistent-name")
with six.assertRaisesRegex(
self, SandboxValidationError, "Cannot find Cargo.toml definition for"
with self.assertRaisesRegex(
SandboxValidationError, "Cannot find Cargo.toml definition for"
):
self.read_topsrcdir(reader)
@@ -1766,8 +1723,8 @@ class TestEmitterBasic(unittest.TestCase):
"""Test that specifying HOST_RUST_PROGRAMS that don't exist in
Cargo.toml correctly throws an error."""
reader = self.reader("host-rust-program-nonexistent-name")
with six.assertRaisesRegex(
self, SandboxValidationError, "Cannot find Cargo.toml definition for"
with self.assertRaisesRegex(
SandboxValidationError, "Cannot find Cargo.toml definition for"
):
self.read_topsrcdir(reader)
@@ -1843,16 +1800,15 @@ class TestEmitterBasic(unittest.TestCase):
def test_missing_workspace_hack(self):
"""Test detection of a missing workspace hack."""
reader = self.reader("rust-no-workspace-hack")
with six.assertRaisesRegex(
self, SandboxValidationError, "doesn't contain the workspace hack"
with self.assertRaisesRegex(
SandboxValidationError, "doesn't contain the workspace hack"
):
self.read_topsrcdir(reader)
def test_old_workspace_hack(self):
"""Test detection of an old workspace hack."""
reader = self.reader("rust-old-workspace-hack")
with six.assertRaisesRegex(
self,
with self.assertRaisesRegex(
SandboxValidationError,
"needs an update to its mozilla-central-workspace-hack dependency",
):
@@ -1902,8 +1858,7 @@ class TestEmitterBasic(unittest.TestCase):
from GENERATED_FILES is an error.
"""
reader = self.reader("test-symbols-file-objdir-missing-generated")
with six.assertRaisesRegex(
self,
with self.assertRaisesRegex(
SandboxValidationError,
"Objdir file specified in SYMBOLS_FILE not in GENERATED_FILES:",
):


@@ -4,7 +4,6 @@
import unittest
import six
from mozunit import main
from mozbuild.frontend.context import (
@@ -27,7 +26,7 @@ class Fuga(object):
class Piyo(ContextDerivedValue):
def __init__(self, context, value):
if not isinstance(value, six.text_type):
if not isinstance(value, str):
raise ValueError
self.context = context
self.value = value
@@ -39,28 +38,28 @@ class Piyo(ContextDerivedValue):
return self.value
def __eq__(self, other):
return self.value == six.text_type(other)
return self.value == str(other)
def __lt__(self, other):
return self.value < six.text_type(other)
return self.value < str(other)
def __le__(self, other):
return self.value <= six.text_type(other)
return self.value <= str(other)
def __gt__(self, other):
return self.value > six.text_type(other)
return self.value > str(other)
def __ge__(self, other):
return self.value >= six.text_type(other)
return self.value >= str(other)
def __hash__(self):
return hash(self.value)
VARIABLES = {
"HOGE": (six.text_type, six.text_type, None),
"FUGA": (Fuga, six.text_type, None),
"PIYO": (Piyo, six.text_type, None),
"HOGE": (str, str, None),
"FUGA": (Fuga, str, None),
"PIYO": (Piyo, str, None),
"HOGERA": (ContextDerivedTypedList(Piyo, StrictOrderingOnAppendList), list, None),
"HOGEHOGE": (
ContextDerivedTypedListWithItems(
@@ -121,7 +120,7 @@ class TestContext(unittest.TestCase):
self.assertEqual(e[1], "set_type")
self.assertEqual(e[2], "HOGE")
self.assertEqual(e[3], True)
self.assertEqual(e[4], six.text_type)
self.assertEqual(e[4], str)
def test_key_checking(self):
# Checking for existence of a key should not populate the key if it
@@ -144,7 +143,7 @@ class TestContext(unittest.TestCase):
self.assertEqual(e[1], "set_type")
self.assertEqual(e[2], "FUGA")
self.assertEqual(e[3], False)
self.assertEqual(e[4], six.text_type)
self.assertEqual(e[4], str)
ns["FUGA"] = "fuga"
self.assertIsInstance(ns["FUGA"], Fuga)
@@ -167,7 +166,7 @@ class TestContext(unittest.TestCase):
self.assertEqual(e[1], "set_type")
self.assertEqual(e[2], "PIYO")
self.assertEqual(e[3], False)
self.assertEqual(e[4], six.text_type)
self.assertEqual(e[4], str)
ns["PIYO"] = "piyo"
self.assertIsInstance(ns["PIYO"], Piyo)


@@ -154,7 +154,7 @@ class _TreeDiff(dircmp):
rv["diff_files"] += map(lambda l: basepath.format(l), dc.diff_files)
rv["funny"] += map(lambda l: basepath.format(l), dc.common_funny)
rv["funny"] += map(lambda l: basepath.format(l), dc.funny_files)
for subdir, _dc in six.iteritems(dc.subdirs):
for subdir, _dc in dc.subdirs.items():
self._fillDiff(_dc, rv, basepath.format(subdir + "/{0}"))
def allResults(self, left, right):
@@ -337,7 +337,7 @@ class TestJarMaker(unittest.TestCase):
("hoge", "foo", "2"): ("qux", "foo", "2"),
("hoge", "baz"): ("qux", "baz"),
}
for dest, src in six.iteritems(expected_symlinks):
for dest, src in expected_symlinks.items():
srcpath = os.path.join(self.srcdir, *src)
destpath = os.path.join(self.builddir, "chrome", "test", "dir", *dest)
self.assertTrue(


@@ -161,18 +161,18 @@ class TestHierarchicalStringList(unittest.TestCase):
def test_exports_subdir(self):
self.assertEqual(self.EXPORTS._children, {})
self.EXPORTS.foo += ["foo.h"]
six.assertCountEqual(self, self.EXPORTS._children, {"foo": True})
self.assertCountEqual(self.EXPORTS._children, {"foo": True})
self.assertEqual(self.EXPORTS.foo._strings, ["foo.h"])
self.EXPORTS.bar += ["bar.h"]
six.assertCountEqual(self, self.EXPORTS._children, {"foo": True, "bar": True})
self.assertCountEqual(self.EXPORTS._children, {"foo": True, "bar": True})
self.assertEqual(self.EXPORTS.foo._strings, ["foo.h"])
self.assertEqual(self.EXPORTS.bar._strings, ["bar.h"])
def test_exports_multiple_subdir(self):
self.EXPORTS.foo.bar = ["foobar.h"]
six.assertCountEqual(self, self.EXPORTS._children, {"foo": True})
six.assertCountEqual(self, self.EXPORTS.foo._children, {"bar": True})
six.assertCountEqual(self, self.EXPORTS.foo.bar._children, {})
self.assertCountEqual(self.EXPORTS._children, {"foo": True})
self.assertCountEqual(self.EXPORTS.foo._children, {"bar": True})
self.assertCountEqual(self.EXPORTS.foo.bar._children, {})
self.assertEqual(self.EXPORTS._strings, [])
self.assertEqual(self.EXPORTS.foo._strings, [])
self.assertEqual(self.EXPORTS.foo.bar._strings, ["foobar.h"])
@@ -180,8 +180,7 @@ class TestHierarchicalStringList(unittest.TestCase):
def test_invalid_exports_append(self):
with self.assertRaises(ValueError) as ve:
self.EXPORTS += "foo.h"
six.assertRegex(
self,
self.assertRegex(
str(ve.exception),
"Expected a list of strings, not <(?:type|class) '%s'>" % str_type,
)
@@ -190,8 +189,7 @@ class TestHierarchicalStringList(unittest.TestCase):
with self.assertRaises(ValueError) as ve:
self.EXPORTS.foo = "foo.h"
six.assertRegex(
self,
self.assertRegex(
str(ve.exception),
"Expected a list of strings, not <(?:type|class) '%s'>" % str_type,
)
@@ -200,8 +198,7 @@ class TestHierarchicalStringList(unittest.TestCase):
with self.assertRaises(ValueError) as ve:
self.EXPORTS += "foo.h"
six.assertRegex(
self,
self.assertRegex(
str(ve.exception),
"Expected a list of strings, not <(?:type|class) '%s'>" % str_type,
)
@@ -210,10 +207,9 @@ class TestHierarchicalStringList(unittest.TestCase):
with self.assertRaises(ValueError) as ve:
self.EXPORTS += [True]
six.assertRegex(
self,
self.assertRegex(
str(ve.exception),
"Expected a list of strings, not an element of " "<(?:type|class) 'bool'>",
"Expected a list of strings, not an element of <(?:type|class) 'bool'>",
)
def test_del_exports(self):
@@ -493,17 +489,13 @@ class TestStrictOrderingOnAppendListWithFlagsFactory(unittest.TestCase):
l["b"].update(xyz=1)
def test_strict_ordering_on_append_list_with_flags_factory_extend(self):
FooList = StrictOrderingOnAppendListWithFlagsFactory(
{"foo": bool, "bar": six.text_type}
)
FooList = StrictOrderingOnAppendListWithFlagsFactory({"foo": bool, "bar": str})
foo = FooList(["a", "b", "c"])
foo["a"].foo = True
foo["b"].bar = "bar"
# Don't allow extending lists with different flag definitions.
BarList = StrictOrderingOnAppendListWithFlagsFactory(
{"foo": six.text_type, "baz": bool}
)
BarList = StrictOrderingOnAppendListWithFlagsFactory({"foo": str, "baz": bool})
bar = BarList(["d", "e", "f"])
bar["d"].foo = "foo"
bar["e"].baz = True
@@ -723,11 +715,11 @@ class TestTypedList(unittest.TestCase):
class TypedTestStrictOrderingOnAppendList(unittest.TestCase):
def test_init(self):
class Unicode(six.text_type):
class Unicode(str):
def __new__(cls, other):
if not isinstance(other, six.text_type):
if not isinstance(other, str):
raise ValueError()
return six.text_type.__new__(cls, other)
return str.__new__(cls, other)
cls = TypedList(Unicode, StrictOrderingOnAppendList)
l = cls()
@@ -747,7 +739,7 @@ class TypedTestStrictOrderingOnAppendList(unittest.TestCase):
class TestTypedNamedTuple(unittest.TestCase):
def test_simple(self):
FooBar = TypedNamedTuple("FooBar", [("foo", six.text_type), ("bar", int)])
FooBar = TypedNamedTuple("FooBar", [("foo", str), ("bar", int)])
t = FooBar(foo="foo", bar=2)
self.assertEqual(type(t), FooBar)
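
The test changes above swap six's module-level unittest helpers for the equivalent methods on the TestCase instance. A minimal standalone sketch of that pattern (the test class and values are illustrative only, not from the tree):

```python
import unittest


class ExampleSixHelperMigration(unittest.TestCase):
    # Illustrative only: the TestCase methods that replace the six.assert*
    # wrappers used in the hunks above.
    def test_native_assert_helpers(self):
        # six.assertCountEqual(self, a, b)      -> self.assertCountEqual(a, b)
        self.assertCountEqual(["b", "a"], ["a", "b"])
        # six.assertRegex(self, text, pattern)  -> self.assertRegex(text, pattern)
        self.assertRegex("foo.h", r"\.h$")
        # six.assertRaisesRegex(self, exc, pat) -> self.assertRaisesRegex(exc, pat)
        with self.assertRaisesRegex(ValueError, "invalid literal"):
            int("not a number")


if __name__ == "__main__":
    unittest.main()
```
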


@@ -4,8 +4,6 @@
import os
import six
def toolchain_task_definitions():
# triggers override of the `graph_config_schema` noqa
@@ -22,7 +20,7 @@ def toolchain_task_definitions():
aliases = t.attributes.get("toolchain-alias")
if not aliases:
aliases = []
if isinstance(aliases, six.text_type):
if isinstance(aliases, str):
aliases = [aliases]
for alias in aliases:
aliased["toolchain-{}".format(alias)] = t


@@ -63,7 +63,7 @@ def hash_file(path, hasher=None):
return h.hexdigest()
class EmptyValue(six.text_type):
class EmptyValue(str):
"""A dummy type that behaves like an empty string and sequence.
This type exists in order to support
@@ -79,7 +79,7 @@ class ReadOnlyNamespace(object):
"""A class for objects with immutable attributes set at initialization."""
def __init__(self, **kwargs):
for k, v in six.iteritems(kwargs):
for k, v in kwargs.items():
super(ReadOnlyNamespace, self).__setattr__(k, v)
def __delattr__(self, key):
@@ -612,7 +612,7 @@ def FlagsFactory(flags):
_flags = flags
def update(self, **kwargs):
for k, v in six.iteritems(kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
def __getattr__(self, name):
@@ -872,7 +872,7 @@ class HierarchicalStringList(object):
if not isinstance(value, list):
raise ValueError("Expected a list of strings, not %s" % type(value))
for v in value:
if not isinstance(v, six.string_types):
if not isinstance(v, (str,)):
raise ValueError(
"Expected a list of strings, not an element of %s" % type(v)
)
@@ -1125,7 +1125,7 @@ def expand_variables(s, variables):
value = variables.get(name)
if not value:
continue
if not isinstance(value, six.string_types):
if not isinstance(value, (str,)):
value = " ".join(value)
result += value
return result
@@ -1180,7 +1180,7 @@ class EnumStringComparisonError(Exception):
pass
class EnumString(six.text_type):
class EnumString(str):
"""A string type that only can have a limited set of values, similarly to
an Enum, and can only be compared against that set of values.
@@ -1220,17 +1220,17 @@ def _escape_char(c):
# quoting could be done with either ' or ".
if c == "'":
return "\\'"
return six.text_type(c.encode("unicode_escape"))
return str(c.encode("unicode_escape"))
def ensure_bytes(value, encoding="utf-8"):
if isinstance(value, six.text_type):
if isinstance(value, str):
return value.encode(encoding)
return value
def ensure_unicode(value, encoding="utf-8"):
if isinstance(value, six.binary_type):
if isinstance(value, bytes):
return value.decode(encoding)
return value


@@ -5,7 +5,6 @@
import re
from collections import OrderedDict
import six
from packaging.version import Version
from mozpack.errors import errors
@@ -270,7 +269,7 @@ class Flags(OrderedDict):
flags.match(application='foo', appversion='3.0') returns False
"""
for name, value in six.iteritems(filter):
for name, value in filter.items():
if name not in self:
continue
if not self[name].matches(value):


@@ -9,8 +9,6 @@ import stat
import sys
from collections import Counter, OrderedDict, defaultdict
import six
import mozpack.path as mozpath
from mozpack.errors import errors
from mozpack.files import BaseFile, Dest
@@ -135,7 +133,7 @@ class FileRegistry(object):
for path, file in registry:
(...)
"""
return six.iteritems(self._files)
return iter(self._files.items())
def required_directories(self):
"""
@@ -292,7 +290,7 @@ class FileCopier(FileRegistry):
Returns a FileCopyResult that details what changed.
"""
assert isinstance(destination, six.string_types)
assert isinstance(destination, (str,))
assert not os.path.exists(destination) or os.path.isdir(destination)
result = FileCopyResult()
@@ -561,7 +559,7 @@ class Jarrer(FileRegistry, BaseFile):
def exists(self):
return self.deflater is not None
if isinstance(dest, six.string_types):
if isinstance(dest, (str,)):
dest = Dest(dest)
assert isinstance(dest, Dest)


@@ -49,7 +49,7 @@ else:
def _copyfile(src, dest):
# False indicates `dest` should be overwritten if it exists already.
if isinstance(src, six.text_type) and isinstance(dest, six.text_type):
if isinstance(src, str) and isinstance(dest, str):
_CopyFileW(src, dest, False)
elif isinstance(src, str) and isinstance(dest, str):
_CopyFileA(src, dest, False)
@@ -175,7 +175,7 @@ class BaseFile(object):
disabled when skip_if_older is False.
Returns whether a copy was actually performed (True) or not (False).
"""
if isinstance(dest, six.string_types):
if isinstance(dest, (str,)):
dest = Dest(dest)
else:
assert isinstance(dest, Dest)
@@ -297,11 +297,11 @@ class ExecutableFile(File):
def copy(self, dest, skip_if_older=True):
real_dest = dest
if not isinstance(dest, six.string_types):
if not isinstance(dest, (str,)):
fd, dest = mkstemp()
os.close(fd)
os.remove(dest)
assert isinstance(dest, six.string_types)
assert isinstance(dest, (str,))
# If File.copy didn't actually copy because dest is newer, check the
# file sizes. If dest is smaller, it means it is already stripped and
# elfhacked, so we can skip.
@@ -339,7 +339,7 @@ class AbsoluteSymlinkFile(File):
File.__init__(self, path)
def copy(self, dest, skip_if_older=True):
assert isinstance(dest, six.string_types)
assert isinstance(dest, (str,))
# The logic in this function is complicated by the fact that symlinks
# aren't universally supported. So, where symlinks aren't supported, we
@@ -430,7 +430,7 @@ class HardlinkFile(File):
"""
def copy(self, dest, skip_if_older=True):
assert isinstance(dest, six.string_types)
assert isinstance(dest, (str,))
if not hasattr(os, "link"):
return super(HardlinkFile, self).copy(dest, skip_if_older=skip_if_older)
@@ -488,7 +488,7 @@ class ExistingFile(BaseFile):
self.required = required
def copy(self, dest, skip_if_older=True):
if isinstance(dest, six.string_types):
if isinstance(dest, (str,)):
dest = Dest(dest)
else:
assert isinstance(dest, Dest)
@@ -540,7 +540,7 @@ class PreprocessedFile(BaseFile):
"""
Invokes the preprocessor to create the destination file.
"""
if isinstance(dest, six.string_types):
if isinstance(dest, (str,)):
dest = Dest(dest)
else:
assert isinstance(dest, Dest)
@@ -1120,7 +1120,7 @@ class ComposedFinder(BaseFinder):
self.files = FileRegistry()
for base, finder in sorted(six.iteritems(finders)):
for base, finder in sorted(finders.items()):
if self.files.contains(base):
self.files.remove(base)
for p, f in finder.find(""):
@@ -1247,7 +1247,7 @@ class FileListFinder(BaseFinder):
components = pattern.split("/")
prefix = "/".join(takewhile(lambda s: "*" not in s, components))
start = bisect.bisect_left(self._files, prefix)
for i in six.moves.range(start, len(self._files)):
for i in range(start, len(self._files)):
f = self._files[i]
if not f.startswith(prefix):
break
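
The mozpack hunks above all apply the same substitution for six's type aliases; note the auto fix keeps the tuple form for `six.string_types`. A minimal sketch with made-up values:

```python
# six.text_type     -> str
# six.binary_type   -> bytes
# six.string_types  -> (str,)   (tuple form preserved by the auto fix)
# six.integer_types -> (int,)
path = "chrome/foo/bar/baz"
assert isinstance(path, str)
assert isinstance(path, (str,))             # equivalent to the plain str check
assert isinstance(path.encode("utf-8"), bytes)
assert not isinstance(path.encode("utf-8"), str)
```
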


@@ -72,7 +72,7 @@ class JarStruct(object):
"""
assert self.MAGIC and isinstance(self.STRUCT, OrderedDict)
self.size_fields = set(
t for t in six.itervalues(self.STRUCT) if t not in JarStruct.TYPE_MAPPING
t for t in self.STRUCT.values() if t not in JarStruct.TYPE_MAPPING
)
self._values = {}
if data:
@@ -94,7 +94,7 @@ class JarStruct(object):
# For all fields used as other fields sizes, keep track of their value
# separately.
sizes = dict((t, 0) for t in self.size_fields)
for name, t in six.iteritems(self.STRUCT):
for name, t in self.STRUCT.items():
if t in JarStruct.TYPE_MAPPING:
value, size = JarStruct.get_data(t, data[offset:])
else:
@@ -113,7 +113,7 @@ class JarStruct(object):
Initialize an instance with empty fields.
"""
self.signature = self.MAGIC
for name, t in six.iteritems(self.STRUCT):
for name, t in self.STRUCT.items():
if name in self.size_fields:
continue
self._values[name] = 0 if t in JarStruct.TYPE_MAPPING else ""
@@ -140,10 +140,10 @@ class JarStruct(object):
serialized = struct.pack(b"<I", self.signature)
sizes = dict(
(t, name)
for name, t in six.iteritems(self.STRUCT)
for name, t in self.STRUCT.items()
if t not in JarStruct.TYPE_MAPPING
)
for name, t in six.iteritems(self.STRUCT):
for name, t in self.STRUCT.items():
if t in JarStruct.TYPE_MAPPING:
format, size = JarStruct.TYPE_MAPPING[t]
if name in sizes:
@@ -162,7 +162,7 @@ class JarStruct(object):
variable length fields.
"""
size = JarStruct.TYPE_MAPPING["uint32"][1]
for name, type in six.iteritems(self.STRUCT):
for name, type in self.STRUCT.items():
if type in JarStruct.TYPE_MAPPING:
size += JarStruct.TYPE_MAPPING[type][1]
else:
@@ -183,7 +183,7 @@ class JarStruct(object):
return key in self._values
def __iter__(self):
return six.iteritems(self._values)
return iter(self._values.items())
def __repr__(self):
return "<%s %s>" % (
@@ -403,7 +403,7 @@ class JarReader(object):
entries = self.entries
if not entries:
return JAR_STORED
return max(f["compression"] for f in six.itervalues(entries))
return max(f["compression"] for f in entries.values())
@property
def entries(self):
@@ -419,7 +419,7 @@ class JarReader(object):
preload = JarStruct.get_data("uint32", self._data)[0]
entries = OrderedDict()
offset = self._cdir_end["cdir_offset"]
for e in six.moves.xrange(self._cdir_end["cdir_entries"]):
for e in range(self._cdir_end["cdir_entries"]):
entry = JarCdirEntry(self._data[offset:])
offset += entry.size
# Creator host system. 0 is MSDOS, 3 is Unix
@@ -480,7 +480,7 @@ class JarReader(object):
for file in jarReader:
...
"""
for entry in six.itervalues(self.entries):
for entry in self.entries.values():
yield self._getreader(entry)
def __getitem__(self, name):
@@ -575,7 +575,7 @@ class JarWriter(object):
headers = {}
preload_size = 0
# Prepare central directory entries
for entry, content in six.itervalues(self._contents):
for entry, content in self._contents.values():
header = JarLocalFileHeader()
for name in entry.STRUCT:
if name in header:
@@ -599,12 +599,12 @@ class JarWriter(object):
offset = end["cdir_size"] + end["cdir_offset"] + end.size
preload_size += offset
self._data.write(struct.pack("<I", preload_size))
for entry, _ in six.itervalues(self._contents):
for entry, _ in self._contents.values():
entry["offset"] += offset
self._data.write(entry.serialize())
self._data.write(end.serialize())
# Store local file entries followed by compressed data
for entry, content in six.itervalues(self._contents):
for entry, content in self._contents.values():
self._data.write(headers[entry].serialize())
if isinstance(content, memoryview):
self._data.write(content.tobytes())
@@ -613,7 +613,7 @@ class JarWriter(object):
# On non optimized archives, store the central directory entries.
if not preload_size:
end["cdir_offset"] = offset
for entry, _ in six.itervalues(self._contents):
for entry, _ in self._contents.values():
self._data.write(entry.serialize())
# Store the end of central directory.
self._data.write(end.serialize())
@@ -653,7 +653,7 @@ class JarWriter(object):
deflater = data
else:
deflater = Deflater(compress, compress_level=self._compress_level)
if isinstance(data, (six.binary_type, six.string_types)):
if isinstance(data, (bytes, (str,))):
deflater.write(data)
elif hasattr(data, "read"):
try:


@@ -363,7 +363,7 @@ class SimplePackager(object):
bases = self.get_bases()
broken_bases = sorted(
m
for m, includer in six.iteritems(self._included_manifests)
for m, includer in self._included_manifests.items()
if mozpath.basedir(m, bases) != mozpath.basedir(includer, bases)
)
for m in broken_bases:


@@ -250,7 +250,7 @@ def _repack(app_finder, l10n_finder, copier, formatter, non_chrome=set()):
formatter.add(p, f)
# Transplant jar preloading information.
for path, log in six.iteritems(app_finder.jarlogs):
for path, log in app_finder.jarlogs.items():
assert isinstance(copier[path], Jarrer)
copier[path].preload([l.replace(locale, l10n_locale) for l in log])
@@ -284,7 +284,7 @@ def repack(
finders = {
"": l10n_finder,
}
for base, path in six.iteritems(extra_l10n):
for base, path in extra_l10n.items():
finders[base] = UnpackFinder(path, minify=minify)
l10n_finder = ComposedFinder(finders)
copier = FileCopier()
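
The packager/repack hunks above use six's dictionary-iteration substitutions; on Python 3 the plain dict view methods are already lazy. A minimal sketch of the same rewrites (dictionary contents invented for illustration):

```python
jarlogs = {"omni.ja": ["chrome/a", "chrome/b"], "browser/omni.ja": ["content/c"]}

# six.iteritems(d)  -> d.items()
for path, log in jarlogs.items():
    print(path, len(log))

# six.itervalues(d) -> d.values()
total_entries = sum(len(log) for log in jarlogs.values())

# six.iterkeys(d)   -> d.keys() (or simply iterating the dict)
names = sorted(jarlogs.keys())

# six.moves.xrange(n) -> range(n)
for i in range(len(names)):
    print(i, names[i])
```
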


@@ -7,7 +7,6 @@ import stat
import unittest
import mozunit
import six
import mozpack.path as mozpath
from mozpack.copier import FileCopier, FileRegistry, FileRegistrySubtree, Jarrer
@@ -124,7 +123,7 @@ class TestFileRegistry(BaseTestFileRegistry, unittest.TestCase):
"bar": [],
}
reg = FileRegistry()
for path, parts in six.iteritems(cases):
for path, parts in cases.items():
self.assertEqual(reg._partial_paths(path), parts)
def test_file_registry(self):


@@ -153,7 +153,7 @@ class TestDest(TestWithTmpDir):
rand = bytes(
random.choice(b"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")
for i in six.moves.xrange(131597)
for i in range(131597)
)
samples = [
b"",


@@ -111,26 +111,22 @@ for addon in ("addon0", "addon1", "app/chrome/addons/addon2"):
RESULT_FLAT.update(
{
mozpath.join(addon, p): f
for p, f in six.iteritems(
{
for p, f in {
"chrome.manifest": [
"manifest chrome/chrome.manifest",
"manifest components/components.manifest",
],
"chrome/chrome.manifest": [
"content %s foo/bar/" % mozpath.basename(addon),
],
"chrome/foo/bar/baz": FILES[
mozpath.join(addon, "chrome/foo/bar/baz")
"content %s foo/bar/" % mozpath.basename(addon)
],
"chrome/foo/bar/baz": FILES[mozpath.join(addon, "chrome/foo/bar/baz")],
"components/components.manifest": [
"interfaces bar.xpt",
"interfaces foo.xpt",
],
"components/bar.xpt": bar_xpt,
"components/foo.xpt": foo2_xpt,
}
)
}.items()
}
)
@@ -180,12 +176,12 @@ RESULT_JAR.update(
},
"addon1.xpi": {
mozpath.relpath(p, "addon1"): f
for p, f in six.iteritems(RESULT_FLAT)
for p, f in RESULT_FLAT.items()
if p.startswith("addon1/")
},
"app/chrome/addons/addon2.xpi": {
mozpath.relpath(p, "app/chrome/addons/addon2"): f
for p, f in six.iteritems(RESULT_FLAT)
for p, f in RESULT_FLAT.items()
if p.startswith("app/chrome/addons/addon2/")
},
}
@@ -227,7 +223,7 @@ RESULT_OMNIJAR.update(
),
(
mozpath.relpath(p, "app")
for p in six.iterkeys(RESULT_FLAT)
for p in RESULT_FLAT.keys()
if p.startswith("app/chrome/addons/addon2/")
),
)
@@ -258,14 +254,12 @@ RESULT_OMNIJAR_WITH_SUBPATH = {
CONTENTS_WITH_BASE = {
"bases": {
mozpath.join("base/root", b) if b else "base/root": a
for b, a in six.iteritems(CONTENTS["bases"])
for b, a in CONTENTS["bases"].items()
},
"manifests": [
m.move(mozpath.join("base/root", m.base)) for m in CONTENTS["manifests"]
],
"files": {
mozpath.join("base/root", p): f for p, f in six.iteritems(CONTENTS["files"])
},
"files": {mozpath.join("base/root", p): f for p, f in CONTENTS["files"].items()},
}
EXTRA_CONTENTS = {
@@ -276,7 +270,7 @@ CONTENTS_WITH_BASE["files"].update(EXTRA_CONTENTS)
def result_with_base(results):
result = {mozpath.join("base/root", p): v for p, v in six.iteritems(results)}
result = {mozpath.join("base/root", p): v for p, v in results.items()}
result.update(EXTRA_CONTENTS)
return result
@@ -293,7 +287,7 @@ def fill_formatter(formatter, contents):
for manifest in contents["manifests"]:
formatter.add_manifest(manifest)
for k, v in sorted(six.iteritems(contents["files"])):
for k, v in sorted(contents["files"].items()):
if k.endswith(".xpt"):
formatter.add_interfaces(k, v)
else:


@@ -5,7 +5,6 @@
import unittest
import mozunit
import six
from mozpack.chrome.manifest import Manifest, ManifestContent, ManifestLocale
from mozpack.copier import FileRegistry
@@ -145,7 +144,7 @@ class TestL10NRepack(unittest.TestCase):
self.assertEqual(
dict((p, f.open().read()) for p, f in copier),
dict((p, f.open().read()) for p, f in six.iteritems(repacked)),
dict((p, f.open().read()) for p, f in repacked.items()),
)


@@ -5,8 +5,6 @@
import os
import sys
import six
class NullTerminal(object):
"""Replacement for `blessed.Terminal()` that does no formatting."""
@@ -22,11 +20,11 @@ class NullTerminal(object):
except Exception:
self.is_a_tty = False
class NullCallableString(six.text_type):
class NullCallableString(str):
"""A dummy callable Unicode stolen from blessings"""
def __new__(cls):
new = six.text_type.__new__(cls, "")
new = str.__new__(cls, "")
return new
def __call__(self, *args):


@@ -9,8 +9,6 @@ import math
import os
import sys
import six
AWSY_PATH = os.path.dirname(os.path.realpath(__file__))
if AWSY_PATH not in sys.path:
sys.path.append(AWSY_PATH)
@@ -151,7 +149,7 @@ def create_suite(
memory_report_path, "resident-unique"
)
value = list(totals_rss.values())[0] + sum(
[v for k, v in six.iteritems(totals_uss) if "Main" not in k]
[v for k, v in totals_uss.items() if "Main" not in k]
)
subtest = {


@@ -167,7 +167,7 @@ def run_awsy(command_context, tests, binary=None, **kwargs):
if bin_dir not in sys.path:
sys.path.append(bin_dir)
for k, v in six.iteritems(kwargs):
for k, v in kwargs.items():
setattr(args, k, v)
parser.verify_usage(args)


@@ -6,7 +6,6 @@ import logging
import os
import sys
import six
from mach.decorators import Command
from mozbuild.base import BinaryNotFoundException
from mozbuild.base import MachCommandConditions as conditions
@@ -53,7 +52,7 @@ def run_firefox_ui_test(topsrcdir=None, **kwargs):
args = Namespace()
for k, v in six.iteritems(kwargs):
for k, v in kwargs.items():
setattr(args, k, v)
parser.verify_usage(args)


@@ -29,7 +29,7 @@ class MarionetteException(Exception):
"""
self.cause = cause
self.stacktrace = stacktrace
self._message = six.text_type(message)
self._message = str(message)
def __str__(self):
# pylint: disable=W1645
@@ -50,7 +50,7 @@ class MarionetteException(Exception):
if tb:
msg += ": " + "".join(traceback.format_tb(tb))
return six.text_type(msg)
return str(msg)
@property
def message(self):


@@ -23,10 +23,8 @@ import traceback
from copy import deepcopy
import mozversion
import six
from mozprofile import Profile
from mozrunner import FennecEmulatorRunner, Runner
from six import reraise
from . import errors
@@ -274,7 +272,7 @@ class GeckoInstance(object):
profile_path = profile
# If a path to a profile is given then clone it
if isinstance(profile_path, six.string_types):
if isinstance(profile_path, str):
profile_args["path_from"] = profile_path
profile_args["path_to"] = tempfile.mkdtemp(
suffix=".{}".format(profile_name or os.path.basename(profile_path)),
@@ -359,7 +357,7 @@ class GeckoInstance(object):
msg = 'Application "{0}" unknown (should be one of {1})'.format(
app, list(apps.keys())
)
reraise(NotImplementedError, NotImplementedError(msg), tb)
raise NotImplementedError(msg).with_traceback(tb)
return instance_class(*args, **kwargs)
@@ -538,11 +536,9 @@ class FennecInstance(GeckoInstance):
self.runner.start()
except Exception:
exc_cls, exc, tb = sys.exc_info()
reraise(
exc_cls,
exc_cls("Error possibly due to runner or device args: {}".format(exc)),
tb,
)
raise exc_cls(
"Error possibly due to runner or device args: {}".format(exc)
).with_traceback(tb)
# forward marionette port
self.runner.device.device.forward(


@@ -12,9 +12,6 @@ import time
import traceback
from contextlib import contextmanager
import six
from six import reraise
from . import errors, transport
from .decorators import do_process_check
from .geckoinstance import GeckoInstance
@@ -643,7 +640,7 @@ class Marionette(object):
except OSError:
_, value, tb = sys.exc_info()
msg = "Port {}:{} is unavailable ({})".format(self.host, self.port, value)
reraise(IOError, OSError(msg), tb)
raise OSError(msg).with_traceback(tb)
try:
self.instance.start()
@@ -657,7 +654,7 @@ class Marionette(object):
"Process killed after {}s because no connection to Marionette "
"server could be established. Check gecko.log for errors"
)
reraise(IOError, OSError(msg.format(timeout)), sys.exc_info()[2])
raise OSError(msg.format(timeout)).with_traceback(sys.exc_info()[2])
def cleanup(self):
if self.session is not None:
@@ -817,7 +814,7 @@ class Marionette(object):
# If the application hasn't been launched by Marionette no further action can be done.
# In such cases we simply re-throw the exception.
if not self.instance:
reraise(exc_cls, exc, tb)
raise exc.with_traceback(tb)
else:
# Somehow the socket disconnected. Give the application some time to shutdown
@@ -850,9 +847,9 @@ class Marionette(object):
message += " (Reason: {reason})"
reraise(
IOError, OSError(message.format(returncode=returncode, reason=exc)), tb
)
raise OSError(
message.format(returncode=returncode, reason=exc)
).with_traceback(tb)
@staticmethod
def convert_keys(*string):
@@ -1015,7 +1012,7 @@ class Marionette(object):
)
pref_exists = True
with self.using_context(self.CONTEXT_CHROME):
for pref, value in six.iteritems(prefs):
for pref, value in prefs.items():
if type(value) is not str:
value = json.dumps(value)
pref_exists = self.execute_script(
@@ -1281,17 +1278,15 @@ class Marionette(object):
self._send_message("Marionette:AcceptConnections", {"value": True})
message = "Process still running {}s after restart request"
reraise(exc_cls, exc_cls(message.format(timeout_restart)), tb)
raise exc_cls(message.format(timeout_restart)).with_traceback(tb)
else:
# The process shutdown but didn't start again.
self.cleanup()
msg = "Process unexpectedly quit without restarting (exit code: {})"
reraise(
exc_cls,
exc_cls(msg.format(self.instance.runner.returncode)),
tb,
)
raise exc_cls(
msg.format(self.instance.runner.returncode)
).with_traceback(tb)
self.is_shutting_down = False
@@ -1378,7 +1373,7 @@ class Marionette(object):
exc_type, value, tb = sys.exc_info()
if self.instance and self.instance.runner.is_running():
self.instance.close()
reraise(exc_type, exc_type(value.message), tb)
raise exc_type(value.message).with_traceback(tb)
self.session_id = resp["sessionId"]
self.session = resp["capabilities"]
@@ -1737,7 +1732,7 @@ class Marionette(object):
wrapped = {WEB_FRAME_KEY: args.id}
elif type(args) is WebWindow:
wrapped = {WEB_WINDOW_KEY: args.id}
elif isinstance(args, (bool, int, float, six.string_types)) or args is None:
elif isinstance(args, (bool, int, float, str)) or args is None:
wrapped = args
return wrapped
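
The marionette changes above rewrite `six.reraise(exc_cls, new_exc, tb)` as the native Python 3 `raise new_exc.with_traceback(tb)`. A minimal sketch of the same pattern (the helper name and message are hypothetical, not from the driver):

```python
import sys


def parse_port(value):
    # Hypothetical helper: wraps the original error while keeping its
    # traceback, mirroring the rewrites in the patched code.
    try:
        return int(value)
    except ValueError as e:
        exc_cls, _, tb = sys.exc_info()
        # Old: six.reraise(exc_cls, exc_cls("invalid port: {}".format(e)), tb)
        raise exc_cls("invalid port: {}".format(e)).with_traceback(tb)


try:
    parse_port("not-a-port")
except ValueError as err:
    print(err)                              # invalid port: invalid literal ...
    print(err.__traceback__ is not None)    # True: original frames retained
```
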


@@ -310,7 +310,7 @@ class TcpTransport(object):
except socket.timeout:
exc_cls, exc, tb = sys.exc_info()
msg = "Connection attempt failed because no data has been received over the socket: {}"
six.reraise(exc_cls, exc_cls(msg.format(exc)), tb)
raise exc_cls(msg.format(exc)).with_traceback(tb)
hello = json.loads(raw)
application_type = hello.get("applicationType")


@@ -381,7 +381,7 @@ class MarionetteTestCase(CommonTestCase):
for name in dir(test_mod):
obj = getattr(test_mod, name)
if isinstance(obj, six.class_types) and issubclass(obj, unittest.TestCase):
if isinstance(obj, type) and issubclass(obj, unittest.TestCase):
testnames = testloader.getTestCaseNames(obj)
for testname in testnames:
suite.addTest(


@@ -19,13 +19,12 @@ import mozinfo
import moznetwork
import mozprofile
import mozversion
import six
from manifestparser import TestManifest
from manifestparser.filters import tags
from marionette_driver.marionette import Marionette
from moztest.adapters.unit import StructuredTestResult, StructuredTestRunner
from moztest.results import TestResult, TestResultCollection, relevant_line
from six import MAXSIZE, reraise
from six import MAXSIZE
from . import serve
@@ -743,7 +742,7 @@ class BaseMarionetteTestRunner(object):
def update(d, u):
"""Update a dictionary that may contain nested dictionaries."""
for k, v in six.iteritems(u):
for k, v in u.items():
o = d.get(k, {})
if isinstance(v, dict) and isinstance(o, dict):
d[k] = update(d.get(k, {}), v)
@@ -768,11 +767,9 @@ class BaseMarionetteTestRunner(object):
data.append(json.loads(f.read()))
except ValueError as e:
msg = "JSON file ({0}) is not properly formatted: {1}"
reraise(
ValueError,
ValueError(msg.format(os.path.abspath(path), e)),
sys.exc_info()[2],
)
raise ValueError(
msg.format(os.path.abspath(path), e)
).with_traceback(sys.exc_info()[2])
return data
@property
@@ -894,7 +891,7 @@ class BaseMarionetteTestRunner(object):
except Exception as e:
exc_cls, _, tb = sys.exc_info()
msg = "Connection attempt to {0}:{1} failed with error: {2}"
reraise(exc_cls, exc_cls(msg.format(host, port, e)), tb)
raise exc_cls(msg.format(host, port, e)).with_traceback(tb)
if self.workspace:
kwargs["workspace"] = self.workspace_path
if self.headless:
@@ -1048,7 +1045,7 @@ class BaseMarionetteTestRunner(object):
# reraise previous interruption now
if interrupted:
reraise(interrupted[0], interrupted[1], interrupted[2])
raise interrupted[1].with_traceback(interrupted[2])
def _print_summary(self, tests):
self.logger.info("\nSUMMARY\n-------")


@@ -15,8 +15,6 @@ import os
import sys
from collections import defaultdict
from six import iteritems
from . import httpd
__all__ = [
@@ -199,12 +197,12 @@ def where_is(uri, on="http"):
def iter_proc(servers):
for _, (_, proc) in iteritems(servers):
for _, (_, proc) in servers.items():
yield proc
def iter_url(servers):
for _, (url, _) in iteritems(servers):
for _, (url, _) in servers.items():
yield url


@@ -7,7 +7,6 @@ import sys
import traceback
import types
import six
from mozlog import commandline, get_default_logger
@@ -54,7 +53,7 @@ class TestRunner(object):
self.logger = get_default_logger(component="TestRunner")
def gather_tests(self):
for item in six.itervalues(globals()):
for item in globals().values():
if isinstance(item, types.FunctionType) and item.__name__.startswith(
"test_"
):


@@ -696,10 +696,10 @@ if mozinfo.isWin:
log.warning("unable to get handle for pid %d: %d" % (pid, err))
return
if not isinstance(file_name, six.text_type):
if not isinstance(file_name, str):
# Convert to unicode explicitly so our path will be valid as input
# to CreateFileW
file_name = six.text_type(file_name, sys.getfilesystemencoding())
file_name = str(file_name, sys.getfilesystemencoding())
file_handle = kernel32.CreateFileW(
file_name,


@@ -12,7 +12,6 @@ import unittest
import mozfile
import mozunit
import six
class TestNamedTemporaryFile(unittest.TestCase):
@@ -68,7 +67,7 @@ class TestNamedTemporaryFile(unittest.TestCase):
path = None
with mozfile.NamedTemporaryFile(delete=True) as tf:
path = tf.name
self.assertTrue(isinstance(path, six.string_types))
self.assertTrue(isinstance(path, (str,)))
self.assertFalse(os.path.exists(path))
# it is also deleted when __del__ is called


@@ -20,7 +20,7 @@ from socketserver import ThreadingMixIn
from urllib.parse import unquote, urlsplit
import moznetwork
from six import ensure_binary, iteritems
from six import ensure_binary
class EasyServer(ThreadingMixIn, HTTPServer):
@@ -92,7 +92,7 @@ class RequestHandler(SimpleHTTPRequestHandler):
self.request, *m.groups()
)
self.send_response(response_code)
for keyword, value in iteritems(headerdict):
for keyword, value in headerdict.items():
self.send_header(keyword, value)
self.end_headers()
self.wfile.write(ensure_binary(data))
@@ -106,7 +106,7 @@ class RequestHandler(SimpleHTTPRequestHandler):
using self.path_mappings and self.docroot.
Return (url_path, disk_path)."""
path_components = list(filter(None, self.request.path.split("/")))
for prefix, disk_path in iteritems(self.path_mappings):
for prefix, disk_path in self.path_mappings.items():
prefix_components = list(filter(None, prefix.split("/")))
if len(path_components) < len(prefix_components):
continue


@@ -4,10 +4,8 @@
import re
import six
class StringVersion(six.text_type):
class StringVersion(str):
"""
A string version that can be compared with comparison operators.
"""


@@ -16,7 +16,7 @@ from optparse import OptionParser
import mozfile
import mozinfo
import requests
from six import PY3, reraise
from six import PY3
try:
import pefile
@@ -114,7 +114,7 @@ def install(src, dest):
except Exception:
exc, val, tb = sys.exc_info()
error = InvalidSource("{} ({})".format(msg, val))
reraise(InvalidSource, error, tb)
raise error.with_traceback(tb)
raise InvalidSource(msg)
src = os.path.realpath(src)
@@ -154,9 +154,9 @@ def install(src, dest):
pass
if issubclass(cls, Exception):
error = InstallError('Failed to install "%s (%s)"' % (src, str(exc)))
reraise(InstallError, error, trbk)
raise error.with_traceback(trbk)
# any other kind of exception like KeyboardInterrupt is just re-raised.
reraise(cls, exc, trbk)
raise exc.with_traceback(trbk)
finally:
# trbk won't get GC'ed due to circular reference
@@ -258,7 +258,7 @@ def uninstall(install_folder):
error = UninstallError(
"Failed to uninstall %s (%s)" % (install_folder, str(ex))
)
reraise(UninstallError, error, trbk)
raise error.with_traceback(trbk)
finally:
# trbk won't get GC'ed due to circular reference


@@ -8,8 +8,6 @@ import os
import sys
from collections import defaultdict
import six
from . import formatters, handlers
from .structuredlog import StructuredLogger, set_default_logger
@@ -179,14 +177,14 @@ def add_logging_group(parser, include_formatters=None):
opt_log_type = log_file
group_add = group.add_argument
for name, (cls, help_str) in six.iteritems(log_formatters):
for name, (cls, help_str) in log_formatters.items():
if name in include_formatters:
group_add(
"--log-" + name, action="append", type=opt_log_type, help=help_str
)
for fmt in include_formatters:
for optname, (cls, help_str, formatters_, action) in six.iteritems(fmt_options):
for optname, (cls, help_str, formatters_, action) in fmt_options.items():
if fmt not in formatters_:
continue
if optname.startswith("no-") and action == "store_false":
@@ -220,12 +218,12 @@ def setup_handlers(logger, formatters, formatter_options, allow_unused_options=F
)
raise ValueError(msg)
for fmt, streams in six.iteritems(formatters):
for fmt, streams in formatters.items():
formatter_cls = log_formatters[fmt][0]
formatter = formatter_cls()
handler_wrappers_and_options = []
for option, value in six.iteritems(formatter_options[fmt]):
for option, value in formatter_options[fmt].items():
wrapper, wrapper_args = None, ()
if option == "valgrind":
wrapper = valgrind_handler_wrapper
@@ -291,7 +289,7 @@ def setup_logging(
else:
defaults = {"raw": sys.stdout}
for name, values in six.iteritems(args):
for name, values in args.items():
parts = name.split("_")
if len(parts) > 3:
continue
@@ -305,7 +303,7 @@ def setup_logging(
_, formatter = parts
for value in values:
found = True
if isinstance(value, six.string_types):
if isinstance(value, (str,)):
value = log_file(value)
if value == sys.stdout:
found_stdout_logger = True
@@ -320,11 +318,11 @@ def setup_logging(
# If there is no user-specified logging, go with the default options
if not found:
for name, value in six.iteritems(defaults):
for name, value in defaults.items():
formatters[name].append(value)
elif not found_stdout_logger and sys.stdout in list(defaults.values()):
for name, value in six.iteritems(defaults):
for name, value in defaults.items():
if value == sys.stdout:
formatters[name].append(value)


@@ -7,8 +7,6 @@ import platform
import subprocess
import sys
import six
from mozlog.formatters import base
DEFAULT_MOVE_UP_CODE = "\x1b[A"
@@ -139,9 +137,7 @@ class GroupingFormatter(base.BaseFormatter):
return new_display + "No tests running.\n"
def suite_start(self, data):
self.number_of_tests = sum(
len(tests) for tests in six.itervalues(data["tests"])
)
self.number_of_tests = sum(len(tests) for tests in data["tests"].values())
self.start_time = data["time"]
if self.number_of_tests == 0:
@@ -189,7 +185,7 @@ class GroupingFormatter(base.BaseFormatter):
def get_lines_for_known_intermittents(self, known_intermittent_results):
lines = []
for (test, subtest), data in six.iteritems(self.known_intermittent_results):
for (test, subtest), data in self.known_intermittent_results.items():
status = data["status"]
known_intermittent = ", ".join(data["known_intermittent"])
expected = " [expected %s, known intermittent [%s]" % (
@@ -240,7 +236,7 @@ class GroupingFormatter(base.BaseFormatter):
else:
failures_by_stack[failure["stack"]].append(failure)
for stack, failures in six.iteritems(failures_by_stack):
for stack, failures in failures_by_stack.items():
output += make_subtests_failure(test_name, failures, stack)
return output


@@ -185,17 +185,17 @@ class HTMLFormatter(base.BaseFormatter):
else:
href = content
else:
if not isinstance(content, (six.text_type, six.binary_type)):
if not isinstance(content, (str, bytes)):
# All types must be json serializable
content = json.dumps(content)
# Decode to text type if JSON output is byte string
if not isinstance(content, six.text_type):
if not isinstance(content, str):
content = content.decode("utf-8")
# Encode base64 to avoid that some browsers (such as Firefox, Opera)
# treats '#' as the start of another link if it is contained in the data URL.
if isinstance(content, six.text_type):
if isinstance(content, str):
is_known_utf8 = True
content_bytes = six.text_type(content).encode(
content_bytes = str(content).encode(
"utf-8", "xmlcharrefreplace"
)
else:
@@ -274,7 +274,7 @@ class HTMLFormatter(base.BaseFormatter):
html.p(
"%i tests ran in %.1f seconds."
% (
sum(six.itervalues(self.test_count)),
sum(self.test_count.values()),
(self.suite_times["end"] - self.suite_times["start"])
/ 1000.0,
),

View File

@@ -6,7 +6,6 @@
import time
from functools import reduce
import six
from mozterm import Terminal
from ..handlers import SummaryHandler
@@ -142,7 +141,7 @@ class MachFormatter(base.BaseFormatter):
return test_id
def _get_file_name(self, test_id):
if isinstance(test_id, (str, six.text_type)):
if isinstance(test_id, (str, str)):
return test_id
if isinstance(test_id, tuple):
@@ -151,7 +150,7 @@ class MachFormatter(base.BaseFormatter):
assert False, "unexpected test_id"
def suite_start(self, data):
num_tests = reduce(lambda x, y: x + len(y), six.itervalues(data["tests"]), 0)
num_tests = reduce(lambda x, y: x + len(y), data["tests"].values(), 0)
action = self.color_formatter.action(data["action"].upper())
name = ""
if "name" in data:


@@ -6,8 +6,6 @@ import functools
from collections import deque
from functools import reduce
import six
from ..handlers import SummaryHandler
from .base import BaseFormatter
from .process import strstatus
@@ -159,7 +157,7 @@ class TbplFormatter(BaseFormatter):
def suite_start(self, data):
self.suite_start_time = data["time"]
num_tests = reduce(lambda x, y: x + len(y), six.itervalues(data["tests"]), 0)
num_tests = reduce(lambda x, y: x + len(y), data["tests"].values(), 0)
return "SUITE-START | Running %i tests\n" % num_tests
def test_start(self, data):
@@ -325,7 +323,7 @@ class TbplFormatter(BaseFormatter):
return "SUITE-END | took %is\n" % time
def test_id(self, test_id):
if isinstance(test_id, (str, six.text_type)):
if isinstance(test_id, (str, str)):
return test_id
else:
return tuple(test_id)


@@ -4,15 +4,13 @@
from xml.etree import ElementTree
import six
from . import base
def format_test_id(test_id):
"""Take a test id and return something that looks a bit like
a class path"""
if not isinstance(test_id, six.string_types):
if not isinstance(test_id, (str,)):
# Not sure how to deal with reftests yet
raise NotImplementedError


@@ -114,7 +114,7 @@ class StreamHandler(BaseHandler):
except UnicodeEncodeError:
return
else:
if isinstance(formatted, six.text_type):
if isinstance(formatted, str):
self.stream.write(formatted.encode("utf-8", "replace"))
elif isinstance(formatted, str):
self.stream.write(formatted)


@@ -4,8 +4,6 @@
from collections import OrderedDict, defaultdict
import six
from ..reader import LogHandler
@@ -76,7 +74,7 @@ class SummaryHandler(LogHandler):
Yields a tuple of (suite, summary). The summary returned is
the same format as returned by 'get'.
"""
for suite, data in six.iteritems(self.summary):
for suite, data in self.summary.items():
yield suite, data
@classmethod


@@ -4,8 +4,6 @@
import inspect
import six
convertor_registry = {}
missing = object()
no_default = object()
@@ -94,7 +92,7 @@ class log_action(object):
if name not in values:
values[name] = self.args[name].default
for key, value in six.iteritems(values):
for key, value in values.items():
if key in self.args:
out_value = self.args[key](value)
if out_value is not missing:
@@ -109,7 +107,7 @@ class log_action(object):
def convert_known(self, **kwargs):
known_kwargs = {
name: value for name, value in six.iteritems(kwargs) if name in self.args
name: value for name, value in kwargs.items() if name in self.args
}
return self.convert(**known_kwargs)
@@ -166,16 +164,16 @@ class ContainerType(DataType):
class Unicode(DataType):
def convert(self, data):
if isinstance(data, six.text_type):
if isinstance(data, str):
return data
if isinstance(data, str):
return data.decode("utf8", "replace")
return six.text_type(data)
return str(data)
class TestId(DataType):
def convert(self, data):
if isinstance(data, six.text_type):
if isinstance(data, str):
return data
elif isinstance(data, bytes):
return data.decode("utf-8", "replace")
@@ -245,7 +243,7 @@ class List(ContainerType):
def convert(self, data):
# while dicts and strings _can_ be cast to lists,
# doing so is likely not intentional behaviour
if isinstance(data, (six.string_types, dict)):
if isinstance(data, ((str,), dict)):
raise ValueError("Expected list but got %s" % type(data))
return [self.item_type.convert(item) for item in data]


@@ -5,7 +5,6 @@
import time
import pytest
import six
import mozlog
@@ -15,10 +14,10 @@ def pytest_addoption(parser):
# Pytest's parser doesn't have the add_argument_group method Mozlog expects.
group = parser.getgroup("mozlog")
for name, (_class, _help) in six.iteritems(mozlog.commandline.log_formatters):
for name, (_class, _help) in mozlog.commandline.log_formatters.items():
group.addoption("--log-{0}".format(name), action="append", help=_help)
formatter_options = six.iteritems(mozlog.commandline.fmt_options)
formatter_options = mozlog.commandline.fmt_options.items()
for name, (_class, _help, formatters, action) in formatter_options:
for formatter in formatters:
if formatter in mozlog.commandline.log_formatters:
@@ -90,7 +89,7 @@ class MozLog(object):
status = "SKIP" if not hasattr(report, "wasxfail") else "FAIL"
if report.longrepr is not None:
longrepr = report.longrepr
if isinstance(longrepr, six.string_types):
if isinstance(longrepr, (str,)):
# When using pytest-xdist, longrepr is serialised as a str
message = stack = longrepr
if longrepr.startswith("[XPASS(strict)]"):


@@ -8,7 +8,6 @@ import argparse
import format as formatlog
import logmerge
import six
import unstable
@@ -25,7 +24,7 @@ def get_parser():
sub_parser = parser.add_subparsers(title="Subcommands")
for command, (parser_func, main_func) in six.iteritems(commands):
for command, (parser_func, main_func) in commands.items():
parent = parser_func(False)
command_parser = sub_parser.add_parser(
command, description=parent.description, parents=[parent]


@@ -6,8 +6,6 @@ import argparse
import json
from collections import defaultdict
import six
from mozlog import reader
@@ -19,7 +17,7 @@ class StatusHandler(reader.LogHandler):
)
def test_id(self, test):
if type(test) in (str, six.text_type):
if type(test) in (str, str):
return test
else:
return tuple(test)
@@ -55,9 +53,9 @@ def _filter(results_cmp):
def inner(statuses):
rv = defaultdict(lambda: defaultdict(dict))
for run_info, tests in six.iteritems(statuses):
for test, subtests in six.iteritems(tests):
for name, results in six.iteritems(subtests):
for run_info, tests in statuses.items():
for test, subtests in tests.items():
for name, results in subtests.items():
if results_cmp(results):
rv[run_info][test][name] = results
@@ -73,16 +71,16 @@ filter_stable = _filter(lambda x: len(x) == 1)
def group_results(data):
rv = defaultdict(lambda: defaultdict(lambda: defaultdict(int)))
for run_info, tests in six.iteritems(data):
for test, subtests in six.iteritems(tests):
for name, results in six.iteritems(subtests):
for status, number in six.iteritems(results):
for run_info, tests in data.items():
for test, subtests in tests.items():
for name, results in subtests.items():
for status, number in results.items():
rv[test][name][status] += number
return rv
def print_results(data):
for run_info, tests in six.iteritems(data):
for run_info, tests in data.items():
run_str = (
" ".join("%s:%s" % (k, v) for k, v in run_info)
if run_info
@@ -97,12 +95,12 @@ def print_run(tests):
for test, subtests in sorted(tests.items()):
print("\n" + str(test))
print("-" * len(test))
for name, results in six.iteritems(subtests):
for name, results in subtests.items():
print(
"[%s]: %s"
% (
name if name is not None else "",
" ".join("%s (%i)" % (k, v) for k, v in six.iteritems(results)),
" ".join("%s (%i)" % (k, v) for k, v in results.items()),
)
)


@@ -9,8 +9,6 @@ import traceback
from multiprocessing import current_process
from threading import Lock, current_thread
import six
from .logtypes import (
Any,
Boolean,
@@ -277,7 +275,7 @@ class StructuredLogger(object):
action = raw_data["action"]
converted_data = convertor_registry[action].convert_known(**raw_data)
for k, v in six.iteritems(raw_data):
for k, v in raw_data.items():
if (
k not in converted_data
and k not in convertor_registry[action].optional_args


@@ -12,7 +12,6 @@ import unittest
import mozfile
import mozlog.unstructured as mozlog
import mozunit
import six
class ListHandler(mozlog.Handler):
@@ -80,7 +79,7 @@ class TestStructuredLogging(unittest.TestCase):
The actual message should contain no fields other than the timestamp
field and those present in expected."""
self.assertTrue(isinstance(actual["_time"], six.integer_types))
self.assertTrue(isinstance(actual["_time"], (int,)))
for k, v in expected.items():
self.assertEqual(v, actual[k])


@@ -45,7 +45,7 @@ class BaseStructuredTest(unittest.TestCase):
specials = set(["time"])
all_expected.update(expected)
for key, value in six.iteritems(all_expected):
for key, value in all_expected.items():
self.assertEqual(actual[key], value)
self.assertEqual(set(all_expected.keys()) | specials, set(actual.keys()))
@@ -967,7 +967,7 @@ class TestBuffer(BaseStructuredTest):
specials = set(["time"])
all_expected.update(expected)
for key, value in six.iteritems(all_expected):
for key, value in all_expected.items():
self.assertEqual(actual[key], value)
self.assertEqual(set(all_expected.keys()) | specials, set(actual.keys()))


@@ -6,8 +6,6 @@ import platform
import re
import subprocess
import six
from .macintelpower import MacIntelPower
from .mozpowerutils import average_summary, frequency_summary, get_logger, sum_summary
@@ -105,7 +103,7 @@ class MozPower(object):
raise NotImplementedError
else:
self._os = self._get_os().lower()
cpu = six.text_type(self._get_processor_info().lower())
cpu = str(self._get_processor_info().lower())
if "intel" in cpu:
self._cpu = "intel"


@@ -293,7 +293,7 @@ class ProcessHandlerMixin(object):
errwrite,
*_,
) = args_tuple
if not isinstance(args, six.string_types):
if not isinstance(args, (str,)):
args = subprocess.list2cmdline(args)
# Always or in the create new process group


@@ -16,8 +16,6 @@ from ctypes import (
)
from ctypes.wintypes import BOOL, BYTE, DWORD, HANDLE, LARGE_INTEGER
import six
LPVOID = c_void_p
LPDWORD = POINTER(DWORD)
SIZE_T = c_size_t
@@ -106,7 +104,7 @@ class JobObjectInfo(object):
}
def __init__(self, _class):
if isinstance(_class, six.string_types):
if isinstance(_class, (str,)):
assert _class in self.mapping, "Class should be one of %s; you gave %s" % (
self.mapping,
_class,


@@ -14,7 +14,7 @@ from xml.dom import minidom
import mozfile
from mozlog.unstructured import getLogger
from six import reraise, string_types
from six import string_types
_SALT = binascii.hexlify(os.urandom(32))
_TEMPORARY_ADDON_SUFFIX = "@temporary-addon"
@@ -295,7 +295,7 @@ class AddonManager(object):
"Add-on path is neither an XPI nor a directory: %s" % addon_path
)
except (OSError, KeyError) as e:
reraise(AddonFormatError, AddonFormatError(str(e)), sys.exc_info()[2])
raise AddonFormatError(str(e)).with_traceback(sys.exc_info()[2])
if is_webext:
details["version"] = manifest["version"]
@@ -333,7 +333,7 @@ class AddonManager(object):
if entry in details.keys():
details.update({entry: get_text(node)})
except Exception as e:
reraise(AddonFormatError, AddonFormatError(str(e)), sys.exc_info()[2])
raise AddonFormatError(str(e)).with_traceback(sys.exc_info()[2])
# turn unpack into a true/false value
if isinstance(details["unpack"], string_types):


@@ -10,7 +10,6 @@ import sys
import time
import mozinfo
import six
from mozprocess import ProcessHandler
from mozproxy.backends.base import Playback
@@ -61,7 +60,7 @@ class Mitmproxy(Playback):
)
raise Exception("Please provide a playback_files list.")
if not isinstance(self.config.get("recording_file"), six.string_types):
if not isinstance(self.config.get("recording_file"), (str,)):
LOG.error("recording_file argument type is not str!")
raise Exception("recording_file argument type invalid!")


@@ -18,7 +18,6 @@ try:
import mozcrash
except ImportError:
mozcrash = None
from six import reraise
from ..application import DefaultContext
from ..errors import RunnerNotStartedError
@@ -143,11 +142,9 @@ class BaseRunner(object):
self.process_handler = process
except Exception as e:
reraise(
RunnerNotStartedError,
RunnerNotStartedError("Failed to start the process: {}".format(e)),
sys.exc_info()[2],
)
raise RunnerNotStartedError(
"Failed to start the process: {}".format(e)
).with_traceback(sys.exc_info()[2])
self.crashed = 0
return self.process_handler.pid


@@ -910,12 +910,12 @@ class AndroidEmulator(object):
f.write(line)
def _telnet_read_until(self, telnet, expected, timeout):
if six.PY3 and isinstance(expected, six.text_type):
if six.PY3 and isinstance(expected, str):
expected = expected.encode("ascii")
return telnet.read_until(expected, timeout)
def _telnet_write(self, telnet, command):
if six.PY3 and isinstance(command, six.text_type):
if six.PY3 and isinstance(command, str):
command = command.encode("ascii")
telnet.write(command)
@@ -1144,7 +1144,7 @@ def _verify_kvm(substs):
command = [emulator_path, "-accel-check"]
try:
out = subprocess.check_output(command)
if six.PY3 and not isinstance(out, six.text_type):
if six.PY3 and not isinstance(out, str):
out = out.decode("utf-8")
if "is installed and usable" in "".join(out):
return


@@ -565,7 +565,7 @@ class BuildBackendLoader(TestLoader):
# self.topsrcdir was normalized to use /, revert back to \ if needed.
topsrcdir = os.path.normpath(self.topsrcdir)
for path, tests in six.iteritems(test_data):
for path, tests in test_data.items():
for metadata in tests:
defaults_manifests = [metadata["manifest"]]
@@ -604,7 +604,7 @@ class TestManifestLoader(TestLoader):
self.finder = FileFinder(self.topsrcdir)
self.reader = self.mozbuild_reader(config_mode="empty")
self.variables = {
"{}_MANIFESTS".format(k): v[0] for k, v in six.iteritems(TEST_MANIFESTS)
"{}_MANIFESTS".format(k): v[0] for k, v in TEST_MANIFESTS.items()
}
self.variables.update(
{"{}_MANIFESTS".format(f.upper()): f for f in REFTEST_FLAVORS}
@@ -1199,7 +1199,7 @@ class TestResolver(MozbuildObject):
print("Loading wpt manifest failed")
return
for manifest, data in six.iteritems(manifests):
for manifest, data in manifests.items():
tests_root = data[
"tests_path"
] # full path on disk until web-platform tests directory
@@ -1330,7 +1330,7 @@ class TestResolver(MozbuildObject):
run_suites.add(entry)
continue
suitefound = False
for suite, v in six.iteritems(TEST_SUITES):
for suite, v in TEST_SUITES.items():
if entry.lower() in v.get("aliases", []):
run_suites.add(suite)
suitefound = True


@@ -6,7 +6,6 @@ import os
import time
import mozinfo
import six
class TestContext(object):
@@ -69,7 +68,7 @@ class TestContext(object):
def get(attr):
value = getattr(self, attr)
if isinstance(value, dict):
value = frozenset(six.iteritems(value))
value = frozenset(value.items())
return value
return hash(frozenset([get(a) for a in self.attrs]))
@@ -111,7 +110,7 @@ class TestResult(object):
result_expected,
", ".join(self.POSSIBLE_RESULTS),
)
assert isinstance(name, six.string_types), "name has to be a string"
assert isinstance(name, (str,)), "name has to be a string"
assert result_expected in self.POSSIBLE_RESULTS, msg
self.name = name
@@ -203,7 +202,7 @@ class TestResult(object):
raise ValueError(msg)
# use lists instead of multiline strings
if isinstance(output, six.string_types):
if isinstance(output, (str,)):
output = output.splitlines()
self.time_end = time_end if time_end is not None else time.time()


@@ -345,7 +345,7 @@ class ScriptMixin(PlatformMixin):
str: in case `path` is a string. The result is the path with the new notation.
type(path): `path` itself is returned in case `path` is not str type.
"""
if not isinstance(path, six.string_types):
if not isinstance(path, (str,)):
return path
path = path.replace("\\", "/")
@@ -1393,7 +1393,7 @@ class ScriptMixin(PlatformMixin):
del env[k]
if os.name == "nt":
pref_encoding = locale.getpreferredencoding()
for k, v in six.iteritems(env):
for k, v in env.items():
# When run locally on Windows machines, some environment
# variables may be unicode.
env[k] = six.ensure_str(v, pref_encoding)
@@ -1446,7 +1446,7 @@ class ScriptMixin(PlatformMixin):
if isinstance(exe, dict):
found = False
# allow for searchable paths of the exe
for name, path in six.iteritems(exe):
for name, path in exe.items():
if isinstance(path, list) or isinstance(path, tuple):
path = [x % repl_dict for x in path]
if all([os.path.exists(section) for section in path]):
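
The ScriptMixin hunks switch both loops to .items() while keeping six.ensure_str for the actual coercion. One detail the environment loop relies on: on Python 3, items() is a view, and reassigning the value of an existing key while iterating is safe, whereas adding or removing keys mid-loop raises RuntimeError. A throwaway sketch (not mozharness code):

env = {"PATH": b"/usr/bin", "HOME": "/home/user"}
for k, v in env.items():
    # Replacing values of existing keys does not resize the dict,
    # so the view stays valid during iteration.
    env[k] = v.decode("utf-8") if isinstance(v, bytes) else v
assert env["PATH"] == "/usr/bin"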


@@ -686,7 +686,7 @@ items from that key's value."
# explicitly
if c.get("update_channel"):
update_channel = c["update_channel"]
if six.PY2 and isinstance(update_channel, six.text_type):
if six.PY2 and isinstance(update_channel, str):
update_channel = update_channel.encode("utf-8")
env["MOZ_UPDATE_CHANNEL"] = update_channel
else: # let's just give the generic channel based on branch
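
With six.text_type spelled as str, the surviving six.PY2 guard above short-circuits to False on Python 3, so the encode branch is effectively inert there. A tiny sketch of that short-circuit (invented channel value, not mozharness config):

PY2 = False  # what six.PY2 evaluates to on Python 3
update_channel = "release"
if PY2 and isinstance(update_channel, str):
    update_channel = update_channel.encode("utf-8")
assert update_channel == "release"  # the branch never ran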


@@ -143,7 +143,7 @@ class MercurialRepoManipulationMixin(object):
force=None,
halt_on_failure=True,
):
if isinstance(tags, six.string_types):
if isinstance(tags, (str,)):
tags = [tags]
cmd = self.query_exe("hg", return_type="list") + ["tag"]
if not message:


@@ -16,7 +16,6 @@ import shutil
import subprocess
import sys
import six
from mozsystemmonitor.resourcemonitor import SystemResourceMonitor
import mozharness
@@ -576,7 +575,7 @@ class Talos(
kw_options.update(kw)
# talos expects tests to be in the format (e.g.) 'ts:tp5:tsvg'
tests = kw_options.get("activeTests")
if tests and not isinstance(tests, six.string_types):
if tests and not isinstance(tests, (str,)):
tests = ":".join(tests) # Talos expects this format
kw_options["activeTests"] = tests
for key, value in kw_options.items():


@@ -8,8 +8,6 @@ import os
import re
from collections import defaultdict
import six
from mozharness.base.script import PostScriptAction
from mozharness.base.transfer import TransferMixin
@@ -163,7 +161,7 @@ class TryToolsMixin(TransferMixin):
return label_dict[val]
return "--%s" % val.replace("_", "-")
for label, (opts, _) in six.iteritems(self.known_try_arguments):
for label, (opts, _) in self.known_try_arguments.items():
if "action" in opts and opts["action"] not in (
"append",
"store",
@@ -188,7 +186,7 @@ class TryToolsMixin(TransferMixin):
# This is a pretty hacky way to echo arguments down to the harness.
# Hopefully this can be improved once we have a configuration system
# in tree for harnesses that relies less on a command line.
for arg, value in six.iteritems(vars(args)):
for arg, value in vars(args).items():
if value:
label = label_from_val(arg)
_, flavors = self.known_try_arguments[label]


@@ -9,8 +9,6 @@ import re
import sys
from multiprocessing.pool import ThreadPool
import six
sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
@@ -195,7 +193,7 @@ class ChecksumsGenerator(BaseScript, VirtualenvMixin):
pool.map(worker, find_checksums_files())
for c in raw_checksums:
for f, info in six.iteritems(parse_checksums_file(c)):
for f, info in parse_checksums_file(c).items():
for pattern in self.config["includes"]:
if re.search(pattern, f):
if f in self.checksums:


@@ -4,7 +4,6 @@
import argparse
import os
import six
from mozlog.commandline import add_logging_group
(FIREFOX, CHROME, SAFARI, SAFARI_TP, CHROMIUM_RELEASE) = DESKTOP_APPS = [
@@ -722,7 +721,7 @@ class _PrintTests(_StopAction):
test_list[suite]["subtests"].append(subtest)
# print the list in a nice, readable format
for key in sorted(six.iterkeys(test_list)):
for key in sorted(test_list.keys()):
print("\n%s" % key)
print(" type: %s" % test_list[key]["type"])
if len(test_list[key]["subtests"]) != 0:
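
In this hunk six.iterkeys(test_list) becomes test_list.keys(); since iterating a dict already yields its keys, sorted(test_list) would give the same ordering. A tiny sketch with invented suite names (not raptor's test list):

test_list = {"speedometer": {"type": "benchmark"}, "amazon": {"type": "pageload"}}
assert sorted(test_list.keys()) == sorted(test_list) == ["amazon", "speedometer"]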


@@ -818,7 +818,7 @@ class PerftestOutput(object):
failed_tests = []
for pagecycle in data:
for _sub, _value in six.iteritems(pagecycle[0]):
for _sub, _value in pagecycle[0].items():
if _value["decodedFrames"] == 0:
failed_tests.append(
"%s test Failed. decodedFrames %s droppedFrames %s."


@@ -1147,9 +1147,7 @@ class BrowsertimeResultsHandler(PerftestResultsHandler):
item
):
# add page cycle custom measurements to the existing results
for measurement in six.iteritems(
new_result["measurements"]
):
for measurement in new_result["measurements"].items():
self.results[i]["measurements"][measurement[0]].extend(
measurement[1]
)
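
After the rewrite, the collapsed loop iterates (name, values) tuples from new_result["measurements"].items() and extends the stored lists by index. A standalone sketch of the same merge with invented measurement names (the patch itself keeps the measurement[0] / measurement[1] indexing):

existing = {"fcp": [100, 110], "lcp": [200]}
new = {"fcp": [105], "lcp": [210, 220]}
for measurement in new.items():
    existing[measurement[0]].extend(measurement[1])
assert existing == {"fcp": [100, 110, 105], "lcp": [200, 210, 220]}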


@@ -15,8 +15,6 @@ from ctypes import (
)
from ctypes.wintypes import DWORD, HANDLE, LONG, LPCSTR, LPCWSTR, LPSTR
import six
from talos.cmanager_base import CounterManager
from talos.utils import TalosError
@@ -69,7 +67,7 @@ def _getExpandedCounterPaths(processName, counterName):
paths = []
i = 0
path = ""
for j in six.moves.range(0, pcchPathListLength.value):
for j in range(0, pcchPathListLength.value):
c = struct.unpack_from("c", buffer, offset=j)[0]
if c == "\0":
if j == i:


@@ -4,7 +4,6 @@
import argparse
import os
import six
from mozlog.commandline import add_logging_group
@@ -33,7 +32,7 @@ class _ListTests(_StopAction):
print("================\n")
test_class_names = [
(test_class.name(), test_class.description())
for test_class in six.itervalues(test.test_dict())
for test_class in test.test_dict().values()
]
test_class_names.sort()
for name, description in test_class_names:
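
six.itervalues(test.test_dict()) becomes test.test_dict().values(), so the comprehension consumes a plain values view. A sketch with a stand-in test class (invented name, not a real perftest suite):

class StandInTest:
    @classmethod
    def name(cls):
        return "stand-in"

    @classmethod
    def description(cls):
        return "placeholder test"

test_dict = {"stand-in": StandInTest}
entries = sorted((cls.name(), cls.description()) for cls in test_dict.values())
assert entries == [("stand-in", "placeholder test")]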


@@ -13,7 +13,6 @@ import tempfile
import mozfile
import mozinfo
import mozrunner
import six
from mozlog import get_proxy_logger
from mozprofile.profile import Profile
@@ -70,7 +69,7 @@ class FFSetup(object):
def _init_env(self):
self.env = dict(os.environ)
for k, v in six.iteritems(self.browser_config["env"]):
for k, v in self.browser_config["env"].items():
self.env[k] = str(v)
self.env["MOZ_CRASHREPORTER_NO_REPORT"] = "1"
if self.browser_config["symbols_path"]:
@@ -156,14 +155,14 @@ class FFSetup(object):
# installing webextensions
webextensions_to_install = []
webextensions_folder = self.test_config.get("webextensions_folder", None)
if isinstance(webextensions_folder, six.string_types):
if isinstance(webextensions_folder, (str,)):
folder = utils.interpolate(webextensions_folder)
for file in os.listdir(folder):
if file.endswith(".xpi"):
webextensions_to_install.append(os.path.join(folder, file))
webextensions = self.test_config.get("webextensions", None)
if isinstance(webextensions, six.string_types):
if isinstance(webextensions, (str,)):
webextensions_to_install.append(webextensions)
if webextensions_to_install is not None:


@@ -4,8 +4,6 @@
import math
import six
"""
data filters:
takes a series of run data and applies statistical transforms to it
@@ -166,7 +164,7 @@ def dromaeo(series):
@register_filter
@define_filter
def dromaeo_chunks(series, size):
for i in six.moves.range(0, len(series), size):
for i in range(0, len(series), size):
yield series[i : i + size]
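
six.moves.range maps directly to the built-in range on Python 3, so the chunking generator behaves the same. A quick usage sketch with made-up numbers (not talos data):

def chunks(series, size):
    # Yield consecutive slices of at most `size` elements.
    for i in range(0, len(series), size):
        yield series[i : i + size]

assert list(chunks([1, 2, 3, 4, 5], 2)) == [[1, 2], [3, 4], [5]]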


@@ -41,7 +41,7 @@ def useBaseTestDefaults(base, tests):
def set_tp_preferences(test, browser_config):
# sanity check pageloader values
# mandatory options: tpmanifest, tpcycles
if test["tpcycles"] not in six.moves.range(1, 1000):
if test["tpcycles"] not in range(1, 1000):
raise TalosError("pageloader cycles must be int 1 to 1,000")
if "tpmanifest" not in test:
raise TalosError("tpmanifest not found in test: %s" % test)
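
The same range substitution covers the pageloader sanity check; on Python 3, membership tests against a range are constant-time for integers, and the upper bound is exclusive. A small sketch (values invented):

assert 1 in range(1, 1000)
assert 999 in range(1, 1000)
assert 1000 not in range(1, 1000)  # the upper bound is excluded
assert 0 not in range(1, 1000)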

Some files were not shown because too many files have changed in this diff.