Bug 1918098 - ruff: fix the errors identified by 0.6.4 r=linter-reviewers,taskgraph-reviewers,releng-reviewers,webdriver-reviewers,perftest-reviewers,migration-reviewers,jmaher,whimboo,sparky,xpcom-reviewers,beth,ahal,mconley
Differential Revision: https://phabricator.services.mozilla.com/D221874
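
Nearly all of the hunks below apply the same mechanical fix: ruff's type-comparison rule (E721) flags equality checks against type(), because type objects should be compared by identity, and isinstance() is usually the right tool when subclasses should match too. The sketch below is illustrative only (the class names are made up and do not come from the patch):

    class Base:
        pass


    class Child(Base):
        pass


    obj = Child()

    # Flagged by E721: equality comparison between type objects.
    print(type(obj) == Base)      # False

    # The replacement used throughout this commit: identity against the exact type.
    print(type(obj) is Child)     # True
    print(type(obj) is Base)      # False

    # Often the better choice when subclasses should also be accepted.
    print(isinstance(obj, Base))  # True
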
@@ -306,7 +306,7 @@ class TestFirefoxRefresh(MarionetteTestCase):
""",
script_args=(self._historyURL,),
)
-if type(historyResult) == str:
+if type(historyResult) is str:
self.fail(historyResult)
return

@@ -322,7 +322,7 @@ class TestFirefoxRefresh(MarionetteTestCase):
""",
script_args=(self._formHistoryFieldName,),
)
-if type(formFieldResults) == str:
+if type(formFieldResults) is str:
self.fail(formFieldResults)
return

@@ -357,7 +357,7 @@ class TestFirefoxRefresh(MarionetteTestCase):
}).then(resolve);
""",
)
-if type(formAutofillResults) == str:
+if type(formAutofillResults) is str:
self.fail(formAutofillResults)
return

@@ -465,7 +465,7 @@ class TestFirefoxRefresh(MarionetteTestCase):
});
"""
)
-if type(result) != dict:
+if type(result) is not dict:
self.fail(result)
return
self.assertEqual(result["accountData"]["email"], "test@test.com")

@@ -556,7 +556,7 @@ def main():
elif value is None:
if key in config:
del config[key]
-elif type(old_value) != type(value):
+elif type(old_value) is not type(value):
raise Exception(
"{} is overriding `{}` with a value of the wrong type".format(
c.name, key

@@ -33,7 +33,7 @@ U = TypeVar("U")
def cross_combine(*args_tup: list[dict]) -> list[dict]:
args = list(args_tup)
for i, a in enumerate(args):
-assert type(a) == list, f"Arg{i} is {type(a)}, expected {list}."
+assert type(a) is list, f"Arg{i} is {type(a)}, expected {list}."

def cross_combine2(listA, listB):
listC = []

@@ -172,7 +172,7 @@ def writeMappingsBinarySearchBody(
# Sort the subtags by length. That enables using an optimized comparator
# for the binary search, which only performs a single |memcmp| for multiple
# of two subtag lengths.
-mappings_keys = mappings.keys() if type(mappings) == dict else mappings
+mappings_keys = mappings.keys() if type(mappings) is dict else mappings
for length, subtags in groupby(sorted(mappings_keys, key=len), len):
# Omit the length check if the current length is the maximum length.
if length != tag_maxlength:

@@ -203,7 +203,7 @@ def writeMappingsBinarySearchBody(

# Don't emit a binary search for short lists.
if len(subtags) == 1:
-if type(mappings) == dict:
+if type(mappings) is dict:
println(
"""
if ({}) {{

@@ -228,7 +228,7 @@ def writeMappingsBinarySearchBody(
)
)
elif len(subtags) <= 4:
-if type(mappings) == dict:
+if type(mappings) is dict:
for subtag in subtags:
println(
"""

@@ -265,7 +265,7 @@ def writeMappingsBinarySearchBody(
else:
write_array(subtags, source_name + "s", length, True)

-if type(mappings) == dict:
+if type(mappings) is dict:
write_array([mappings[k] for k in subtags], "aliases", length, False)

println(

@@ -150,7 +150,7 @@ def sourcelink(symbol=None, loc=None, range=None):


def quoted_dict(d):
-return {k: escape(v) for k, v in d.items() if type(v) == str}
+return {k: escape(v) for k, v in d.items() if type(v) is str}


num_hazards = 0

@@ -1,6 +1,5 @@
# flake8: noqa: F821

from collections import defaultdict

test.compile("source.cpp")
test.run_analysis_script()

@@ -29,7 +29,7 @@ try:
# testlibdir is set on the GDB command line, via:
# --eval-command python testlibdir=...
execfile(os.path.join(testlibdir, "prologue.py"), globals(), locals())
-except Exception as err:
+except Exception:
sys.stderr.write("Error running GDB prologue:\n")
traceback.print_exc()
sys.exit(1)
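
The GDB prologue hunk just above drops the unused `as err` binding: nothing in the handler reads the bound exception, so ruff reports it (presumably via the unused-variable check, F841) and the name can simply go. A self-contained sketch of the same shape; the error message is copied from the hunk, the rest is illustrative:

    import sys
    import traceback

    try:
        raise RuntimeError("boom")  # stand-in for the execfile() call
    except Exception:  # no "as err": the handler never reads the exception object
        sys.stderr.write("Error running GDB prologue:\n")
        traceback.print_exc()  # print_exc() already sees the active exception
        # the original then calls sys.exit(1); omitted so this snippet returns
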
@@ -5,7 +5,7 @@ import mozilla.prettyprinters


@mozilla.prettyprinters.pretty_printer("unscoped_no_storage")
-class my_typedef(object):
+class UnscopedNoStoragePrinter(object):
def __init__(self, value, cache):
pass

@@ -14,7 +14,7 @@ class my_typedef(object):


@mozilla.prettyprinters.pretty_printer("unscoped_with_storage")
-class my_typedef(object):
+class UnscopedWithStoragePrinter(object):
def __init__(self, value, cache):
pass

@@ -23,7 +23,7 @@ class my_typedef(object):


@mozilla.prettyprinters.pretty_printer("scoped_no_storage")
-class my_typedef(object):
+class ScopedNoStoragePrinter(object):
def __init__(self, value, cache):
pass

@@ -32,7 +32,7 @@ class my_typedef(object):


@mozilla.prettyprinters.pretty_printer("scoped_with_storage")
-class my_typedef(object):
+class ScopedWithStoragePrinter(object):
def __init__(self, value, cache):
pass

@@ -311,7 +311,7 @@ def convertTestFile(test262parser, testSource, testName, includeSet, strictTests
# currently ignoring the error phase attribute.
# testRec["negative"] == {type=<error name>, phase=parse|resolution|runtime}
isNegative = "negative" in testRec
-assert not isNegative or type(testRec["negative"]) == dict
+assert not isNegative or type(testRec["negative"]) is dict
errorType = testRec["negative"]["type"] if isNegative else None

# Async tests are marked with the "async" attribute.

@@ -156,7 +156,7 @@ def check_pref_list(pref_list):
if "name" not in pref:
error("missing `name` key")
name = pref["name"]
-if type(name) != str:
+if type(name) is not str:
error("non-string `name` value `{}`".format(name))
if "." not in name:
error("`name` value `{}` lacks a '.'".format(name))

@@ -185,7 +185,7 @@ def check_pref_list(pref_list):
error("missing `value` key for pref `{}`".format(name))
value = pref["value"]
if typ == "String" or typ == "DataMutexString":
-if type(value) != str:
+if type(value) is not str:
error(
"non-string `value` value `{}` for `{}` pref `{}`; "
"add double quotes".format(value, typ, name)

@@ -206,7 +206,7 @@ def check_pref_list(pref_list):
# Check 'do_not_use_directly' if present.
if "do_not_use_directly" in pref:
do_not_use_directly = pref["do_not_use_directly"]
-if type(do_not_use_directly) != bool:
+if type(do_not_use_directly) is not bool:
error(
"non-boolean `do_not_use_directly` value `{}` for pref "
"`{}`".format(do_not_use_directly, name)

@@ -220,7 +220,7 @@ def check_pref_list(pref_list):
# Check 'include' if present.
if "include" in pref:
include = pref["include"]
-if type(include) != str:
+if type(include) is not str:
error(
"non-string `include` value `{}` for pref `{}`".format(
include, name

@@ -235,7 +235,7 @@ def check_pref_list(pref_list):
# Check 'rust' if present.
if "rust" in pref:
rust = pref["rust"]
-if type(rust) != bool:
+if type(rust) is not bool:
error("non-boolean `rust` value `{}` for pref `{}`".format(rust, name))
if rust and mirror == "never":
error(

@@ -7,7 +7,6 @@ from textwrap import TextWrapper
from mach.config import TYPE_CLASSES
from mach.decorators import Command, CommandArgument


# Interact with settings for mach.

# Currently, we only provide functionality to view what settings are

@@ -168,7 +168,7 @@ class MozSiteMetadata:

def __eq__(self, other):
return (
-type(self) == type(other)
+type(self) is type(other)
and self.hex_version == other.hex_version
and self.site_name == other.site_name
and self.mach_site_packages_source == other.mach_site_packages_source

@@ -818,7 +818,7 @@ def update_git_tools(git: Optional[Path], root_state_dir: Path):
os.chmod(path, stat.S_IRWXU)
func(path)
else:
-raise
+raise exc

shutil.rmtree(str(cinnabar_dir), onerror=onerror)

@@ -9,7 +9,6 @@ from mach.decorators import Command, CommandArgument
from mozbuild.shellutil import quote as shell_quote
from mozbuild.shellutil import split as shell_split


# Instropection commands.

@@ -210,7 +210,7 @@ class LintSandbox(ConfigureSandbox):
name = args[0]
default = kwargs.get("default")

-if type(default) != bool:
+if type(default) is not bool:
return

table = {

@@ -94,12 +94,12 @@ class OptionValue(tuple):
)

# Allow explicit tuples to be compared.
-if type(other) == tuple:
+if type(other) is tuple:
return tuple.__eq__(self, other)
elif isinstance(other, bool):
return bool(self) == other
# Else we're likely an OptionValue class.
-elif type(other) != type(self):
+elif type(other) is not type(self):
return False
else:
return super(OptionValue, self).__eq__(other)

@@ -2604,7 +2604,7 @@ VARIABLES = {

# Sanity check: we don't want any variable above to have a list as storage type.
for name, (storage_type, input_types, docs) in VARIABLES.items():
-if storage_type == list:
+if storage_type is list:
raise RuntimeError('%s has a "list" storage type. Use "List" instead.' % name)

# Set of variables that are only allowed in templates:

@@ -148,7 +148,7 @@ class VariablePassthru(ContextDerived):
in our build backends since we will continue to be tied to our rules.mk.
"""

-__slots__ = "variables"
+__slots__ = ("variables",)

def __init__(self, context):
ContextDerived.__init__(self, context)

@@ -197,7 +197,7 @@ class BaseDefines(ContextDerived):
which are OrderedDicts.
"""

-__slots__ = "defines"
+__slots__ = ("defines",)

def __init__(self, context, defines):
ContextDerived.__init__(self, context)

@@ -477,7 +477,7 @@ class BaseProgram(Linkable):
Otherwise, the suffix is appended to the program name.
"""

-__slots__ = "program"
+__slots__ = ("program",)

DICT_ATTRS = {"install_target", "KIND", "program", "relobjdir"}

@@ -1201,7 +1201,7 @@ class FinalTargetFiles(ContextDerived):
HierarchicalStringList, which is created when parsing FINAL_TARGET_FILES.
"""

-__slots__ = "files"
+__slots__ = ("files",)

def __init__(self, sandbox, files):
ContextDerived.__init__(self, sandbox)

@@ -1218,7 +1218,7 @@ class FinalTargetPreprocessedFiles(ContextDerived):
FINAL_TARGET_PP_FILES.
"""

-__slots__ = "files"
+__slots__ = ("files",)

def __init__(self, sandbox, files):
ContextDerived.__init__(self, sandbox)
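
The `__slots__` hunks above address a different warning: a bare string assigned to `__slots__` declares a single slot whose name is that string, which is easy to misread as a sequence of names and awkward to extend, so the patch switches to the explicit one-element tuple (presumably ruff's single-string-slots check). A small stand-alone illustration with hypothetical classes:

    class Stringly:
        # A bare string is one slot named "variables" -- legal, but the form
        # has to change as soon as a second slot is added.
        __slots__ = "variables"


    class Tupley:
        # The one-element tuple used by the patch; behaviour is identical and
        # extra slot names are just extra elements.
        __slots__ = ("variables",)


    s = Stringly()
    s.variables = 1  # the only attribute a Stringly instance can hold
    t = Tupley()
    t.variables = 2  # same restriction, clearer declaration
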
@@ -799,7 +799,7 @@ class BuildReaderError(Exception):
s.write("\n")
s.write("This variable expects the following type(s):\n")
s.write("\n")
-if type(inner.args[4]) == type:
+if type(inner.args[4]) is type:
s.write(" %s\n" % inner.args[4].__name__)
else:
for t in inner.args[4]:

@@ -181,7 +181,7 @@ def _quote(s):
As a special case, if given an int, returns a string containing the int,
not enclosed in quotes.
"""
-if type(s) == int:
+if type(s) is int:
return f"{s}"

# Empty strings need to be quoted to have any significance

@@ -8,7 +8,8 @@ import six


def toolchain_task_definitions():
-import gecko_taskgraph  # noqa: triggers override of the `graph_config_schema`
+# triggers override of the `graph_config_schema` noqa
+import gecko_taskgraph  # noqa
from taskgraph.generator import load_tasks_for_kind

# Don't import globally to allow this module being imported without

@@ -191,8 +191,8 @@ class FileAvoidWrite(BytesIO):
def __init__(self, filename, capture_diff=False, dry_run=False, readmode="r"):
BytesIO.__init__(self)
self.name = filename
-assert type(capture_diff) == bool
-assert type(dry_run) == bool
+assert type(capture_diff) is bool
+assert type(dry_run) is bool
assert "r" in readmode
self._capture_diff = capture_diff
self._write_to_file = not dry_run

@@ -345,7 +345,7 @@ MANIFESTS_TYPES = dict(
[
(c.type, c)
for c in globals().values()
-if type(c) == type
+if type(c) is type
and issubclass(c, ManifestEntry)
and hasattr(c, "type")
and c.type

@@ -41,7 +41,7 @@ class TestPkg(TestWithTmpDir):

def test_get_apple_template(self):
tmpl = get_apple_template("Distribution.template")
-assert type(tmpl) == Template
+assert type(tmpl) is Template

def test_get_apple_template_not_file(self):
with self.assertRaises(Exception):

@@ -45,7 +45,7 @@ def test_notebookupload_with_filter(notebook, no_filter):

if no_filter:
args, kwargs = notebook.call_args_list[0]
-assert type(kwargs["data"][0]["data"][0]["value"]) == str
+assert type(kwargs["data"][0]["data"][0]["value"]) is str
else:
for call in notebook.call_args_list:
args, kwargs = call

@@ -13,7 +13,7 @@ def has_pkg_section(p, section):
has_section = section in p.keys()
if has_section:
for pkg in p[section]:
-if type(pkg) == str:
+if type(pkg) is str:
yield pkg
else:
yield from has_pkg_section(pkg, next(iter(pkg.keys())))

@@ -400,7 +400,7 @@ class SnapTests(SnapTestsBase):
video = self._wait.until(
EC.visibility_of_element_located((By.CLASS_NAME, "html5-main-video"))
)
-self._wait.until(lambda d: type(video.get_property("duration")) == float)
+self._wait.until(lambda d: type(video.get_property("duration")) is float)
self._logger.info("video duration: {}".format(video.get_property("duration")))
assert (
video.get_property("duration") > exp["duration"]

@@ -470,7 +470,7 @@ class SnapTests(SnapTestsBase):
(By.CSS_SELECTOR, "video.html5-main-video")
)
)
-self._wait.until(lambda d: type(video.get_property("duration")) == float)
+self._wait.until(lambda d: type(video.get_property("duration")) is float)
self._logger.info("video duration: {}".format(video.get_property("duration")))
assert (
video.get_property("duration") > exp["duration"]

@@ -58,7 +58,7 @@ class QATests(SnapTestsBase):
(By.CSS_SELECTOR, video_selector or "video")
)
)
-self._wait.until(lambda d: type(video.get_property("duration")) == float)
+self._wait.until(lambda d: type(video.get_property("duration")) is float)
assert video.get_property("duration") > 0.0, "<video> duration null"

# For HE-AAC page, Google Drive does not like SPACE

@@ -55,8 +55,9 @@ def register(graph_config):

del registry["skip-unless-changed"]

-from gecko_taskgraph import (  # noqa: trigger target task method registration
-morph,
+from gecko_taskgraph import (  # noqa
+# trigger target task method registration
+morph,  # noqa
filter_tasks,
target_tasks,
)

@@ -64,7 +65,9 @@ def register(graph_config):
android_taskgraph.register(graph_config)

from gecko_taskgraph.parameters import register_parameters
-from gecko_taskgraph.util import dependencies  # noqa: trigger group_by registration
+# trigger group_by registration
+from gecko_taskgraph.util import dependencies  # noqa
from gecko_taskgraph.util.verify import verifications

# Don't use the upstream verifications, and replace them with our own.

@@ -287,7 +287,8 @@ def create_tasks(
If you wish to create the tasks in a new group, leave out decision_task_id.

Returns an updated label_to_taskid containing the new tasks"""
-import gecko_taskgraph.optimize  # noqa: triggers registration of strategies
+# triggers registration of strategies
+import gecko_taskgraph.optimize  # noqa

if suffix != "":
suffix = f"-{suffix}"
|
||||
es_ = [
|
||||
e
|
||||
for e in locals().values()
|
||||
if type(e) == type and issubclass(e, MarionetteException)
|
||||
if type(e) is type and issubclass(e, MarionetteException)
|
||||
]
|
||||
by_string = {e.status: e for e in es_}
|
||||
|
||||
|
||||
@@ -1729,13 +1729,13 @@ class Marionette(object):
|
||||
wrapped = {}
|
||||
for arg in args:
|
||||
wrapped[arg] = self._to_json(args[arg])
|
||||
elif type(args) == WebElement:
|
||||
elif type(args) is WebElement:
|
||||
wrapped = {WEB_ELEMENT_KEY: args.id}
|
||||
elif type(args) == ShadowRoot:
|
||||
elif type(args) is ShadowRoot:
|
||||
wrapped = {WEB_SHADOW_ROOT_KEY: args.id}
|
||||
elif type(args) == WebFrame:
|
||||
elif type(args) is WebFrame:
|
||||
wrapped = {WEB_FRAME_KEY: args.id}
|
||||
elif type(args) == WebWindow:
|
||||
elif type(args) is WebWindow:
|
||||
wrapped = {WEB_WINDOW_KEY: args.id}
|
||||
elif isinstance(args, (bool, int, float, six.string_types)) or args is None:
|
||||
wrapped = args
|
||||
|
||||
@@ -1244,7 +1244,7 @@ class MochitestDesktop(object):
|
||||
- create it if it does
|
||||
Removal of those directories is handled in cleanup()
|
||||
"""
|
||||
if type(extraTestsDirs) != list:
|
||||
if type(extraTestsDirs) is not list:
|
||||
return
|
||||
|
||||
for d in extraTestsDirs:
|
||||
|
||||
@@ -50,7 +50,7 @@ def test_logging_enabled(request_log):
|
||||
log_entry = request_log[0]
|
||||
assert log_entry["method"] == "GET"
|
||||
assert log_entry["path"] == "/"
|
||||
assert type(log_entry["time"]) == float
|
||||
assert type(log_entry["time"]) is float
|
||||
|
||||
|
||||
@log_requests(False)
|
||||
|
||||
@@ -154,7 +154,7 @@ class Preferences(object):
|
||||
if type(prefs) not in [list, dict]:
|
||||
raise PreferencesReadError("Malformed preferences: %s" % path)
|
||||
if isinstance(prefs, list):
|
||||
if [i for i in prefs if type(i) != list or len(i) != 2]:
|
||||
if [i for i in prefs if type(i) is not list or len(i) != 2]:
|
||||
raise PreferencesReadError("Malformed preferences: %s" % path)
|
||||
values = [i[1] for i in prefs]
|
||||
elif isinstance(prefs, dict):
|
||||
|
||||
@@ -11,18 +11,14 @@ except ImportError:
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import tempfile
|
||||
from collections import defaultdict
|
||||
|
||||
import manifestupdate
|
||||
import mozpack.path as mozpath
|
||||
import mozunit
|
||||
import pytest
|
||||
from mozbuild.base import MozbuildObject
|
||||
from mozbuild.frontend.reader import BuildReader
|
||||
from mozbuild.test.common import MockConfig
|
||||
from mozfile import NamedTemporaryFile
|
||||
from moztest.resolve import (
|
||||
TEST_SUITES,
|
||||
BuildBackendLoader,
|
||||
|
||||
@@ -530,12 +530,12 @@ class VirtualenvMixin(object):
|
||||
)
|
||||
|
||||
if debug_exe_dir.exists():
|
||||
for executable in {
|
||||
for executable in (
|
||||
"python.exe",
|
||||
"python_d.exe",
|
||||
"pythonw.exe",
|
||||
"pythonw_d.exe",
|
||||
}:
|
||||
):
|
||||
expected_python_debug_exe = debug_exe_dir / executable
|
||||
if not expected_python_debug_exe.exists():
|
||||
shutil.copy(
|
||||
|
||||
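
The VirtualenvMixin hunk above swaps a set literal for a tuple in a for loop (presumably ruff's iteration-over-set check): when a literal collection exists only to be iterated, set semantics buy nothing and the element order becomes an implementation detail, while a tuple keeps the loop deterministic. A minimal before/after sketch:

    # Set literal: hashes every element for a container that is never tested
    # for membership, and has no guaranteed iteration order.
    for name in {"python.exe", "python_d.exe"}:
        print(name)

    # Tuple literal, as in the patch: same loop, fixed order, no hashing.
    for name in ("python.exe", "python_d.exe"):
        print(name)
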
@@ -2032,7 +2032,7 @@ def PreScriptAction(action=None):
func._pre_action_listener = None
return func

-if type(action) == type(_wrapped):
+if type(action) is type(_wrapped):
return _wrapped_none(action)

return _wrapped

@@ -2063,7 +2063,7 @@ def PostScriptAction(action=None):
func._post_action_listener = None
return func

-if type(action) == type(_wrapped):
+if type(action) is type(_wrapped):
return _wrapped_none(action)

return _wrapped

@@ -2189,7 +2189,7 @@ class BaseScript(ScriptMixin, LogMixin, object):
item = getattr(self, name)
else:
item = inspect.getattr_static(self, name)
-if type(item) == property:
+if type(item) is property:
item = None
else:
item = getattr(self, name)

@@ -296,7 +296,7 @@ class TestingMixin(
)

for key, value in self.config.items():
-if type(value) == str and value.startswith("http"):
+if type(value) is str and value.startswith("http"):
self.config[key] = _replace_url(value, c["replace_urls"])

# Any changes to c means that we need credentials

@@ -35,7 +35,7 @@ def _get_raptor_val(mdict, mname, retval=False):
# mdict: a dictionary to look through to find the mname
# value.

-if type(mname) != list:
+if type(mname) is not list:
if mname in mdict:
return mdict[mname]
return retval

@@ -679,7 +679,7 @@ class BrowsertimeResultsHandler(PerftestResultsHandler):
# mdict: a dictionary to look through to find the mname
# value.

-if type(mname) != list:
+if type(mname) is not list:
if mname in mdict:
return mdict[mname]
return retval

@@ -250,7 +250,7 @@ class TestInfoReport(TestInfo):
# returns multiple records for the same test; that can happen if the report
# sometimes maps more than one ActiveData record to the same path.
new_value = item.get(label, 0) + value
-if type(new_value) == int:
+if type(new_value) is int:
item[label] = new_value
else:
item[label] = float(round(new_value, 2))  # pylint: disable=W1633

@@ -554,11 +554,11 @@ class Client:
out.append({"type": "undefined"})
continue
t = type(arg)
-if t == int or t == float:
+if t is int or t is float:
out.append({"type": "number", "value": arg})
-elif t == bool:
+elif t is bool:
out.append({"type": "boolean", "value": arg})
-elif t == str:
+elif t is str:
out.append({"type": "string", "value": arg})
else:
if "type" in arg:

@@ -571,7 +571,7 @@ class Client:
def __init__(self, client, script, target):
self.client = client
self.script = script
-if type(target) == list:
+if type(target) is list:
self.target = target[0]
else:
self.target = target

@@ -335,7 +335,7 @@ class Linter(visitor.Visitor):
# Store the variable used for the SelectExpression, excluding functions
# like PLATFORM()
if (
-type(node.selector) == ast.VariableReference
+type(node.selector) is ast.VariableReference
and node.selector.id.name not in self.state["variables"]
):
self.state["variables"].append(node.selector.id.name)

@@ -64,7 +64,11 @@ class TableBuilder(object):
self.add_rows(self.headers)

def add_rows(self, rows):
-if type(rows) != list or type(rows[0]) != list or type(rows[0][0]) != str:
+if (
+type(rows) is not list
+or type(rows[0]) is not list
+or type(rows[0][0]) is not str
+):
raise TypeError("add_rows() requires a two-dimensional list of strings.")
for row in rows:
self.add_row(row)

@@ -197,7 +197,7 @@ class RaptorGatherer(FrameworkGatherer):

def _get_ci_tasks(self):
for task in self._taskgraph.keys():
-if type(self._taskgraph[task]) == dict:
+if type(self._taskgraph[task]) is dict:
command = self._taskgraph[task]["task"]["payload"].get("command", [])
run_on_projects = self._taskgraph[task]["attributes"]["run_on_projects"]
else:

@@ -521,7 +521,7 @@ class TalosGatherer(FrameworkGatherer):
for task_name in self._taskgraph.keys():
task = self._taskgraph[task_name]

-if type(task) == dict:
+if type(task) is dict:
is_talos = task["task"]["extra"].get("suite", [])
command = task["task"]["payload"].get("command", [])
run_on_projects = task["attributes"]["run_on_projects"]

@@ -646,7 +646,7 @@ class AwsyGatherer(FrameworkGatherer):
for task_name in self._taskgraph.keys():
task = self._taskgraph[task_name]

-if type(task) == dict:
+if type(task) is dict:
awsy_test = task["task"]["extra"].get("suite", [])
run_on_projects = task["attributes"]["run_on_projects"]
else:

@@ -104,7 +104,7 @@ class Gatherer(object):
matched["static"].append(file)

# Append to structdocs if all the searched files were found
-if all(val for val in matched.values() if not type(val) == list):
+if all(val for val in matched.values() if type(val) is not list):
self._perfdocs_tree.append(matched)

logger.log(

@@ -45,7 +45,7 @@ class PerfDocLogger(object):
:param list/str files: The file(s) that this warning is about.
:param boolean restricted: If the param is False, the lint error can be used anywhere.
"""
-if type(files) != list:
+if type(files) is not list:
files = [files]

if len(files) == 0:

@@ -492,11 +492,11 @@ def write_interface(iface, fd):
names.add(name)

for m in iface.members:
-if type(m) == xpidl.Attribute:
+if type(m) is xpidl.Attribute:
record_name(attributeNativeName(m, getter=True))
if not m.readonly:
record_name(attributeNativeName(m, getter=False))
-elif type(m) == xpidl.Method:
+elif type(m) is xpidl.Method:
record_name(methodNativeName(m))

def write_const_decls(g):

@@ -588,7 +588,7 @@ def write_interface(iface, fd):

entries = []
for member in iface.members:
-if type(member) == xpidl.Attribute:
+if type(member) is xpidl.Attribute:
entries.append(
vtable_entry_tmpl
% {

@@ -605,7 +605,7 @@ def write_interface(iface, fd):
}
)

-elif type(member) == xpidl.Method:
+elif type(member) is xpidl.Method:
entries.append(
vtable_entry_tmpl
% {

@@ -626,7 +626,7 @@ def write_interface(iface, fd):
# Get all of the constants
consts = []
for member in iface.members:
-if type(member) == xpidl.ConstMember:
+if type(member) is xpidl.ConstMember:
consts.append(
const_wrapper_tmpl
% {

@@ -636,7 +636,7 @@ def write_interface(iface, fd):
"val": member.getValue(),
}
)
-if type(member) == xpidl.CEnum:
+if type(member) is xpidl.CEnum:
for var in member.variants:
consts.append(
const_wrapper_tmpl

@@ -650,7 +650,7 @@ def write_interface(iface, fd):

methods = []
for member in iface.members:
-if type(member) == xpidl.Attribute:
+if type(member) is xpidl.Attribute:
methods.append(
method_wrapper_tmpl
% {

@@ -669,7 +669,7 @@ def write_interface(iface, fd):
}
)

-elif type(member) == xpidl.Method:
+elif type(member) is xpidl.Method:
methods.append(
method_wrapper_tmpl
% {

@@ -59,14 +59,14 @@ def write_interface(iface, fd):
try:
methods = ""
for member in iface.members:
-if type(member) == xpidl.Attribute:
+if type(member) is xpidl.Attribute:
methods += "/* %s */\n" % member.toIDL()
methods += "%s,\n" % attrAsMethodStruct(iface, member, True)
if not member.readonly:
methods += "%s,\n" % attrAsMethodStruct(iface, member, False)
methods += "\n"

-elif type(member) == xpidl.Method:
+elif type(member) is xpidl.Method:
methods += "/* %s */\n" % member.toIDL()
methods += "%s,\n\n" % methodAsMethodStruct(iface, member)
fd.write(

@@ -21,7 +21,7 @@ def indented(s):


def cpp(v):
-if type(v) == bool:
+if type(v) is bool:
return "true" if v else "false"
return str(v)