Bug 1918098 - ruff: fix the errors identified by 0.6.4 r=linter-reviewers,taskgraph-reviewers,releng-reviewers,webdriver-reviewers,perftest-reviewers,migration-reviewers,jmaher,whimboo,sparky,xpcom-reviewers,beth,ahal,mconley
Differential Revision: https://phabricator.services.mozilla.com/D221874
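Note: the bulk of the hunks below replace equality comparisons against types with identity checks (ruff's E721 rule); the remainder clean up `# noqa` directives that carried free text instead of rule codes, drop unused imports, turn single-name `__slots__` declarations into explicit tuples, and rename duplicated helper classes. A minimal sketch of the E721 pattern, using a hypothetical `value` variable rather than code from this patch:

# Minimal sketch of the E721 fix pattern applied throughout this patch.
# `value` is a hypothetical example, not code from the Mozilla tree.
value = "duration"

if type(value) == str:      # flagged by ruff E721
    pass

if type(value) is str:      # exact-type identity check, as used in these hunks
    pass

if isinstance(value, str):  # alternative when subclasses should also match
    pass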
@@ -306,7 +306,7 @@ class TestFirefoxRefresh(MarionetteTestCase):
             """,
             script_args=(self._historyURL,),
         )
-        if type(historyResult) == str:
+        if type(historyResult) is str:
            self.fail(historyResult)
            return
 
@@ -322,7 +322,7 @@ class TestFirefoxRefresh(MarionetteTestCase):
             """,
             script_args=(self._formHistoryFieldName,),
         )
-        if type(formFieldResults) == str:
+        if type(formFieldResults) is str:
            self.fail(formFieldResults)
            return
 
@@ -357,7 +357,7 @@ class TestFirefoxRefresh(MarionetteTestCase):
                 }).then(resolve);
             """,
         )
-        if type(formAutofillResults) == str:
+        if type(formAutofillResults) is str:
            self.fail(formAutofillResults)
            return
 
@@ -465,7 +465,7 @@ class TestFirefoxRefresh(MarionetteTestCase):
             });
         """
         )
-        if type(result) != dict:
+        if type(result) is not dict:
            self.fail(result)
            return
         self.assertEqual(result["accountData"]["email"], "test@test.com")
@@ -556,7 +556,7 @@ def main():
         elif value is None:
             if key in config:
                 del config[key]
-        elif type(old_value) != type(value):
+        elif type(old_value) is not type(value):
             raise Exception(
                 "{} is overriding `{}` with a value of the wrong type".format(
                     c.name, key
@@ -33,7 +33,7 @@ U = TypeVar("U")
 def cross_combine(*args_tup: list[dict]) -> list[dict]:
     args = list(args_tup)
     for i, a in enumerate(args):
-        assert type(a) == list, f"Arg{i} is {type(a)}, expected {list}."
+        assert type(a) is list, f"Arg{i} is {type(a)}, expected {list}."
 
 def cross_combine2(listA, listB):
     listC = []
@@ -172,7 +172,7 @@ def writeMappingsBinarySearchBody(
     # Sort the subtags by length. That enables using an optimized comparator
     # for the binary search, which only performs a single |memcmp| for multiple
     # of two subtag lengths.
-    mappings_keys = mappings.keys() if type(mappings) == dict else mappings
+    mappings_keys = mappings.keys() if type(mappings) is dict else mappings
     for length, subtags in groupby(sorted(mappings_keys, key=len), len):
         # Omit the length check if the current length is the maximum length.
         if length != tag_maxlength:
@@ -203,7 +203,7 @@ def writeMappingsBinarySearchBody(
 
         # Don't emit a binary search for short lists.
         if len(subtags) == 1:
-            if type(mappings) == dict:
+            if type(mappings) is dict:
                 println(
                     """
                     if ({}) {{
@@ -228,7 +228,7 @@ def writeMappingsBinarySearchBody(
                     )
                 )
         elif len(subtags) <= 4:
-            if type(mappings) == dict:
+            if type(mappings) is dict:
                 for subtag in subtags:
                     println(
                         """
@@ -265,7 +265,7 @@ def writeMappingsBinarySearchBody(
         else:
             write_array(subtags, source_name + "s", length, True)
 
-        if type(mappings) == dict:
+        if type(mappings) is dict:
             write_array([mappings[k] for k in subtags], "aliases", length, False)
 
         println(
@@ -150,7 +150,7 @@ def sourcelink(symbol=None, loc=None, range=None):
 
 
 def quoted_dict(d):
-    return {k: escape(v) for k, v in d.items() if type(v) == str}
+    return {k: escape(v) for k, v in d.items() if type(v) is str}
 
 
 num_hazards = 0
@@ -1,6 +1,5 @@
 # flake8: noqa: F821
 
-from collections import defaultdict
 
 test.compile("source.cpp")
 test.run_analysis_script()
@@ -29,7 +29,7 @@ try:
     # testlibdir is set on the GDB command line, via:
     # --eval-command python testlibdir=...
     execfile(os.path.join(testlibdir, "prologue.py"), globals(), locals())
-except Exception as err:
+except Exception:
     sys.stderr.write("Error running GDB prologue:\n")
     traceback.print_exc()
     sys.exit(1)
@@ -5,7 +5,7 @@ import mozilla.prettyprinters
 
 
 @mozilla.prettyprinters.pretty_printer("unscoped_no_storage")
-class my_typedef(object):
+class UnscopedNoStoragePrinter(object):
     def __init__(self, value, cache):
         pass
 
@@ -14,7 +14,7 @@ class my_typedef(object):
 
 
 @mozilla.prettyprinters.pretty_printer("unscoped_with_storage")
-class my_typedef(object):
+class UnscopedWithStoragePrinter(object):
     def __init__(self, value, cache):
         pass
 
@@ -23,7 +23,7 @@ class my_typedef(object):
 
 
 @mozilla.prettyprinters.pretty_printer("scoped_no_storage")
-class my_typedef(object):
+class ScopedNoStoragePrinter(object):
     def __init__(self, value, cache):
         pass
 
@@ -32,7 +32,7 @@ class my_typedef(object):
 
 
 @mozilla.prettyprinters.pretty_printer("scoped_with_storage")
-class my_typedef(object):
+class ScopedWithStoragePrinter(object):
     def __init__(self, value, cache):
         pass
 
@@ -311,7 +311,7 @@ def convertTestFile(test262parser, testSource, testName, includeSet, strictTests
     # currently ignoring the error phase attribute.
    # testRec["negative"] == {type=<error name>, phase=parse|resolution|runtime}
    isNegative = "negative" in testRec
-    assert not isNegative or type(testRec["negative"]) == dict
+    assert not isNegative or type(testRec["negative"]) is dict
    errorType = testRec["negative"]["type"] if isNegative else None
 
    # Async tests are marked with the "async" attribute.
@@ -156,7 +156,7 @@ def check_pref_list(pref_list):
         if "name" not in pref:
             error("missing `name` key")
         name = pref["name"]
-        if type(name) != str:
+        if type(name) is not str:
             error("non-string `name` value `{}`".format(name))
         if "." not in name:
             error("`name` value `{}` lacks a '.'".format(name))
@@ -185,7 +185,7 @@ def check_pref_list(pref_list):
             error("missing `value` key for pref `{}`".format(name))
         value = pref["value"]
         if typ == "String" or typ == "DataMutexString":
-            if type(value) != str:
+            if type(value) is not str:
                 error(
                     "non-string `value` value `{}` for `{}` pref `{}`; "
                     "add double quotes".format(value, typ, name)
@@ -206,7 +206,7 @@ def check_pref_list(pref_list):
         # Check 'do_not_use_directly' if present.
         if "do_not_use_directly" in pref:
             do_not_use_directly = pref["do_not_use_directly"]
-            if type(do_not_use_directly) != bool:
+            if type(do_not_use_directly) is not bool:
                 error(
                     "non-boolean `do_not_use_directly` value `{}` for pref "
                     "`{}`".format(do_not_use_directly, name)
@@ -220,7 +220,7 @@ def check_pref_list(pref_list):
         # Check 'include' if present.
         if "include" in pref:
             include = pref["include"]
-            if type(include) != str:
+            if type(include) is not str:
                 error(
                     "non-string `include` value `{}` for pref `{}`".format(
                         include, name
@@ -235,7 +235,7 @@ def check_pref_list(pref_list):
         # Check 'rust' if present.
         if "rust" in pref:
             rust = pref["rust"]
-            if type(rust) != bool:
+            if type(rust) is not bool:
                 error("non-boolean `rust` value `{}` for pref `{}`".format(rust, name))
             if rust and mirror == "never":
                 error(
@@ -7,7 +7,6 @@ from textwrap import TextWrapper
 from mach.config import TYPE_CLASSES
 from mach.decorators import Command, CommandArgument
 
-
 # Interact with settings for mach.
 
 # Currently, we only provide functionality to view what settings are
@@ -168,7 +168,7 @@ class MozSiteMetadata:
 
     def __eq__(self, other):
         return (
-            type(self) == type(other)
+            type(self) is type(other)
             and self.hex_version == other.hex_version
             and self.site_name == other.site_name
             and self.mach_site_packages_source == other.mach_site_packages_source
@@ -818,7 +818,7 @@ def update_git_tools(git: Optional[Path], root_state_dir: Path):
             os.chmod(path, stat.S_IRWXU)
             func(path)
         else:
-            raise
+            raise exc
 
     shutil.rmtree(str(cinnabar_dir), onerror=onerror)
 
@@ -9,7 +9,6 @@ from mach.decorators import Command, CommandArgument
 from mozbuild.shellutil import quote as shell_quote
 from mozbuild.shellutil import split as shell_split
 
-
 # Instropection commands.
 
 
@@ -210,7 +210,7 @@ class LintSandbox(ConfigureSandbox):
         name = args[0]
         default = kwargs.get("default")
 
-        if type(default) != bool:
+        if type(default) is not bool:
             return
 
         table = {
@@ -94,12 +94,12 @@ class OptionValue(tuple):
         )
 
         # Allow explicit tuples to be compared.
-        if type(other) == tuple:
+        if type(other) is tuple:
             return tuple.__eq__(self, other)
         elif isinstance(other, bool):
             return bool(self) == other
         # Else we're likely an OptionValue class.
-        elif type(other) != type(self):
+        elif type(other) is not type(self):
             return False
         else:
             return super(OptionValue, self).__eq__(other)
@@ -2604,7 +2604,7 @@ VARIABLES = {
 
 # Sanity check: we don't want any variable above to have a list as storage type.
 for name, (storage_type, input_types, docs) in VARIABLES.items():
-    if storage_type == list:
+    if storage_type is list:
         raise RuntimeError('%s has a "list" storage type. Use "List" instead.' % name)
 
 # Set of variables that are only allowed in templates:
@@ -148,7 +148,7 @@ class VariablePassthru(ContextDerived):
     in our build backends since we will continue to be tied to our rules.mk.
     """
 
-    __slots__ = "variables"
+    __slots__ = ("variables",)
 
     def __init__(self, context):
         ContextDerived.__init__(self, context)
@@ -197,7 +197,7 @@ class BaseDefines(ContextDerived):
     which are OrderedDicts.
     """
 
-    __slots__ = "defines"
+    __slots__ = ("defines",)
 
     def __init__(self, context, defines):
         ContextDerived.__init__(self, context)
@@ -477,7 +477,7 @@ class BaseProgram(Linkable):
     Otherwise, the suffix is appended to the program name.
     """
 
-    __slots__ = "program"
+    __slots__ = ("program",)
 
     DICT_ATTRS = {"install_target", "KIND", "program", "relobjdir"}
 
@@ -1201,7 +1201,7 @@ class FinalTargetFiles(ContextDerived):
     HierarchicalStringList, which is created when parsing FINAL_TARGET_FILES.
     """
 
-    __slots__ = "files"
+    __slots__ = ("files",)
 
     def __init__(self, sandbox, files):
         ContextDerived.__init__(self, sandbox)
@@ -1218,7 +1218,7 @@ class FinalTargetPreprocessedFiles(ContextDerived):
     FINAL_TARGET_PP_FILES.
     """
 
-    __slots__ = "files"
+    __slots__ = ("files",)
 
     def __init__(self, sandbox, files):
         ContextDerived.__init__(self, sandbox)
@@ -799,7 +799,7 @@ class BuildReaderError(Exception):
             s.write("\n")
             s.write("This variable expects the following type(s):\n")
             s.write("\n")
-            if type(inner.args[4]) == type:
+            if type(inner.args[4]) is type:
                 s.write(" %s\n" % inner.args[4].__name__)
             else:
                 for t in inner.args[4]:
@@ -181,7 +181,7 @@ def _quote(s):
     As a special case, if given an int, returns a string containing the int,
     not enclosed in quotes.
     """
-    if type(s) == int:
+    if type(s) is int:
         return f"{s}"
 
     # Empty strings need to be quoted to have any significance
@@ -8,7 +8,8 @@ import six
 
 
 def toolchain_task_definitions():
-    import gecko_taskgraph  # noqa: triggers override of the `graph_config_schema`
+    # triggers override of the `graph_config_schema`
+    import gecko_taskgraph  # noqa
     from taskgraph.generator import load_tasks_for_kind
 
     # Don't import globally to allow this module being imported without
@@ -191,8 +191,8 @@ class FileAvoidWrite(BytesIO):
     def __init__(self, filename, capture_diff=False, dry_run=False, readmode="r"):
         BytesIO.__init__(self)
         self.name = filename
-        assert type(capture_diff) == bool
-        assert type(dry_run) == bool
+        assert type(capture_diff) is bool
+        assert type(dry_run) is bool
         assert "r" in readmode
         self._capture_diff = capture_diff
         self._write_to_file = not dry_run
@@ -345,7 +345,7 @@ MANIFESTS_TYPES = dict(
     [
         (c.type, c)
         for c in globals().values()
-        if type(c) == type
+        if type(c) is type
         and issubclass(c, ManifestEntry)
         and hasattr(c, "type")
         and c.type
@@ -41,7 +41,7 @@ class TestPkg(TestWithTmpDir):
 
     def test_get_apple_template(self):
         tmpl = get_apple_template("Distribution.template")
-        assert type(tmpl) == Template
+        assert type(tmpl) is Template
 
     def test_get_apple_template_not_file(self):
         with self.assertRaises(Exception):
@@ -45,7 +45,7 @@ def test_notebookupload_with_filter(notebook, no_filter):
 
     if no_filter:
         args, kwargs = notebook.call_args_list[0]
-        assert type(kwargs["data"][0]["data"][0]["value"]) == str
+        assert type(kwargs["data"][0]["data"][0]["value"]) is str
     else:
         for call in notebook.call_args_list:
             args, kwargs = call
@@ -13,7 +13,7 @@ def has_pkg_section(p, section):
     has_section = section in p.keys()
     if has_section:
         for pkg in p[section]:
-            if type(pkg) == str:
+            if type(pkg) is str:
                 yield pkg
             else:
                 yield from has_pkg_section(pkg, next(iter(pkg.keys())))
@@ -400,7 +400,7 @@ class SnapTests(SnapTestsBase):
         video = self._wait.until(
             EC.visibility_of_element_located((By.CLASS_NAME, "html5-main-video"))
         )
-        self._wait.until(lambda d: type(video.get_property("duration")) == float)
+        self._wait.until(lambda d: type(video.get_property("duration")) is float)
         self._logger.info("video duration: {}".format(video.get_property("duration")))
         assert (
             video.get_property("duration") > exp["duration"]
@@ -470,7 +470,7 @@ class SnapTests(SnapTestsBase):
                 (By.CSS_SELECTOR, "video.html5-main-video")
             )
         )
-        self._wait.until(lambda d: type(video.get_property("duration")) == float)
+        self._wait.until(lambda d: type(video.get_property("duration")) is float)
         self._logger.info("video duration: {}".format(video.get_property("duration")))
         assert (
             video.get_property("duration") > exp["duration"]
@@ -58,7 +58,7 @@ class QATests(SnapTestsBase):
                 (By.CSS_SELECTOR, video_selector or "video")
             )
         )
-        self._wait.until(lambda d: type(video.get_property("duration")) == float)
+        self._wait.until(lambda d: type(video.get_property("duration")) is float)
         assert video.get_property("duration") > 0.0, "<video> duration null"
 
         # For HE-AAC page, Google Drive does not like SPACE
@@ -55,8 +55,9 @@ def register(graph_config):
 
     del registry["skip-unless-changed"]
 
-    from gecko_taskgraph import (  # noqa: trigger target task method registration
-        morph,
+    from gecko_taskgraph import (  # noqa
+        # trigger target task method registration
+        morph,  # noqa
         filter_tasks,
         target_tasks,
     )
@@ -64,7 +65,9 @@ def register(graph_config):
     android_taskgraph.register(graph_config)
 
     from gecko_taskgraph.parameters import register_parameters
-    from gecko_taskgraph.util import dependencies  # noqa: trigger group_by registration
+
+    # trigger group_by registration
+    from gecko_taskgraph.util import dependencies  # noqa
     from gecko_taskgraph.util.verify import verifications
 
     # Don't use the upstream verifications, and replace them with our own.
@@ -287,7 +287,8 @@ def create_tasks(
     If you wish to create the tasks in a new group, leave out decision_task_id.
 
     Returns an updated label_to_taskid containing the new tasks"""
-    import gecko_taskgraph.optimize  # noqa: triggers registration of strategies
+    # triggers registration of strategies
+    import gecko_taskgraph.optimize  # noqa
 
     if suffix != "":
         suffix = f"-{suffix}"
@@ -192,7 +192,7 @@ class UnresponsiveInstanceException(Exception):
 es_ = [
     e
     for e in locals().values()
-    if type(e) == type and issubclass(e, MarionetteException)
+    if type(e) is type and issubclass(e, MarionetteException)
 ]
 by_string = {e.status: e for e in es_}
 
@@ -1729,13 +1729,13 @@ class Marionette(object):
             wrapped = {}
             for arg in args:
                 wrapped[arg] = self._to_json(args[arg])
-        elif type(args) == WebElement:
+        elif type(args) is WebElement:
             wrapped = {WEB_ELEMENT_KEY: args.id}
-        elif type(args) == ShadowRoot:
+        elif type(args) is ShadowRoot:
             wrapped = {WEB_SHADOW_ROOT_KEY: args.id}
-        elif type(args) == WebFrame:
+        elif type(args) is WebFrame:
             wrapped = {WEB_FRAME_KEY: args.id}
-        elif type(args) == WebWindow:
+        elif type(args) is WebWindow:
             wrapped = {WEB_WINDOW_KEY: args.id}
         elif isinstance(args, (bool, int, float, six.string_types)) or args is None:
             wrapped = args
@@ -1244,7 +1244,7 @@ class MochitestDesktop(object):
         - create it if it does
         Removal of those directories is handled in cleanup()
         """
-        if type(extraTestsDirs) != list:
+        if type(extraTestsDirs) is not list:
             return
 
         for d in extraTestsDirs:
@@ -50,7 +50,7 @@ def test_logging_enabled(request_log):
     log_entry = request_log[0]
     assert log_entry["method"] == "GET"
     assert log_entry["path"] == "/"
-    assert type(log_entry["time"]) == float
+    assert type(log_entry["time"]) is float
 
 
 @log_requests(False)
@@ -154,7 +154,7 @@ class Preferences(object):
         if type(prefs) not in [list, dict]:
             raise PreferencesReadError("Malformed preferences: %s" % path)
         if isinstance(prefs, list):
-            if [i for i in prefs if type(i) != list or len(i) != 2]:
+            if [i for i in prefs if type(i) is not list or len(i) != 2]:
                 raise PreferencesReadError("Malformed preferences: %s" % path)
             values = [i[1] for i in prefs]
         elif isinstance(prefs, dict):
@@ -11,18 +11,14 @@ except ImportError:
 import json
 import os
 import re
-import shutil
-import tempfile
 from collections import defaultdict
 
 import manifestupdate
 import mozpack.path as mozpath
 import mozunit
 import pytest
-from mozbuild.base import MozbuildObject
 from mozbuild.frontend.reader import BuildReader
 from mozbuild.test.common import MockConfig
-from mozfile import NamedTemporaryFile
 from moztest.resolve import (
     TEST_SUITES,
     BuildBackendLoader,
@@ -530,12 +530,12 @@ class VirtualenvMixin(object):
             )
 
             if debug_exe_dir.exists():
-                for executable in {
+                for executable in (
                     "python.exe",
                     "python_d.exe",
                     "pythonw.exe",
                     "pythonw_d.exe",
-                }:
+                ):
                     expected_python_debug_exe = debug_exe_dir / executable
                     if not expected_python_debug_exe.exists():
                         shutil.copy(
@@ -2032,7 +2032,7 @@ def PreScriptAction(action=None):
         func._pre_action_listener = None
         return func
 
-    if type(action) == type(_wrapped):
+    if type(action) is type(_wrapped):
         return _wrapped_none(action)
 
     return _wrapped
@@ -2063,7 +2063,7 @@ def PostScriptAction(action=None):
         func._post_action_listener = None
         return func
 
-    if type(action) == type(_wrapped):
+    if type(action) is type(_wrapped):
         return _wrapped_none(action)
 
     return _wrapped
@@ -2189,7 +2189,7 @@ class BaseScript(ScriptMixin, LogMixin, object):
                 item = getattr(self, name)
             else:
                 item = inspect.getattr_static(self, name)
-                if type(item) == property:
+                if type(item) is property:
                     item = None
                 else:
                     item = getattr(self, name)
@@ -296,7 +296,7 @@ class TestingMixin(
             )
 
             for key, value in self.config.items():
-                if type(value) == str and value.startswith("http"):
+                if type(value) is str and value.startswith("http"):
                     self.config[key] = _replace_url(value, c["replace_urls"])
 
         # Any changes to c means that we need credentials
@@ -35,7 +35,7 @@ def _get_raptor_val(mdict, mname, retval=False):
     # mdict: a dictionary to look through to find the mname
     # value.
 
-    if type(mname) != list:
+    if type(mname) is not list:
         if mname in mdict:
             return mdict[mname]
         return retval
@@ -679,7 +679,7 @@ class BrowsertimeResultsHandler(PerftestResultsHandler):
         # mdict: a dictionary to look through to find the mname
         # value.
 
-        if type(mname) != list:
+        if type(mname) is not list:
             if mname in mdict:
                 return mdict[mname]
             return retval
@@ -250,7 +250,7 @@ class TestInfoReport(TestInfo):
                 # returns multiple records for the same test; that can happen if the report
                 # sometimes maps more than one ActiveData record to the same path.
                 new_value = item.get(label, 0) + value
-                if type(new_value) == int:
+                if type(new_value) is int:
                     item[label] = new_value
                 else:
                     item[label] = float(round(new_value, 2))  # pylint: disable=W1633
@@ -554,11 +554,11 @@ class Client:
                 out.append({"type": "undefined"})
                 continue
             t = type(arg)
-            if t == int or t == float:
+            if t is int or t is float:
                 out.append({"type": "number", "value": arg})
-            elif t == bool:
+            elif t is bool:
                 out.append({"type": "boolean", "value": arg})
-            elif t == str:
+            elif t is str:
                 out.append({"type": "string", "value": arg})
             else:
                 if "type" in arg:
@@ -571,7 +571,7 @@ class Client:
     def __init__(self, client, script, target):
         self.client = client
         self.script = script
-        if type(target) == list:
+        if type(target) is list:
             self.target = target[0]
         else:
             self.target = target
@@ -335,7 +335,7 @@ class Linter(visitor.Visitor):
         # Store the variable used for the SelectExpression, excluding functions
         # like PLATFORM()
         if (
-            type(node.selector) == ast.VariableReference
+            type(node.selector) is ast.VariableReference
             and node.selector.id.name not in self.state["variables"]
         ):
             self.state["variables"].append(node.selector.id.name)
@@ -64,7 +64,11 @@ class TableBuilder(object):
         self.add_rows(self.headers)
 
     def add_rows(self, rows):
-        if type(rows) != list or type(rows[0]) != list or type(rows[0][0]) != str:
+        if (
+            type(rows) is not list
+            or type(rows[0]) is not list
+            or type(rows[0][0]) is not str
+        ):
             raise TypeError("add_rows() requires a two-dimensional list of strings.")
         for row in rows:
             self.add_row(row)
@@ -197,7 +197,7 @@ class RaptorGatherer(FrameworkGatherer):
 
     def _get_ci_tasks(self):
         for task in self._taskgraph.keys():
-            if type(self._taskgraph[task]) == dict:
+            if type(self._taskgraph[task]) is dict:
                 command = self._taskgraph[task]["task"]["payload"].get("command", [])
                 run_on_projects = self._taskgraph[task]["attributes"]["run_on_projects"]
             else:
@@ -521,7 +521,7 @@ class TalosGatherer(FrameworkGatherer):
         for task_name in self._taskgraph.keys():
             task = self._taskgraph[task_name]
 
-            if type(task) == dict:
+            if type(task) is dict:
                 is_talos = task["task"]["extra"].get("suite", [])
                 command = task["task"]["payload"].get("command", [])
                 run_on_projects = task["attributes"]["run_on_projects"]
@@ -646,7 +646,7 @@ class AwsyGatherer(FrameworkGatherer):
         for task_name in self._taskgraph.keys():
             task = self._taskgraph[task_name]
 
-            if type(task) == dict:
+            if type(task) is dict:
                 awsy_test = task["task"]["extra"].get("suite", [])
                 run_on_projects = task["attributes"]["run_on_projects"]
             else:
@@ -104,7 +104,7 @@ class Gatherer(object):
                     matched["static"].append(file)
 
            # Append to structdocs if all the searched files were found
-            if all(val for val in matched.values() if not type(val) == list):
+            if all(val for val in matched.values() if type(val) is not list):
                self._perfdocs_tree.append(matched)
 
                logger.log(
@@ -45,7 +45,7 @@ class PerfDocLogger(object):
         :param list/str files: The file(s) that this warning is about.
         :param boolean restricted: If the param is False, the lint error can be used anywhere.
         """
-        if type(files) != list:
+        if type(files) is not list:
             files = [files]
 
         if len(files) == 0:
@@ -492,11 +492,11 @@ def write_interface(iface, fd):
         names.add(name)
 
     for m in iface.members:
-        if type(m) == xpidl.Attribute:
+        if type(m) is xpidl.Attribute:
             record_name(attributeNativeName(m, getter=True))
             if not m.readonly:
                 record_name(attributeNativeName(m, getter=False))
-        elif type(m) == xpidl.Method:
+        elif type(m) is xpidl.Method:
             record_name(methodNativeName(m))
 
     def write_const_decls(g):
@@ -588,7 +588,7 @@ def write_interface(iface, fd):
 
     entries = []
     for member in iface.members:
-        if type(member) == xpidl.Attribute:
+        if type(member) is xpidl.Attribute:
             entries.append(
                 vtable_entry_tmpl
                 % {
@@ -605,7 +605,7 @@ def write_interface(iface, fd):
                 }
             )
 
-        elif type(member) == xpidl.Method:
+        elif type(member) is xpidl.Method:
             entries.append(
                 vtable_entry_tmpl
                 % {
@@ -626,7 +626,7 @@ def write_interface(iface, fd):
     # Get all of the constants
     consts = []
     for member in iface.members:
-        if type(member) == xpidl.ConstMember:
+        if type(member) is xpidl.ConstMember:
             consts.append(
                 const_wrapper_tmpl
                 % {
@@ -636,7 +636,7 @@ def write_interface(iface, fd):
                     "val": member.getValue(),
                 }
             )
-        if type(member) == xpidl.CEnum:
+        if type(member) is xpidl.CEnum:
             for var in member.variants:
                 consts.append(
                     const_wrapper_tmpl
@@ -650,7 +650,7 @@ def write_interface(iface, fd):
 
     methods = []
     for member in iface.members:
-        if type(member) == xpidl.Attribute:
+        if type(member) is xpidl.Attribute:
             methods.append(
                 method_wrapper_tmpl
                 % {
@@ -669,7 +669,7 @@ def write_interface(iface, fd):
                 }
             )
 
-        elif type(member) == xpidl.Method:
+        elif type(member) is xpidl.Method:
             methods.append(
                 method_wrapper_tmpl
                 % {
@@ -59,14 +59,14 @@ def write_interface(iface, fd):
     try:
         methods = ""
         for member in iface.members:
-            if type(member) == xpidl.Attribute:
+            if type(member) is xpidl.Attribute:
                 methods += "/* %s */\n" % member.toIDL()
                 methods += "%s,\n" % attrAsMethodStruct(iface, member, True)
                 if not member.readonly:
                     methods += "%s,\n" % attrAsMethodStruct(iface, member, False)
                 methods += "\n"
 
-            elif type(member) == xpidl.Method:
+            elif type(member) is xpidl.Method:
                 methods += "/* %s */\n" % member.toIDL()
                 methods += "%s,\n\n" % methodAsMethodStruct(iface, member)
         fd.write(
@@ -21,7 +21,7 @@ def indented(s):
 
 
 def cpp(v):
-    if type(v) == bool:
+    if type(v) is bool:
         return "true" if v else "false"
     return str(v)
 