Bug 1406650 - Make build/*.py and a few other files flake8 compatible and add them to the list of files to check r=chmanchester

MozReview-Commit-ID: icmFJtbWdN
Author: Sylvestre Ledru
Date: 2017-10-07 16:45:22 +02:00
parent dffbe381e6
commit 69d68b3c4f
14 changed files with 123 additions and 77 deletions

View File

@@ -8,6 +8,7 @@
import ConfigParser
import sys
def main(output, file):
config = ConfigParser.RawConfigParser()
config.read(file)
@@ -15,14 +16,18 @@ def main(output, file):
try:
if config.getint('XRE', 'EnableProfileMigrator') == 1:
flags.add('NS_XRE_ENABLE_PROFILE_MIGRATOR')
except: pass
except:
pass
try:
if config.getint('Crash Reporter', 'Enabled') == 1:
flags.add('NS_XRE_ENABLE_CRASH_REPORTER')
except: pass
appdata = dict(("%s:%s" % (s, o), config.get(s, o)) for s in config.sections() for o in config.options(s))
except:
pass
appdata = dict(("%s:%s" % (s, o), config.get(s, o))
for s in config.sections() for o in config.options(s))
appdata['flags'] = ' | '.join(flags) if flags else '0'
appdata['App:profile'] = '"%s"' % appdata['App:profile'] if 'App:profile' in appdata else 'NULL'
appdata['App:profile'] = ('"%s"' % appdata['App:profile']
if 'App:profile' in appdata else 'NULL')
expected = ('App:vendor', 'App:name', 'App:remotingname', 'App:version', 'App:buildid',
'App:id', 'Gecko:minversion', 'Gecko:maxversion')
missing = [var for var in expected if var not in appdata]
@@ -31,7 +36,7 @@ def main(output, file):
"Missing values in %s: %s" % (file, ', '.join(missing))
sys.exit(1)
if not 'Crash Reporter:serverurl' in appdata:
if 'Crash Reporter:serverurl' not in appdata:
appdata['Crash Reporter:serverurl'] = ''
output.write('''#include "mozilla/XREAppData.h"
@@ -50,6 +55,7 @@ def main(output, file):
%(App:profile)s
};''' % appdata)
if __name__ == '__main__':
if len(sys.argv) != 1:
main(sys.stdout, sys.argv[1])
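
As an aside, a minimal sketch (not part of the patch) of the "Section:option" flattening performed by the dict comprehension above. The ini content is made up for illustration, and the Python 2 module name matches the file being patched:

    import ConfigParser
    from StringIO import StringIO

    ini = "[App]\nVendor=Example\nName=ExampleApp\n"
    config = ConfigParser.RawConfigParser()
    config.readfp(StringIO(ini))
    appdata = dict(("%s:%s" % (s, o), config.get(s, o))
                   for s in config.sections() for o in config.options(s))
    # Option names are lower-cased by ConfigParser, so this yields
    # {'App:vendor': 'Example', 'App:name': 'ExampleApp'}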

View File

@@ -209,7 +209,7 @@ def build_one_stage(cc, cxx, asm, ld, ar, ranlib, libtool,
"-DLLVM_ENABLE_THREADS=OFF",
"-DLIBCXXABI_LIBCXX_INCLUDES=%s" % libcxx_include_dir,
"-DCMAKE_OSX_SYSROOT=%s" % slashify_path(os.getenv("CROSS_SYSROOT")),
"-DCMAKE_FIND_ROOT_PATH=%s" % slashify_path(os.getenv("CROSS_CCTOOLS_PATH")),
"-DCMAKE_FIND_ROOT_PATH=%s" % slashify_path(os.getenv("CROSS_CCTOOLS_PATH")), # noqa
"-DCMAKE_FIND_ROOT_PATH_MODE_PROGRAM=NEVER",
"-DCMAKE_FIND_ROOT_PATH_MODE_LIBRARY=ONLY",
"-DCMAKE_FIND_ROOT_PATH_MODE_INCLUDE=ONLY",
@@ -391,7 +391,8 @@ if __name__ == "__main__":
if "build_type" in config:
build_type = config["build_type"]
if build_type not in ("Release", "Debug", "RelWithDebInfo", "MinSizeRel"):
raise ValueError("We only know how to do Release, Debug, RelWithDebInfo or MinSizeRel builds")
raise ValueError("We only know how to do Release, Debug, RelWithDebInfo or "
"MinSizeRel builds")
build_libcxx = False
if "build_libcxx" in config:
build_libcxx = config["build_libcxx"]
@@ -503,7 +504,8 @@ if __name__ == "__main__":
extra_ldflags = []
if 'LD_LIBRARY_PATH' in os.environ:
os.environ['LD_LIBRARY_PATH'] = '%s/lib64/:%s' % (gcc_dir, os.environ['LD_LIBRARY_PATH'])
os.environ['LD_LIBRARY_PATH'] = ('%s/lib64/:%s' %
(gcc_dir, os.environ['LD_LIBRARY_PATH']))
else:
os.environ['LD_LIBRARY_PATH'] = '%s/lib64/' % gcc_dir
elif is_windows():

View File

@@ -2,7 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import sys
from mozbuild.base import MozbuildObject
from mozbuild.backend.configenvironment import PartialConfigEnvironment

View File

@@ -13,6 +13,7 @@ try:
except:
hashlib = None
def digest_file(filename, digest, chunk_size=1024):
'''Produce a checksum for the file specified by 'filename'. 'filename'
is a string path to a file that is opened and read in this function. The
@@ -85,6 +86,7 @@ def process_files(files, output_filename, digests, strip):
os.path.getsize(file),
short_file)
def setup_logging(level=logging.DEBUG):
'''This function sets up the logging module using a speficiable logging
module logging level. The default log level is DEBUG.
@@ -103,6 +105,7 @@ def setup_logging(level=logging.DEBUG):
handler.setFormatter(formatter)
logger.addHandler(handler)
def main():
'''This is a main function that parses arguments, sets up logging
and generates a checksum file'''
@@ -122,7 +125,7 @@ def main():
dest='strip', default=os.getcwd())
options, args = parser.parse_args()
#Figure out which logging level to use
# Figure out which logging level to use
if options.verbose:
loglevel = logging.DEBUG
elif options.quiet:
@@ -130,7 +133,7 @@ def main():
else:
loglevel = logging.INFO
#Set up logging
# Set up logging
setup_logging(loglevel)
logger = logging.getLogger('checksums.py')
@@ -140,7 +143,7 @@ def main():
try:
for digest in options.digests:
hashlib.new(digest)
except ValueError, ve:
except ValueError as ve:
logger.error('Could not create a "%s" hash object (%s)' %
(digest, ve.args[0]))
exit(1)
@@ -154,5 +157,6 @@ def main():
logger.info('File "%s" was not found on the filesystem' % i)
process_files(files, options.outfile, options.digests, options.strip)
if __name__ == '__main__':
main()
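
For reference, a self-contained sketch of the chunked hashing that the digest_file() docstring above describes; only the signature is taken from the hunk, the body is illustrative:

    import hashlib

    def digest_file_sketch(filename, digest, chunk_size=1024):
        '''Hash the file in fixed-size chunks so large files never have
        to be read into memory at once, then return the hex digest.'''
        hasher = hashlib.new(digest)
        with open(filename, 'rb') as f:
            while True:
                data = f.read(chunk_size)
                if not data:
                    break
                hasher.update(data)
        return hasher.hexdigest()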

View File

@@ -39,10 +39,13 @@ OPTIONAL_PACKAGES = [
def parse_args():
parser = ArgumentParser(description='Generate a test_packages.json file to tell automation which harnesses require which test packages.')
parser = ArgumentParser(
description="Generate a test_packages.json file to tell automation which harnesses "
"require which test packages.")
parser.add_argument("--common", required=True,
action="store", dest="tests_common",
help="Name of the \"common\" archive, a package to be used by all harnesses.")
help="Name of the \"common\" archive, a package to be used by all "
"harnesses.")
parser.add_argument("--jsshell", required=True,
action="store", dest="jsshell",
help="Name of the jsshell zip.")
@@ -79,6 +82,7 @@ def generate_package_data(args):
harness_requirements[harness].append(pkg_name)
return harness_requirements
if __name__ == '__main__':
args = parse_args()
packages_data = generate_package_data(args)

View File

@@ -106,7 +106,9 @@ CATEGORIES = {
},
'disabled': {
'short': 'Disabled',
'long': 'The disabled commands are hidden by default. Use -v to display them. These commands are unavailable for your current context, run "mach <command>" to see why.',
'long': 'The disabled commands are hidden by default. Use -v to display them. '
'These commands are unavailable for your current context, '
'run "mach <command>" to see why.',
'priority': 0,
}
}
@@ -206,7 +208,7 @@ def bootstrap(topsrcdir, mozilla_dir=None):
dist = list(platform.linux_distribution())
data['system']['linux_distribution'] = dist
elif platform.system() == 'Windows':
win32_ver=list((platform.win32_ver())),
win32_ver = list((platform.win32_ver())),
data['system']['win32_ver'] = win32_ver
elif platform.system() == 'Darwin':
# mac version is a special Cupertino snowflake

View File

@@ -7,7 +7,6 @@
import argparse
import errno
import itertools
import os
import re
import subprocess
@@ -112,11 +111,10 @@ def maybe_clear_cache(data):
is_set = cache.get('ac_cv_env_%s_set' % precious) == 'set'
value = cache.get('ac_cv_env_%s_value' % precious) if is_set else None
if value != env.get(precious):
print 'Removing %s because of %s value change from:' \
% (data['cache-file'], precious)
print ' %s' % (value if value is not None else 'undefined')
print 'to:'
print ' %s' % env.get(precious, 'undefined')
print('Removing %s because of %s value change from:' % (data['cache-file'], precious))
print(' %s' % (value if value is not None else 'undefined'))
print('to:')
print(' %s' % env.get(precious, 'undefined'))
os.remove(data['cache-file'])
return True
return False
@@ -139,7 +137,6 @@ def get_config_files(data):
if not os.path.exists(config_status):
return [], []
configure = mozpath.join(data['srcdir'], 'configure')
config_files = []
command_files = []
@@ -252,7 +249,7 @@ def execute_and_prefix(*args, **kwargs):
line = proc.stdout.readline()
if not line:
break
print prefix_lines(line.rstrip(), prefix)
print(prefix_lines(line.rstrip(), prefix))
sys.stdout.flush()
return proc.wait()
@@ -330,8 +327,8 @@ def run(objdir):
# We're going to run it ourselves.
command += ['--no-create']
print prefix_lines('configuring', relobjdir)
print prefix_lines('running %s' % ' '.join(command[:-1]), relobjdir)
print(prefix_lines('configuring', relobjdir))
print(prefix_lines('running %s' % ' '.join(command[:-1]), relobjdir))
sys.stdout.flush()
returncode = execute_and_prefix(command, cwd=objdir, env=data['env'],
prefix=relobjdir)
@@ -368,7 +365,7 @@ def run(objdir):
if not skip_config_status:
if skip_configure:
print prefix_lines('running config.status', relobjdir)
print(prefix_lines('running config.status', relobjdir))
sys.stdout.flush()
ret = execute_and_prefix([data['shell'], '-c', './config.status'],
cwd=objdir, env=data['env'], prefix=relobjdir)

View File

@@ -61,7 +61,7 @@ def submit_telemetry_data(statedir):
now = time.time()
for filename in os.listdir(submitted):
ctime = os.stat(os.path.join(submitted, filename)).st_ctime
if now - ctime >= 60*60*24*30:
if now - ctime >= 60 * 60 * 24 * 30:
os.remove(os.path.join(submitted, filename))
return 0

View File

@@ -31,7 +31,8 @@
# to indicate that files should be uploaded including their paths relative
# to the base path.
import sys, os
import sys
import os
import re
import json
import errno
@@ -47,6 +48,7 @@ from subprocess import (
import concurrent.futures as futures
import redo
def OptionalEnvironmentVariable(v):
"""Return the value of the environment variable named v, or None
if it's unset (or empty)."""
@@ -54,6 +56,7 @@ def OptionalEnvironmentVariable(v):
return os.environ[v]
return None
def FixupMsysPath(path):
"""MSYS helpfully translates absolute pathnames in environment variables
and commandline arguments into Windows native paths. This sucks if you're
@@ -69,6 +72,7 @@ def FixupMsysPath(path):
path = path[len(msys):]
return path
def WindowsPathToMsysPath(path):
"""Translate a Windows pathname to an MSYS pathname.
Necessary because we call out to ssh/scp, which are MSYS binaries
@@ -78,7 +82,8 @@ def WindowsPathToMsysPath(path):
if sys.platform != 'win32' or path.startswith('/'):
return path
(drive, path) = os.path.splitdrive(os.path.abspath(path))
return "/" + drive[0] + path.replace('\\','/')
return "/" + drive[0] + path.replace('\\', '/')
def AppendOptionalArgsToSSHCommandline(cmdline, port, ssh_key):
"""Given optional port and ssh key values, append valid OpenSSH
@@ -93,6 +98,7 @@ def AppendOptionalArgsToSSHCommandline(cmdline, port, ssh_key):
# In case of an issue here we don't want to hang on a password prompt.
cmdline.extend(["-o", "BatchMode=yes"])
def DoSSHCommand(command, user, host, port=None, ssh_key=None):
"""Execute command on user@host using ssh. Optionally use
port and ssh_key, if provided."""
@@ -104,21 +110,22 @@ def DoSSHCommand(command, user, host, port=None, ssh_key=None):
try:
output = f(cmdline, stderr=STDOUT).strip()
except CalledProcessError as e:
print "failed ssh command output:"
print '=' * 20
print e.output
print '=' * 20
print("failed ssh command output:")
print('=' * 20)
print(e.output)
print('=' * 20)
raise
return output
raise Exception("Command %s returned non-zero exit code" % cmdline)
def DoSCPFile(file, remote_path, user, host, port=None, ssh_key=None,
log=False):
"""Upload file to user@host:remote_path using scp. Optionally use
port and ssh_key, if provided."""
if log:
print 'Uploading %s' % file
print('Uploading %s' % file)
cmdline = ["scp"]
AppendOptionalArgsToSSHCommandline(cmdline, port, ssh_key)
cmdline.extend([WindowsPathToMsysPath(file),
@@ -129,6 +136,7 @@ def DoSCPFile(file, remote_path, user, host, port=None, ssh_key=None,
raise Exception("Command %s returned non-zero exit code" % cmdline)
def GetBaseRelativePath(path, local_file, base_path):
"""Given a remote path to upload to, a full path to a local file, and an
optional full path that is a base path of the local file, construct the
@@ -142,9 +150,10 @@ def GetBaseRelativePath(path, local_file, base_path):
return path
dir = os.path.dirname(local_file)
# strip base_path + extra slash and make it unixy
dir = dir[len(base_path)+1:].replace('\\','/')
dir = dir[len(base_path) + 1:].replace('\\', '/')
return path + dir
def GetFileHashAndSize(filename):
sha512Hash = 'UNKNOWN'
size = 'UNKNOWN'
@@ -162,6 +171,7 @@ def GetFileHashAndSize(filename):
return (sha512Hash, size)
def GetMarProperties(filename):
if not os.path.exists(filename):
return {}
@@ -172,6 +182,7 @@ def GetMarProperties(filename):
'completeMarHash': mar_hash,
}
def GetUrlProperties(output, package):
# let's create a switch case using name-spaces/dict
# rather than a long if/else with duplicate code
@@ -189,7 +200,8 @@ def GetUrlProperties(output, package):
('testPackagesUrl', lambda m: m.endswith('test_packages.json')),
('packageUrl', lambda m: m.endswith(package)),
]
url_re = re.compile(r'''^(https?://.*?\.(?:tar\.bz2|dmg|zip|apk|rpm|mar|tar\.gz|json))$''')
url_re = re.compile(
r'''^(https?://.*?\.(?:tar\.bz2|dmg|zip|apk|rpm|mar|tar\.gz|json))$''')
properties = {}
try:
@@ -204,10 +216,13 @@ def GetUrlProperties(output, package):
except IOError as e:
if e.errno != errno.ENOENT:
raise
properties = {prop: 'UNKNOWN' for prop, condition in property_conditions}
properties = {prop: 'UNKNOWN' for prop, condition
in property_conditions}
return properties
def UploadFiles(user, host, path, files, verbose=False, port=None, ssh_key=None, base_path=None, upload_to_temp_dir=False, post_upload_command=None, package=None):
def UploadFiles(user, host, path, files, verbose=False, port=None, ssh_key=None, base_path=None,
upload_to_temp_dir=False, post_upload_command=None, package=None):
"""Upload each file in the list files to user@host:path. Optionally pass
port and ssh_key to the ssh commands. If base_path is not None, upload
files including their path relative to base_path. If upload_to_temp_dir is
@@ -221,12 +236,12 @@ def UploadFiles(user, host, path, files, verbose=False, port=None, ssh_key=None,
if not host or not user:
return {}
if (not path and not upload_to_temp_dir) or (path and upload_to_temp_dir):
print "One (and only one of UPLOAD_PATH or UPLOAD_TO_TEMP must be " + \
"defined."
print("One (and only one of UPLOAD_PATH or UPLOAD_TO_TEMP must be defined.")
sys.exit(1)
if upload_to_temp_dir:
path = DoSSHCommand("mktemp -d", user, host, port=port, ssh_key=ssh_key)
path = DoSSHCommand("mktemp -d", user, host,
port=port, ssh_key=ssh_key)
if not path.endswith("/"):
path += "/"
if base_path is not None:
@@ -250,7 +265,8 @@ def UploadFiles(user, host, path, files, verbose=False, port=None, ssh_key=None,
# If we wanted to, we could reduce the remote paths if they are a parent
# of any entry.
for p in sorted(remote_paths):
DoSSHCommand("mkdir -p " + p, user, host, port=port, ssh_key=ssh_key)
DoSSHCommand("mkdir -p " + p, user, host,
port=port, ssh_key=ssh_key)
with futures.ThreadPoolExecutor(4) as e:
fs = []
@@ -269,20 +285,22 @@ def UploadFiles(user, host, path, files, verbose=False, port=None, ssh_key=None,
if post_upload_command is not None:
if verbose:
print "Running post-upload command: " + post_upload_command
print("Running post-upload command: " + post_upload_command)
file_list = '"' + '" "'.join(remote_files) + '"'
output = DoSSHCommand('%s "%s" %s' % (post_upload_command, path, file_list), user, host, port=port, ssh_key=ssh_key)
output = DoSSHCommand('%s "%s" %s' % (
post_upload_command, path, file_list), user, host, port=port, ssh_key=ssh_key)
# We print since mozharness may parse URLs from the output stream.
print output
print(output)
properties = GetUrlProperties(output, package)
finally:
if upload_to_temp_dir:
DoSSHCommand("rm -rf %s" % path, user, host, port=port,
ssh_key=ssh_key)
if verbose:
print "Upload complete"
print("Upload complete")
return properties
def CopyFilesLocally(path, files, verbose=False, base_path=None, package=None):
"""Copy each file in the list of files to `path`. The `base_path` argument is treated
as it is by UploadFiles."""
@@ -299,9 +317,10 @@ def CopyFilesLocally(path, files, verbose=False, base_path=None, package=None):
if not os.path.exists(target_path):
os.makedirs(target_path)
if verbose:
print "Copying " + file + " to " + target_path
print("Copying " + file + " to " + target_path)
shutil.copy(file, target_path)
def WriteProperties(files, properties_file, url_properties, package):
properties = url_properties
for file in files:
@@ -312,6 +331,7 @@ def WriteProperties(files, properties_file, url_properties, package):
properties['uploadFiles'] = [os.path.abspath(f) for f in files]
json.dump(properties, outfile, indent=4)
if __name__ == '__main__':
host = OptionalEnvironmentVariable('UPLOAD_HOST')
user = OptionalEnvironmentVariable('UPLOAD_USER')
@@ -332,7 +352,8 @@ if __name__ == '__main__':
parser = OptionParser(usage="usage: %prog [options] <files>")
parser.add_option("-b", "--base-path",
action="store",
help="Preserve file paths relative to this path when uploading. If unset, all files will be uploaded directly to UPLOAD_PATH.")
help="Preserve file paths relative to this path when uploading. "
"If unset, all files will be uploaded directly to UPLOAD_PATH.")
parser.add_option("--properties-file",
action="store",
help="Path to the properties file to store the upload properties.")
@@ -341,20 +362,20 @@ if __name__ == '__main__':
help="Name of the main package.")
(options, args) = parser.parse_args()
if len(args) < 1:
print "You must specify at least one file to upload"
print("You must specify at least one file to upload")
sys.exit(1)
if not options.properties_file:
print "You must specify a --properties-file"
print("You must specify a --properties-file")
sys.exit(1)
if host == "localhost":
if upload_to_temp_dir:
print "Cannot use UPLOAD_TO_TEMP with UPLOAD_HOST=localhost"
print("Cannot use UPLOAD_TO_TEMP with UPLOAD_HOST=localhost")
sys.exit(1)
if post_upload_command:
# POST_UPLOAD_COMMAND is difficult to extract from the mozharness
# scripts, so just ignore it until it's no longer used anywhere
print "Ignoring POST_UPLOAD_COMMAND with UPLOAD_HOST=localhost"
print("Ignoring POST_UPLOAD_COMMAND with UPLOAD_HOST=localhost")
try:
if host == "localhost":
@@ -369,7 +390,8 @@ if __name__ == '__main__':
post_upload_command=post_upload_command,
package=options.package, verbose=True)
WriteProperties(args, options.properties_file, url_properties, options.package)
except IOError, (strerror):
print strerror
WriteProperties(args, options.properties_file,
url_properties, options.package)
except IOError as strerror:
print(strerror)
sys.exit(1)
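
A standalone sketch of the MSYS path translation performed by WindowsPathToMsysPath() above, written against ntpath so the worked example runs on any platform (the real function also returns non-Windows paths unchanged):

    import ntpath

    def windows_to_msys(path):
        # "C:\builds\upload" -> ("C:", "\builds\upload") -> "/C/builds/upload"
        drive, rest = ntpath.splitdrive(path)
        return "/" + drive[0] + rest.replace("\\", "/")

    assert windows_to_msys("C:\\builds\\upload") == "/C/builds/upload"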

View File

@@ -6,7 +6,6 @@
from __future__ import absolute_import, print_function, unicode_literals
import argparse
from concurrent.futures import ThreadPoolExecutor
from contextlib import contextmanager
import gzip
import io
@@ -37,6 +36,7 @@ def timed():
function was called.
'''
start = time.time()
def elapsed():
return time.time() - start
yield elapsed
@@ -76,10 +76,13 @@ def upload_worker(queue, event, bucket, session_args):
'ContentEncoding': 'gzip',
'ContentType': 'text/plain',
}
log.info('Uploading "{}" ({} bytes)'.format(pathname, len(compressed.getvalue())))
log.info('Uploading "{}" ({} bytes)'.format(
pathname, len(compressed.getvalue())))
with timed() as elapsed:
s3.upload_fileobj(compressed, bucket, pathname, ExtraArgs=extra_args)
log.info('Finished uploading "{}" in {:0.3f}s'.format(pathname, elapsed()))
s3.upload_fileobj(compressed, bucket,
pathname, ExtraArgs=extra_args)
log.info('Finished uploading "{}" in {:0.3f}s'.format(
pathname, elapsed()))
queue.task_done()
except Exception:
log.exception('Thread encountered exception:')
@@ -91,8 +94,10 @@ def do_work(artifact, region, bucket):
session = requests.Session()
if 'TASK_ID' in os.environ:
level = os.environ.get('MOZ_SCM_LEVEL', '1')
secrets_url = 'http://taskcluster/secrets/v1/secret/project/releng/gecko/build/level-{}/gecko-generated-sources-upload'.format(level)
log.info('Using AWS credentials from the secrets service: "{}"'.format(secrets_url))
secrets_url = 'http://taskcluster/secrets/v1/secret/project/releng/gecko/build/level-{}/gecko-generated-sources-upload'.format( # noqa
level)
log.info(
'Using AWS credentials from the secrets service: "{}"'.format(secrets_url))
res = session.get(secrets_url)
res.raise_for_status()
secret = res.json()
@@ -103,12 +108,12 @@ def do_work(artifact, region, bucket):
else:
log.info('Trying to use your AWS credentials..')
# First, fetch the artifact containing the sources.
log.info('Fetching generated sources artifact: "{}"'.format(artifact))
with timed() as elapsed:
res = session.get(artifact)
log.info('Fetch HTTP status: {}, {} bytes downloaded in {:0.3f}s'.format(res.status_code, len(res.content), elapsed()))
log.info('Fetch HTTP status: {}, {} bytes downloaded in {:0.3f}s'.format(
res.status_code, len(res.content), elapsed()))
res.raise_for_status()
# Create a queue and worker threads for uploading.
q = Queue()
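
A minimal, runnable sketch of the timed() context manager used in the hunks above: it yields a callable that reports the seconds elapsed since the with-block was entered (the sleep is only for demonstration):

    import time
    from contextlib import contextmanager

    @contextmanager
    def timed():
        start = time.time()

        def elapsed():
            return time.time() - start
        yield elapsed

    with timed() as elapsed:
        time.sleep(0.1)
    print('took {:0.3f}s'.format(elapsed()))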

View File

@@ -11,6 +11,7 @@ from datetime import datetime
SOURCESTAMP_FILENAME = 'sourcestamp.txt'
def buildid_header(output):
buildid = os.environ.get('MOZ_BUILD_DATE')
if buildid and len(buildid) != 14:
@@ -45,6 +46,7 @@ def get_hg_info(workdir):
def get_hg_changeset(path):
return get_program_output('hg', '-R', path, 'parent', '--template={node}')
def get_info_from_sourcestamp(sourcestamp_path):
"""Read the repository and changelog information from the sourcestamp
file. This assumes that the file exists and returns the results as a list
@@ -66,6 +68,7 @@ def get_info_from_sourcestamp(sourcestamp_path):
# Return the repo and the changeset.
return lines[1].split('/rev/')
def source_repo_header(output):
# We allow the source repo and changeset to be specified via the
# environment (see configure)
@@ -75,7 +78,8 @@ def source_repo_header(output):
source = ''
if not repo:
sourcestamp_path = os.path.join(buildconfig.topsrcdir, SOURCESTAMP_FILENAME)
sourcestamp_path = os.path.join(
buildconfig.topsrcdir, SOURCESTAMP_FILENAME)
if os.path.exists(os.path.join(buildconfig.topsrcdir, '.hg')):
repo, changeset = get_hg_info(buildconfig.topsrcdir)
elif os.path.exists(sourcestamp_path):

View File

@@ -198,7 +198,7 @@ def format_manifest(manifest):
def write_zip(zip_path, prefix=None):
"""Write toolchain data to a zip file."""
if isinstance(prefix, unicode):
if isinstance(prefix, unicode): # noqa Special case for Python 2
prefix = prefix.encode('utf-8')
with JarWriter(file=zip_path, optimize=False, compress=5) as zip:

View File

@@ -7,7 +7,6 @@ from __future__ import print_function, unicode_literals
import codecs
import itertools
import os
import subprocess
import sys
import textwrap

View File

@@ -3,6 +3,8 @@ flake8:
description: Python linter
include:
- build/moz.configure/*.configure
- build/*.py
- configure.py
- config/check_macroassembler_style.py
- config/mozunit.py
- layout/tools/reftest
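
For completeness, a hedged sketch (not part of the patch) of checking the newly included paths with flake8 directly from a mozilla-central checkout. In-tree the check normally runs through mach lint, and flake8 must be installed for this to work, so treat it as an approximation:

    import glob
    import subprocess
    import sys

    paths = (glob.glob('build/*.py')
             + glob.glob('build/moz.configure/*.configure')
             + ['configure.py'])
    sys.exit(subprocess.call(['flake8'] + paths))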