Bug 1619788 - Ensure files generated by ./mach configure don't have CRLF line endings, even under Python 3 r=glandium
Differential Revision: https://phabricator.services.mozilla.com/D65354
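For context on the pattern applied throughout this patch: bare open() calls are replaced with io.open(..., encoding='utf-8', newline='\n'). Under Python 3 on Windows, a text-mode handle translates '\n' to os.linesep ('\r\n') on write unless a newline argument is given, so pinning newline='\n' keeps generated files LF-only on every platform. A minimal sketch of that behavior (the file name below is illustrative, not taken from the patch):

import io
import os

path = 'example-output.txt'  # illustrative path, not from the patch

# Pin both the encoding and the newline, as the patch does for generated files.
with io.open(path, 'w', encoding='utf-8', newline='\n') as fh:
    fh.write(u'line one\nline two\n')

# Reading back in binary shows the bytes actually on disk: LF only, even on
# Windows, because newline='\n' disables the os.linesep translation.
with io.open(path, 'rb') as fh:
    assert b'\r\n' not in fh.read()

os.remove(path)
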
@@ -162,13 +162,13 @@ class _MockBaseOpen(object):
         self.open = open
         self.files = files
 
-    def __call__(self, name, mode='r', buffering=None, encoding=None):
+    def __call__(self, name, mode='r', buffering=None, encoding=None, newline=None):
         # open() can be called with an integer "name" (i.e. a file descriptor).
         # We don't generally do this in our codebase, but internal Python
         # libraries sometimes do and we want to handle that cleanly.
         if isinstance(name, int):
             return self.open(name, mode=mode, buffering=buffering,
-                             encoding=encoding)
+                             encoding=encoding, newline=newline)
         # buffering is ignored.
         absname = normcase(os.path.abspath(name))
         if 'w' in mode:

@@ -5,6 +5,7 @@
 from __future__ import absolute_import, print_function, unicode_literals
 
 import codecs
+import io
 import itertools
 import logging
 import os

@@ -133,7 +134,8 @@ def config_status(config):
 
     # Write out a file so the build backend knows to re-run configure when
     # relevant Python changes.
-    with open('config_status_deps.in', 'w') as fh:
+    with io.open('config_status_deps.in', 'w', encoding='utf-8',
+                 newline='\n') as fh:
        for f in itertools.chain(config['CONFIG_STATUS_DEPS'],
                                 iter_modules_in_path(config['TOPOBJDIR'],
                                                      config['TOPSRCDIR'])):

@@ -4,6 +4,7 @@
 
 from __future__ import absolute_import, print_function, unicode_literals
 
+import io
 import logging
 import os
 import re

@@ -955,7 +956,7 @@ class RecursiveMakeBackend(MakeBackend):
             obj.topobjdir = bf.environment.topobjdir
             obj.config = bf.environment
             self._create_makefile(obj, stub=stub)
-            with open(obj.output_path) as fh:
+            with io.open(obj.output_path, encoding='utf-8') as fh:
                 content = fh.read()
                 # Directories with a Makefile containing a tools target, or
                 # XPI_PKGNAME or INSTALL_EXTENSION_ID can't be skipped and

@@ -4,6 +4,7 @@
 
 from __future__ import absolute_import, print_function, unicode_literals
 
+import io
 import json
 import logging
 import mozpack.path as mozpath

@@ -146,7 +147,7 @@ class MozbuildObject(ProcessExecutionMixin):
         mozconfig = MozconfigLoader.AUTODETECT
 
         def load_mozinfo(path):
-            info = json.load(open(path, 'rt'))
+            info = json.load(io.open(path, 'rt', encoding='utf-8'))
             topsrcdir = info.get('topsrcdir')
             topobjdir = os.path.dirname(path)
             mozconfig = info.get('mozconfig')

@@ -215,7 +216,7 @@ class MozbuildObject(ProcessExecutionMixin):
            return True
 
        deps = []
-        with open(dep_file, 'r') as fh:
+        with io.open(dep_file, 'r', encoding='utf-8', newline='\n') as fh:
            deps = fh.read().splitlines()
 
        mtime = os.path.getmtime(output)

@@ -240,7 +241,7 @@ class MozbuildObject(ProcessExecutionMixin):
        # we last built the backend, re-generate the backend if
        # so.
        outputs = []
-        with open(backend_file, 'r') as fh:
+        with io.open(backend_file, 'r', encoding='utf-8', newline='\n') as fh:
            outputs = fh.read().splitlines()
        for output in outputs:
            if not os.path.isfile(mozpath.join(self.topobjdir, output)):

@@ -7,6 +7,7 @@
 from __future__ import absolute_import, print_function, unicode_literals
 
 import errno
+import io
 import json
 import os
 import re

@@ -227,7 +228,7 @@ class WarningsDatabase(object):
         """
 
         # Need to calculate up front since we are mutating original object.
-        filenames = self._files.keys()
+        filenames = list(six.iterkeys(self._files))
         for filename in filenames:
             if not os.path.exists(filename):
                 del self._files[filename]

@@ -255,7 +256,8 @@ class WarningsDatabase(object):
                 normalized = list(v2)
                 obj['files'][k][k2] = normalized
 
-        json.dump(obj, fh, indent=2)
+        to_write = six.ensure_text(json.dumps(obj, indent=2))
+        fh.write(to_write)
 
     def deserialize(self, fh):
         """Load serialized content from a handle into the current instance."""

@@ -276,7 +278,7 @@ class WarningsDatabase(object):
 
     def load_from_file(self, filename):
         """Load the database from a file."""
-        with open(filename, 'r') as fh:
+        with io.open(filename, 'r', encoding='utf-8') as fh:
             self.deserialize(fh)
 
     def save_to_file(self, filename):

@@ -287,7 +289,7 @@ class WarningsDatabase(object):
         except OSError as e:
             if e.errno != errno.EEXIST:
                 raise
-        with open(filename, 'w') as fh:
+        with io.open(filename, 'w', encoding='utf-8', newline='\n') as fh:
             self.serialize(fh)
 

@@ -401,7 +401,8 @@ class ConfigureSandbox(dict):
             self._help = HelpFormatter(argv[0])
             self._help.add(self._help_option)
         elif moz_logger:
-            handler = logging.FileHandler('config.log', mode='w', delay=True)
+            handler = logging.FileHandler('config.log', mode='w', delay=True,
+                                          encoding='utf-8')
             handler.setFormatter(formatter)
             logger.addHandler(handler)
 

@@ -349,8 +349,11 @@ class BuildMonitor(MozbuildObject):
                     os.environ['UPLOAD_PATH'])
             else:
                 build_resources_path = self._get_state_filename('build_resources.json')
-            with open(build_resources_path, 'w') as fh:
-                json.dump(self.resources.as_dict(), fh, indent=2)
+            with io.open(build_resources_path, 'w',
+                         encoding='utf-8', newline='\n') as fh:
+                to_write = six.ensure_text(
+                    json.dumps(self.resources.as_dict(), indent=2))
+                fh.write(to_write)
         except Exception as e:
             self.log(logging.WARNING, 'build_resources_error',
                      {'msg': str(e)},

@@ -778,7 +781,7 @@ class StaticAnalysisOutputManager(OutputManager):
             self.monitor._warnings_database.save_to_file(path)
 
         else:
-            with open(path, 'w') as f:
+            with io.open(path, 'w', encoding='utf-8', newline='\n') as f:
                 f.write(self.raw)
 
         self.log(logging.INFO, 'write_output',

@@ -1149,7 +1152,7 @@ class BuildDriver(MozbuildObject):
                 add_extra_dependencies)
             depfile = os.path.join(self.topsrcdir, 'build',
                                    'dumbmake-dependencies')
-            with open(depfile) as f:
+            with io.open(depfile, encoding='utf-8', newline='\n') as f:
                 dm = dependency_map(f.readlines())
             new_pairs = list(add_extra_dependencies(target_pairs, dm))
             self.log(logging.DEBUG, 'dumbmake',

@@ -1244,7 +1247,8 @@ class BuildDriver(MozbuildObject):
                                            "Generated.txt")
 
             if os.path.exists(pathToThirdparty):
-                with open(pathToThirdparty) as f, open(pathToGenerated) as g:
+                with io.open(pathToThirdparty, encoding='utf-8', newline='\n') as f, \
+                        io.open(pathToGenerated, encoding='utf-8', newline='\n') as g:
                     # Normalize the path (no trailing /)
                     suppress = f.readlines() + g.readlines()
                     LOCAL_SUPPRESS_DIRS = tuple(s.strip('/') for s in suppress)

@@ -1435,11 +1439,12 @@ class BuildDriver(MozbuildObject):
     def _write_mozconfig_json(self):
         mozconfig_json = os.path.join(self.topobjdir, '.mozconfig.json')
         with FileAvoidWrite(mozconfig_json) as fh:
-            json.dump({
+            to_write = six.ensure_text(json.dumps({
                 'topsrcdir': self.topsrcdir,
                 'topobjdir': self.topobjdir,
                 'mozconfig': self.mozconfig,
-            }, fh, sort_keys=True, indent=2)
+            }, sort_keys=True, indent=2))
+            fh.write(to_write)
 
     def _run_client_mk(self, target=None, line_handler=None, jobs=0,
                        verbose=None, keep_going=False, append_env=None):

@@ -14,6 +14,7 @@ import difflib
 import errno
 import functools
 import hashlib
+import io
 import itertools
 import os
 import pprint

@@ -53,6 +54,12 @@ def exec_(object, globals=None, locals=None):
     exec(object, globals, locals)
 
 
+def _open(path, mode):
+    if 'b' in mode:
+        return io.open(path, mode)
+    return io.open(path, mode, encoding='utf-8', newline='\n')
+
+
 def hash_file(path, hasher=None):
     """Hashes a file specified by the path given and returns the hex digest."""
 

@@ -259,7 +266,7 @@ class FileAvoidWrite(BytesIO):
         old_content = None
 
         try:
-            existing = open(self.name, self.mode)
+            existing = _open(self.name, self.mode)
             existed = True
         except IOError:
             pass

@@ -280,7 +287,10 @@ class FileAvoidWrite(BytesIO):
         writemode = 'w'
         if self._binary_mode:
             writemode += 'b'
-        with open(self.name, writemode) as file:
+            buf = six.ensure_binary(buf)
+        else:
+            buf = six.ensure_text(buf)
+        with _open(self.name, writemode) as file:
             file.write(buf)
 
         self._generate_diff(buf, old_content)

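A note on the recurring six usage above: on Python 2, json.dump() writes byte-string chunks, which a text-mode io.open() handle will not accept, so the patch serializes with json.dumps() and passes the result through six.ensure_text() before writing. A rough sketch of that pattern, assuming six is installed (the data and file name are illustrative, not from the patch):

import io
import json
import six

data = {'topsrcdir': '/src', 'topobjdir': '/obj'}  # illustrative values

# json.dumps() returns a byte string on Python 2 for ASCII-only input;
# six.ensure_text() decodes it so the text-mode handle accepts it, and it
# is effectively a no-op for str on Python 3.
to_write = six.ensure_text(json.dumps(data, sort_keys=True, indent=2))

with io.open('example.json', 'w', encoding='utf-8', newline='\n') as fh:
    fh.write(to_write)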