Mirror of https://github.com/electron/electron.git
chore: bump pylint to 2.17 (#41576)
* build: bump pylint to 2.17
  Xref: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/5062345
* fix pylint consider-using-f-string warnings pt 1: use flynt for automated fixes
* fix pylint consider-using-f-string warnings pt 2: manual fixes
* fix pylint consider-using-with warnings
* fix pylint line-too-long warnings
* fix pylint unspecified-encoding warnings
* fix pylint consider-using-generator warning
* fixup! fix pylint unspecified-encoding warnings
* fix pylint line-too-long warnings
This commit is contained in:
Parent
00da7279cb
Commit
61ddb1aa07
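Note: the warning classes named in the commit message each have a mechanical fix. The sketch below is illustrative only (written for this note, not taken from the diff), using pylint's own checker names:

    import subprocess

    # consider-using-f-string: prefer f-strings over % and .format()
    name, count = 'electron', 3
    msg = f'found {count} copies of {name}'  # was: 'found %d copies of %s' % (count, name)

    # unspecified-encoding: pass an explicit encoding when opening text files
    with open('notes.txt', 'w', encoding='utf-8') as fout:
      fout.write(msg + '\n')

    # consider-using-with: scope the process to a context manager so it is reaped
    with subprocess.Popen(['echo', 'hi'], stdout=subprocess.PIPE) as proc:
      out, _ = proc.communicate()

    # consider-using-generator: drop the brackets, avoid the throwaway list
    total = sum(len(word) for word in msg.split())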
@@ -13,7 +13,7 @@ THREEWAY = "ELECTRON_USE_THREE_WAY_MERGE_FOR_PATCHES" in os.environ
 def apply_patches(target):
   repo = target.get('repo')
   if not os.path.exists(repo):
-    warnings.warn('repo not found: %s' % repo)
+    warnings.warn(f'repo not found: {repo}')
     return
   patch_dir = target.get('patch_dir')
   git.import_patches(
@@ -13,13 +13,13 @@ def stop():
   DBusTestCase.stop_dbus(DBusTestCase.session_bus_pid)


 def start():
-  log = sys.stdout if is_verbose_mode() else open(os.devnull, 'w')
-  DBusTestCase.start_system_bus()
-  DBusTestCase.spawn_server_template('logind', None, log)
-
-  DBusTestCase.start_session_bus()
-  DBusTestCase.spawn_server_template('notification_daemon', None, log)
+  with sys.stdout if is_verbose_mode() \
+      else open(os.devnull, 'w', encoding='utf-8') as log:
+    DBusTestCase.start_system_bus()
+    DBusTestCase.spawn_server_template('logind', None, log)
+    DBusTestCase.start_session_bus()
+    DBusTestCase.spawn_server_template('notification_daemon', None, log)


 if __name__ == '__main__':
   start()
@@ -11,7 +11,7 @@ from lib import git
 def export_patches(target, dry_run):
   repo = target.get('repo')
   if not os.path.exists(repo):
-    warnings.warn('repo not found: %s' % repo)
+    warnings.warn(f'repo not found: {repo}')
     return
   git.export_patches(
     dry_run=dry_run,
@@ -29,15 +29,15 @@ def run_node_configure(target_cpu):

 def read_node_config_gypi():
   config_gypi = os.path.join(NODE_DIR, 'config.gypi')
-  with open(config_gypi, 'r') as f:
-    content = f.read()
+  with open(config_gypi, 'r', encoding='utf-8') as file_in:
+    content = file_in.read()
     return ast.literal_eval(content)

 def read_electron_args():
   all_gn = os.path.join(ELECTRON_DIR, 'build', 'args', 'all.gn')
   args = {}
-  with open(all_gn, 'r') as f:
-    for line in f:
+  with open(all_gn, 'r', encoding='utf-8') as file_in:
+    for line in file_in:
       if line.startswith('#'):
         continue
       m = re.match('([\w_]+) = (.+)', line)
@@ -62,8 +62,8 @@ def main(target_file, target_cpu):
   # Used by certain versions of node-gyp.
   v['build_v8_with_gn'] = 'false'

-  with open(target_file, 'w+') as f:
-    f.write(pprint.pformat(config, indent=2))
+  with open(target_file, 'w+', encoding='utf-8') as file_out:
+    file_out.write(pprint.pformat(config, indent=2))

 if __name__ == '__main__':
   sys.exit(main(sys.argv[1], sys.argv[2]))
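Note: the encoding='utf-8' arguments added above are the standard fix for pylint's unspecified-encoding warning (message ID W1514, assuming current pylint docs): without it, text-mode open() silently uses the platform default encoding. A minimal sketch with a hypothetical file name:

    with open('some_config.json', encoding='utf-8') as file_in:
      data = file_in.read()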
@@ -4,7 +4,7 @@ import zipfile
 import sys

 def main(zip_path, manifest_out):
-  with open(manifest_out, 'w') as manifest, \
+  with open(manifest_out, 'w', encoding='utf-8') as manifest, \
       zipfile.ZipFile(zip_path, 'r', allowZip64=True) as z:
     for name in sorted(z.namelist()):
       manifest.write(name + '\n')
@@ -8,7 +8,8 @@ NMV = None
 if len(sys.argv) > 3:
   NMV = sys.argv[3]

-with open(node_version_file, 'r') as in_file, open(out_file, 'w') as out_file:
+with open(node_version_file, 'r', encoding='utf-8') as in_file, \
+     open(out_file, 'w', encoding='utf-8') as out_file:
   changed = False
   contents = in_file.read()
   new_contents = re.sub(
@@ -43,7 +43,7 @@ def get_zip_name(name, version, suffix=''):
   arch = get_target_arch()
   if arch == 'arm':
     arch += 'v7l'
-  zip_name = '{0}-{1}-{2}-{3}'.format(name, version, get_platform_key(), arch)
+  zip_name = f'{name}-{version}-{get_platform_key()}-{arch}'
   if suffix:
     zip_name += '-' + suffix
   return zip_name + '.zip'
@@ -72,13 +72,10 @@ def am(repo, patch_data, threeway=False, directory=None, exclude=None,
   root_args += ['-c', 'user.email=' + committer_email]
   root_args += ['-c', 'commit.gpgsign=false']
   command = ['git'] + root_args + ['am'] + args
-  proc = subprocess.Popen(
-      command,
-      stdin=subprocess.PIPE)
-  proc.communicate(patch_data.encode('utf-8'))
-  if proc.returncode != 0:
-    raise RuntimeError("Command {} returned {}".format(command,
-                                                       proc.returncode))
+  with subprocess.Popen(command, stdin=subprocess.PIPE) as proc:
+    proc.communicate(patch_data.encode('utf-8'))
+    if proc.returncode != 0:
+      raise RuntimeError(f"Command {command} returned {proc.returncode}")


 def import_patches(repo, ref=UPSTREAM_HEAD, **kwargs):
@@ -229,19 +226,19 @@ def export_patches(repo, out_dir,
                    dry_run=False, grep=None):
   if not os.path.exists(repo):
     sys.stderr.write(
-      "Skipping patches in {} because it does not exist.\n".format(repo)
+      f"Skipping patches in {repo} because it does not exist.\n"
     )
     return
   if patch_range is None:
-    patch_range, num_patches = guess_base_commit(repo, ref)
-    sys.stderr.write("Exporting {} patches in {} since {}\n".format(
-        num_patches, repo, patch_range[0:7]))
+    patch_range, n_patches = guess_base_commit(repo, ref)
+    msg = f"Exporting {n_patches} patches in {repo} since {patch_range[0:7]}\n"
+    sys.stderr.write(msg)
   patch_data = format_patch(repo, patch_range)
   patches = split_patches(patch_data)
   if grep:
     olen = len(patches)
     patches = filter_patches(patches, grep)
-    sys.stderr.write("Exporting {} of {} patches\n".format(len(patches), olen))
+    sys.stderr.write(f"Exporting {len(patches)} of {olen} patches\n")

   try:
     os.mkdir(out_dir)
@@ -256,7 +253,8 @@ def export_patches(repo, out_dir,
   for patch in patches:
     filename = get_file_name(patch)
     filepath = posixpath.join(out_dir, filename)
-    existing_patch = str(io.open(filepath, 'rb').read(), 'utf-8')
+    with io.open(filepath, 'rb') as inp:
+      existing_patch = str(inp.read(), 'utf-8')
     formatted_patch = join_patch(patch)
     if formatted_patch != existing_patch:
       bad_patches.append(filename)
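Note: the subprocess.Popen and io.open rewrites above follow pylint's consider-using-with (R1732, assuming current message IDs): both objects are context managers, and the with form closes pipes and file handles even if an exception escapes. A minimal sketch, not taken from the repo:

    import subprocess

    with subprocess.Popen(['git', 'status'], stdout=subprocess.PIPE) as proc:
      out, _ = proc.communicate()
      if proc.returncode:
        raise RuntimeError(f'git status exited with {proc.returncode}')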
@@ -65,7 +65,7 @@ class Platform:
       return Platform.WINDOWS

     raise AssertionError(
-        "unexpected current platform '{}'".format(platform))
+        f"unexpected current platform '{platform}'")

   @staticmethod
   def get_all():
@@ -101,19 +101,19 @@ class TestsList():
     # First check that all names are present in the config.
     for binary_name in binaries:
       if binary_name not in self.tests:
-        raise Exception("binary {0} not found in config '{1}'".format(
-            binary_name, self.config_path))
+        msg = f"binary {binary_name} not found in config '{self.config_path}'"
+        raise Exception(msg)

     # Respect the "platform" setting.
     for binary_name in binaries:
       if not self.__platform_supports(binary_name):
-        raise Exception(
-            "binary {0} cannot be run on {1}, check the config".format(
-                binary_name, Platform.get_current()))
+        host = Platform.get_current()
+        errmsg = f"binary {binary_name} cannot run on {host}. Check the config"
+        raise Exception(errmsg)

     suite_returncode = sum(
-        [self.__run(binary, output_dir, verbosity, disabled_tests_policy)
-         for binary in binaries])
+        self.__run(binary, output_dir, verbosity, disabled_tests_policy)
+        for binary in binaries)
     return suite_returncode

   def run_all(self, output_dir=None, verbosity=Verbosity.CHATTY,
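Note: dropping the square brackets inside sum(...) above is the consider-using-generator fix (R1728, assuming current message IDs): a generator expression feeds sum lazily instead of materializing a throwaway list. Illustrative sketch with hypothetical names:

    def run(binary):  # hypothetical stand-in for TestsList.__run
      return 0

    binaries = ['unittests_a', 'unittests_b']
    suite_returncode = sum(run(binary) for binary in binaries)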
@@ -134,7 +134,7 @@ class TestsList():

   @staticmethod
   def __get_config_data(config_path):
-    with open(config_path, 'r') as stream:
+    with open(config_path, 'r', encoding='utf-8') as stream:
       return yaml.load(stream)

   @staticmethod
@@ -146,7 +146,7 @@ class TestsList():
     if isinstance(value, str):
       return {value: None}

-    raise AssertionError("unexpected shorthand type: {}".format(type(value)))
+    raise AssertionError(f"unexpected shorthand type: {type(value)}")

   @staticmethod
   def __make_a_list(value):
@@ -166,7 +166,7 @@ class TestsList():
       return [list_item for key in value for list_item in value[key]]

     raise AssertionError(
-        "unexpected type for list merging: {}".format(type(value)))
+        f"unexpected type for list merging: {type(value)}")

   def __platform_supports(self, binary_name):
     return Platform.get_current() in self.tests[binary_name]['platforms']
@@ -194,8 +194,7 @@ class TestsList():

     for platform in platforms:
       assert Platform.is_valid(platform), \
-          "platform '{0}' is not supported, check {1} config" \
-              .format(platform, binary_name)
+          f"Unsupported platform {platform}, check {binary_name} config"

     test_data['platforms'] = platforms

@@ -231,7 +230,7 @@ class TestsList():
     if output_dir is None:
       return None

-    return os.path.join(output_dir, "results_{}.xml".format(binary_name))
+    return os.path.join(output_dir, f"results_{binary_name}.xml")


 class TestBinary():
@@ -248,50 +247,40 @@ class TestBinary():
     gtest_output = TestBinary.__get_gtest_output(output_file_path)

     args = [self.binary_path, gtest_filter, gtest_output]
-    stdout, stderr = TestBinary.__get_stdout_and_stderr(verbosity)

     returncode = 0
-    try:
-      returncode = subprocess.call(args, stdout=stdout, stderr=stderr)
-    except Exception as exception:
-      if Verbosity.ge(verbosity, Verbosity.ERRORS):
-        print("An error occurred while running '{}':".format(self.binary_path),
-              '\n', exception, file=sys.stderr)
-      returncode = 1
+    with open(os.devnull, "w", encoding='utf-8') as devnull:
+      stdout = stderr = None
+      if Verbosity.le(verbosity, Verbosity.ERRORS):
+        stdout = devnull
+        if verbosity == Verbosity.SILENT:
+          stderr = devnull
+
+      try:
+        returncode = subprocess.call(args, stdout=stdout, stderr=stderr)
+      except Exception as exception:
+        if Verbosity.ge(verbosity, Verbosity.ERRORS):
+          print(f"An error occurred while running '{self.binary_path}':",
+                '\n', exception, file=sys.stderr)
+        returncode = 1

     return returncode

   @staticmethod
   def __get_gtest_filter(included_tests, excluded_tests):
-    included_tests_string = TestBinary.__list_tests(included_tests)
-    excluded_tests_string = TestBinary.__list_tests(excluded_tests)
-
-    gtest_filter = "--gtest_filter={}-{}".format(included_tests_string,
-                                                 excluded_tests_string)
-    return gtest_filter
+    included_str = TestBinary.__list_tests(included_tests)
+    excluded_str = TestBinary.__list_tests(excluded_tests)
+    return f"--gtest_filter={included_str}-{excluded_str}"

   @staticmethod
   def __get_gtest_output(output_file_path):
-    gtest_output = ""
-    if output_file_path is not None:
-      gtest_output = "--gtest_output={0}:{1}".format(TestBinary.output_format,
-                                                     output_file_path)
-    return gtest_output
+    if output_file_path is None:
+      return ""
+    return f"--gtest_output={TestBinary.output_format}:{output_file_path}"

   @staticmethod
   def __list_tests(tests):
     if tests is None:
       return ''
     return ':'.join(tests)
-
-  @staticmethod
-  def __get_stdout_and_stderr(verbosity):
-    stdout = stderr = None
-
-    if Verbosity.le(verbosity, Verbosity.ERRORS):
-      devnull = open(os.devnull, 'w')
-      stdout = devnull
-      if verbosity == Verbosity.SILENT:
-        stderr = devnull
-
-    return (stdout, stderr)
@@ -21,8 +21,8 @@ def read_patch(patch_dir, patch_filename):
     for l in f.readlines():
       line_has_correct_start = l.startswith('diff -') or l.startswith('---')
       if not added_patch_location and line_has_correct_start:
-        ret.append('{}{}\n'.format(PATCH_DIR_PREFIX, patch_dir))
-        ret.append('{}{}\n'.format(PATCH_FILENAME_PREFIX, patch_filename))
+        ret.append(f'{PATCH_DIR_PREFIX}{patch_dir}\n')
+        ret.append(f'{PATCH_FILENAME_PREFIX}{patch_filename}\n')
         added_patch_location = True
       ret.append(l)
   return ''.join(ret)
@@ -31,8 +31,8 @@ def read_patch(patch_dir, patch_filename):
 def patch_from_dir(patch_dir):
   """Read a directory of patches into a format suitable for passing to
   'git am'"""
-  with open(os.path.join(patch_dir, ".patches")) as f:
-    patch_list = [l.rstrip('\n') for l in f.readlines()]
+  with open(os.path.join(patch_dir, ".patches"), encoding='utf-8') as file_in:
+    patch_list = [line.rstrip('\n') for line in file_in.readlines()]

   return ''.join([
     read_patch(patch_dir, patch_filename)
@@ -35,9 +35,8 @@ def scoped_cwd(path):

 def download(text, url, path):
   safe_mkdir(os.path.dirname(path))
-  with open(path, 'wb') as local_file:
-    print("Downloading %s to %s" % (url, path))
-    web_file = urlopen(url)
+  with open(path, 'wb') as local_file, urlopen(url) as web_file:
+    print(f"Downloading {url} to {path}")
     info = web_file.info()
     if hasattr(info, 'getheader'):
       file_size = int(info.getheaders("Content-Length")[0])
@@ -58,11 +57,11 @@ def download(text, url, path):

       if not ci:
         percent = downloaded_size * 100. / file_size
-        status = "\r%s %10d [%3.1f%%]" % (text, downloaded_size, percent)
+        status = f"\r{text} {downloaded_size:10d} [{percent:3.1f}%]"
         print(status, end=' ')

   if ci:
-    print("%s done." % (text))
+    print(f"{text} done.")
   else:
     print()
   return path
@@ -74,15 +73,16 @@ def make_zip(zip_file_path, files, dirs):
     allfiles = files + dirs
     execute(['zip', '-r', '-y', zip_file_path] + allfiles)
   else:
-    zip_file = zipfile.ZipFile(zip_file_path, "w", zipfile.ZIP_DEFLATED,
-                               allowZip64=True)
-    for filename in files:
-      zip_file.write(filename, filename)
-    for dirname in dirs:
-      for root, _, filenames in os.walk(dirname):
-        for f in filenames:
-          zip_file.write(os.path.join(root, f))
-    zip_file.close()
+    with zipfile.ZipFile(zip_file_path, "w",
+                         zipfile.ZIP_DEFLATED,
+                         allowZip64=True) as zip_file:
+      for filename in files:
+        zip_file.write(filename, filename)
+      for dirname in dirs:
+        for root, _, filenames in os.walk(dirname):
+          for f in filenames:
+            zip_file.write(os.path.join(root, f))
+      zip_file.close()


 def rm_rf(path):
@@ -128,8 +128,8 @@ def get_electron_branding():
   SOURCE_ROOT = os.path.abspath(os.path.join(__file__, '..', '..', '..'))
   branding_file_path = os.path.join(
     SOURCE_ROOT, 'shell', 'app', 'BRANDING.json')
-  with open(branding_file_path) as f:
-    return json.load(f)
+  with open(branding_file_path, encoding='utf-8') as file_in:
+    return json.load(file_in)


 cached_electron_version = None
@@ -173,14 +173,14 @@ def get_electron_exec():
   out_dir = get_out_dir()

   if sys.platform == 'darwin':
-    return '{0}/Electron.app/Contents/MacOS/Electron'.format(out_dir)
+    return f'{out_dir}/Electron.app/Contents/MacOS/Electron'
   if sys.platform == 'win32':
-    return '{0}/electron.exe'.format(out_dir)
+    return f'{out_dir}/electron.exe'
   if sys.platform == 'linux':
-    return '{0}/electron'.format(out_dir)
+    return f'{out_dir}/electron'

   raise Exception(
-    "get_electron_exec: unexpected platform '{0}'".format(sys.platform))
+    f"get_electron_exec: unexpected platform '{sys.platform}'")

 def get_buildtools_executable(name):
   buildtools = os.path.realpath(os.path.join(ELECTRON_DIR, '..', 'buildtools'))
@@ -120,10 +120,10 @@ const LINTERS = [{
   roots: ['script'],
   test: filename => filename.endsWith('.py'),
   run: (opts, filenames) => {
-    const rcfile = path.join(DEPOT_TOOLS, 'pylintrc');
+    const rcfile = path.join(DEPOT_TOOLS, 'pylintrc-2.17');
     const args = ['--rcfile=' + rcfile, ...filenames];
     const env = { PYTHONPATH: path.join(ELECTRON_ROOT, 'script'), ...process.env };
-    spawnAndCheckExitCode('pylint-2.7', args, { env });
+    spawnAndCheckExitCode('pylint-2.17', args, { env });
   }
 }, {
   key: 'javascript',
@@ -65,19 +65,19 @@ def parse_args():
   # 'config' must exist and be a file.
   args.config = os.path.abspath(args.config)
   if not os.path.isfile(args.config):
-    parser.error("file '{}' doesn't exist".format(args.config))
+    parser.error(f"file '{args.config}' doesn't exist")

   # 'tests_dir' must exist and be a directory.
   if args.tests_dir is not None:
     args.tests_dir = os.path.abspath(args.tests_dir)
     if not os.path.isdir(args.tests_dir):
-      parser.error("directory '{}' doesn't exist".format(args.tests_dir))
+      parser.error(f"directory '{args.tests_dir}' doesn't exist")

   # 'output_dir' must exist and be a directory.
   if args.output_dir is not None:
     args.output_dir = os.path.abspath(args.output_dir)
     if not os.path.isdir(args.output_dir):
-      parser.error("directory '{}' doesn't exist".format(args.output_dir))
+      parser.error(f"directory '{args.output_dir}' doesn't exist")

   return args
@@ -99,7 +99,7 @@ def main():
     return tests_list.run_all(args.output_dir, args.verbosity,
                               args.disabled_tests_policy)

-  raise AssertionError("unexpected command '{}'".format(args.command))
+  raise AssertionError(f"unexpected command '{args.command}'")


 if __name__ == '__main__':
@@ -77,8 +77,8 @@ def set_mtimes(patches_config, mtime):

       mtime_cache[file_path] = mtime

-  for file_path in mtime_cache:
-    os.utime(file_path, (mtime_cache[file_path], mtime_cache[file_path]))
+  for file_path, file_mtime in mtime_cache.items():
+    os.utime(file_path, (file_mtime, file_mtime))


 def main():
@@ -131,17 +131,17 @@ def main():
   if args.operation == "generate":
     try:
       # Cache file may exist from a previously aborted sync. Reuse it.
-      with open(args.cache_file, mode="r") as f:
-        json.load(f) # Make sure it's not an empty file
+      with open(args.cache_file, mode='r', encoding='utf-8') as fin:
+        json.load(fin) # Make sure it's not an empty file
       print("Using existing mtime cache for patches")
       return 0
     except Exception:
       pass

     try:
-      with open(args.cache_file, mode="w") as f:
+      with open(args.cache_file, mode="w", encoding='utf-8') as fin:
         mtime_cache = generate_cache(json.load(args.patches_config))
-        json.dump(mtime_cache, f, indent=2)
+        json.dump(mtime_cache, fin, indent=2)
     except Exception:
       print(
         "ERROR: failed to generate mtime cache for patches",
@@ -155,8 +155,8 @@ def main():
       return 0 # Cache file may not exist, fail more gracefully

     try:
-      with open(args.cache_file, mode="r") as f:
-        apply_mtimes(json.load(f))
+      with open(args.cache_file, mode='r', encoding='utf-8') as file_in:
+        apply_mtimes(json.load(file_in))

       if not args.preserve_cache:
         os.remove(args.cache_file)
@@ -32,9 +32,8 @@ def get_content(retry_count = 5):
       headers={"Authorization" : authToken}
     )

-    proposed_content = urlopen(
-      request
-    ).read()
+    with urlopen(request) as resp:
+      proposed_content = resp.read()

     if is_json(proposed_content):
       return proposed_content
@@ -29,7 +29,7 @@ def main():
   ]

   if args.target_dir is None:
-    store_artifact(directory, 'headers/dist/{0}'.format(args.version),
+    store_artifact(directory, f'headers/dist/{args.version}',
                    checksums)
   else:
     copy_files(checksums, args.target_dir)
@@ -51,10 +51,10 @@ def parse_args():

 def get_files_list(version):
   return [
-    { "filename": 'node-{0}.tar.gz'.format(version), "required": True },
-    { "filename": 'node-{0}-headers.tar.gz'.format(version), "required": True },
-    { "filename": 'iojs-{0}.tar.gz'.format(version), "required": True },
-    { "filename": 'iojs-{0}-headers.tar.gz'.format(version), "required": True },
+    { "filename": f'node-{version}.tar.gz', "required": True },
+    { "filename": f'node-{version}-headers.tar.gz', "required": True },
+    { "filename": f'iojs-{version}.tar.gz', "required": True },
+    { "filename": f'iojs-{version}-headers.tar.gz', "required": True },
     { "filename": 'node.lib', "required": False },
     { "filename": 'x64/node.lib', "required": False },
     { "filename": 'win-x86/iojs.lib', "required": False },
@@ -91,8 +91,8 @@ def create_checksum(algorithm, directory, filename, files):
     lines.append(h.hexdigest() + ' ' + os.path.relpath(path, directory))

   checksum_file = os.path.join(directory, filename)
-  with open(checksum_file, 'w') as f:
-    f.write('\n'.join(lines) + '\n')
+  with open(checksum_file, 'w', encoding='utf-8') as fout:
+    fout.write('\n'.join(lines) + '\n')
   return checksum_file

 def copy_files(source_files, output_dir):
@@ -45,9 +45,9 @@ def upload_node(version):
     versioned_header_tar = header_tar.format(version)
     shutil.copy2(generated_tar, os.path.join(GEN_DIR, versioned_header_tar))

-    store_artifact(GEN_DIR, 'headers/dist/{0}'.format(version),
+    store_artifact(GEN_DIR, f'headers/dist/{version}',
                    glob.glob('node-*.tar.gz'))
-    store_artifact(GEN_DIR, 'headers/dist/{0}'.format(version),
+    store_artifact(GEN_DIR, f'headers/dist/{version}',
                    glob.glob('iojs-*.tar.gz'))

   if PLATFORM == 'win32':
@@ -73,13 +73,13 @@ def upload_node(version):
     shutil.copy2(electron_lib, v4_node_lib)

     # Upload the node.lib.
-    store_artifact(DIST_DIR, 'headers/dist/{0}'.format(version), [node_lib])
+    store_artifact(DIST_DIR, f'headers/dist/{version}', [node_lib])

     # Upload the iojs.lib.
-    store_artifact(DIST_DIR, 'headers/dist/{0}'.format(version), [iojs_lib])
+    store_artifact(DIST_DIR, f'headers/dist/{version}', [iojs_lib])

     # Upload the v4 node.lib.
-    store_artifact(DIST_DIR, 'headers/dist/{0}'.format(version),
+    store_artifact(DIST_DIR, f'headers/dist/{version}',
                    [v4_node_lib])

@@ -26,7 +26,7 @@ PRODUCT_NAME = get_electron_branding()['product_name']
 SYMBOLS_DIR = os.path.join(RELEASE_DIR, 'breakpad_symbols')

 PDB_LIST = [
-  os.path.join(RELEASE_DIR, '{0}.exe.pdb'.format(PROJECT_NAME))
+  os.path.join(RELEASE_DIR, f'{PROJECT_NAME}.exe.pdb')
 ]

 PDB_LIST += glob.glob(os.path.join(RELEASE_DIR, '*.dll.pdb'))
@@ -53,9 +53,8 @@ def main():

   build_version = get_electron_build_version()
   if not ELECTRON_VERSION.startswith(build_version):
-    error = 'Tag name ({0}) should match build version ({1})\n'.format(
-        ELECTRON_VERSION, build_version)
-    sys.stderr.write(error)
+    errmsg = f"Tag ({ELECTRON_VERSION}) should match build ({build_version})\n"
+    sys.stderr.write(errmsg)
     sys.stderr.flush()
     return 1

@@ -344,8 +343,7 @@ def upload_electron(release, file_path, args):
   # if upload_to_storage is set, skip github upload.
   # todo (vertedinde): migrate this variable to upload_to_storage
   if args.upload_to_storage:
-    key_prefix = 'release-builds/{0}_{1}'.format(args.version,
-                                                 args.upload_timestamp)
+    key_prefix = f'release-builds/{args.version}_{args.upload_timestamp}'
     store_artifact(os.path.dirname(file_path), key_prefix, [file_path])
     upload_sha256_checksum(args.version, file_path, key_prefix)
     return
@@ -358,30 +356,30 @@ def upload_electron(release, file_path, args):


 def upload_io_to_github(release, filename, filepath, version):
-  print('Uploading %s to GitHub' % \
-      (filename))
+  print(f'Uploading {filename} to GitHub')
   script_path = os.path.join(
     ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-to-github.ts')
-  upload_process = subprocess.Popen([TS_NODE, script_path, filepath, filename,
-                                    str(release['id']), version], stdout=subprocess.PIPE,
-                                    stderr=subprocess.STDOUT)
-  if is_verbose_mode():
-    for c in iter(lambda: upload_process.stdout.read(1), b""):
-      sys.stdout.buffer.write(c)
-      sys.stdout.flush()
+  with subprocess.Popen([TS_NODE, script_path, filepath,
+                         filename, str(release['id']), version],
+                        stdout=subprocess.PIPE,
+                        stderr=subprocess.STDOUT) as upload_process:
+    if is_verbose_mode():
+      for c in iter(lambda: upload_process.stdout.read(1), b""):
+        sys.stdout.buffer.write(c)
+        sys.stdout.flush()


 def upload_sha256_checksum(version, file_path, key_prefix=None):
-  checksum_path = '{}.sha256sum'.format(file_path)
+  checksum_path = f'{file_path}.sha256sum'
   if key_prefix is None:
-    key_prefix = 'checksums-scratchpad/{0}'.format(version)
+    key_prefix = f'checksums-scratchpad/{version}'
   sha256 = hashlib.sha256()
   with open(file_path, 'rb') as f:
     sha256.update(f.read())

   filename = os.path.basename(file_path)
-  with open(checksum_path, 'w') as checksum:
-    checksum.write('{} *{}'.format(sha256.hexdigest(), filename))
+  with open(checksum_path, 'w', encoding='utf-8') as checksum:
+    checksum.write(f'{sha256.hexdigest()} *{filename}')
   store_artifact(os.path.dirname(checksum_path), key_prefix, [checksum_path])

@@ -394,7 +392,7 @@ def get_release(version):
   release_env['NODE_NO_WARNINGS'] = '1'
   release_info = execute(['node', script_path, version], release_env)
   if is_verbose_mode():
-    print('Release info for version: {}:\n'.format(version))
+    print(f'Release info for version: {version}:\n')
     print(release_info)
   release = json.loads(release_info)
   return release
@@ -85,8 +85,8 @@ def make_diff(diff_file, original, reformatted):
       difflib.unified_diff(
         original,
         reformatted,
-        fromfile='a/{}'.format(diff_file),
-        tofile='b/{}'.format(diff_file),
+        fromfile=f'a/{diff_file}',
+        tofile=f'b/{diff_file}',
         n=3))

@@ -111,8 +111,7 @@ def run_clang_format_diff_wrapper(args, file_name):
     raise
   except Exception as e:
     # pylint: disable=W0707
-    raise UnexpectedError('{}: {}: {}'.format(
-      file_name, e.__class__.__name__, e), e)
+    raise UnexpectedError(f'{file_name}: {e.__class__.__name__}: {e}', e)


 def run_clang_format_diff(args, file_name):
@@ -131,25 +130,22 @@ def run_clang_format_diff(args, file_name):
     print(" ".join(invocation))
     return [], []
   try:
-    proc = subprocess.Popen(
-        ' '.join(invocation),
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE,
-        universal_newlines=True,
-        shell=True)
+    with subprocess.Popen(' '.join(invocation),
+                          stdout=subprocess.PIPE,
+                          stderr=subprocess.PIPE,
+                          universal_newlines=True,
+                          shell=True) as proc:
+      outs = list(proc.stdout.readlines())
+      errs = list(proc.stderr.readlines())
+      proc.wait()
+      if proc.returncode:
+        code = proc.returncode
+        msg = f"clang-format exited with code {code}: '{file_name}'"
+        raise DiffError(msg, errs)
   except OSError as exc:
-    # pylint: disable=W0707
-    raise DiffError(
-        "Command '{}' failed to start: {}".format(
-            subprocess.list2cmdline(invocation), exc
-        )
-    )
-  outs = list(proc.stdout.readlines())
-  errs = list(proc.stderr.readlines())
-  proc.wait()
-  if proc.returncode:
-    raise DiffError("clang-format exited with status {}: '{}'".format(
-        proc.returncode, file_name), errs)
+    # pylint: disable=raise-missing-from
+    cmd = subprocess.list2cmdline(invocation)
+    raise DiffError(f"Command '{cmd}' failed to start: {exc}")
   if args.fix:
     return None, errs
   if sys.platform == 'win32':
@@ -200,7 +196,7 @@ def print_trouble(prog, message, use_colors):
   error_text = 'error:'
   if use_colors:
     error_text = bold_red(error_text)
-  print("{}: {} {}".format(prog, error_text, message), file=sys.stderr)
+  print(f"{prog}: {error_text} {message}", file=sys.stderr)


 def main():
@@ -212,8 +208,8 @@ def main():
     default=get_buildtools_executable('clang-format'))
   parser.add_argument(
     '--extensions',
-    help='comma separated list of file extensions (default: {})'.format(
-      DEFAULT_EXTENSIONS),
+    help='comma-separated list of file extensions'
+         f' (default: {DEFAULT_EXTENSIONS})',
     default=DEFAULT_EXTENSIONS)
   parser.add_argument(
     '--fix',
@@ -290,18 +286,18 @@ def main():

   parse_files = []
   if args.changed:
-    stdout = subprocess.Popen(
+    with subprocess.Popen(
       "git diff --name-only --cached",
       stdout=subprocess.PIPE,
       stderr=subprocess.STDOUT,
      shell=True,
      universal_newlines=True
-    ).communicate()[0].split("\n")
-    for line in stdout:
-      file_name = line.rstrip()
-      # don't check deleted files
-      if os.path.isfile(file_name):
-        parse_files.append(file_name)
+    ) as child:
+      for line in child.communicate()[0].split("\n"):
+        file_name = line.rstrip()
+        # don't check deleted files
+        if os.path.isfile(file_name):
+          parse_files.append(file_name)

   else:
     parse_files = args.files
@@ -324,6 +320,7 @@ def main():
   njobs = min(len(files), njobs)

   if not args.fix:
+    # pylint: disable=consider-using-with
     patch_file = tempfile.NamedTemporaryFile(delete=False,
                                              prefix='electron-format-')

@@ -333,6 +330,7 @@ def main():
     it = (run_clang_format_diff_wrapper(args, file) for file in files)
     pool = None
   else:
+    # pylint: disable=consider-using-with
     pool = multiprocessing.Pool(njobs)
     it = pool.imap_unordered(
       partial(run_clang_format_diff_wrapper, args), files)
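Note: where a with block is not practical (the temp file and worker pool above outlive the statement that creates them), the commit suppresses the checker inline instead. A hypothetical sketch of the same pattern:

    import multiprocessing

    if __name__ == '__main__':
      # pylint: disable=consider-using-with
      pool = multiprocessing.Pool(2)  # intentionally outlives a single statement
      try:
        print(pool.map(abs, [-1, 2, -3]))
      finally:
        pool.close()
        pool.join()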
@@ -373,8 +371,11 @@ def main():
       patch_file.close()
       os.unlink(patch_file.name)
     else:
-      print("\nTo patch these files, run:\n$ git apply {}\n"
-            .format(patch_file.name))
+      print(
+        'To patch these files, run:',
+        f"$ git apply {patch_file.name}", sep='\n')
+      filename = patch_file.name
+      print(f"\nTo patch these files, run:\n$ git apply {filename}\n")

   return retcode

@@ -7,6 +7,6 @@ target = sys.argv[2]

 os.chdir(os.path.dirname(source))

-tarball = tarfile.open(name=os.path.basename(target), mode='w:gz')
-tarball.add(os.path.relpath(source))
-tarball.close()
+with tarfile.open(name=os.path.basename(target), mode='w:gz') as tarball:
+  tarball.add(os.path.relpath(source))
+  tarball.close()
@@ -21,14 +21,15 @@ def main():

   chromedriver_path = os.path.join(
     args.source_root, args.build_dir, chromedriver_name[sys.platform])
-  proc = subprocess.Popen([chromedriver_path],
-                          stdout=subprocess.PIPE, universal_newlines=True)
-  try:
-    output = proc.stdout.readline()
-  except KeyboardInterrupt:
-    returncode = 0
-  finally:
-    proc.terminate()
+  with subprocess.Popen([chromedriver_path],
+                        stdout=subprocess.PIPE,
+                        universal_newlines=True) as proc:
+    try:
+      output = proc.stdout.readline()
+    except KeyboardInterrupt:
+      returncode = 0
+    finally:
+      proc.terminate()

   returncode = 0
   match = re.search(
@@ -24,10 +24,10 @@ def main():
     electron = os.path.join(app_path, 'Contents', 'MacOS', PRODUCT_NAME)
     ffmpeg_name = 'libffmpeg.dylib'
     ffmpeg_app_path = os.path.join(app_path, 'Contents', 'Frameworks',
-                                   '{0} Framework.framework'.format(PRODUCT_NAME),
+                                   f'{PRODUCT_NAME} Framework.framework',
                                    'Libraries')
   elif sys.platform == 'win32':
-    electron = os.path.join(app_path, '{0}.exe'.format(PROJECT_NAME))
+    electron = os.path.join(app_path, f'{PROJECT_NAME}.exe')
     ffmpeg_app_path = app_path
     ffmpeg_name = 'ffmpeg.dll'
   else:
@@ -69,7 +69,7 @@ def create_app_copy(initial_app_path):
     + '-no-proprietary-codecs')

   if sys.platform == 'darwin':
-    app_name = '{0}.app'.format(PRODUCT_NAME)
+    app_name = f'{PRODUCT_NAME}.app'
     initial_app_path = os.path.join(initial_app_path, app_name)
     app_path = os.path.join(app_path, app_name)

@@ -26,8 +26,9 @@ def main():
   try:
     with scoped_cwd(app_path):
       if args.snapshot_files_dir is None:
-        with open(os.path.join(app_path, 'mksnapshot_args')) as f:
-          mkargs = f.read().splitlines()
+        snapshot_filename = os.path.join(app_path, 'mksnapshot_args')
+        with open(snapshot_filename, encoding='utf-8') as file_in:
+          mkargs = file_in.read().splitlines()
         print('running: ' + ' '.join(mkargs + [ SNAPSHOT_SOURCE ]))
         subprocess.check_call(mkargs + [ SNAPSHOT_SOURCE ], cwd=app_path)
         print('ok mksnapshot successfully created snapshot_blob.bin.')
@@ -41,7 +42,7 @@ def main():
       gen_binary = get_binary_path('v8_context_snapshot_generator', \
                                    app_path)
       genargs = [ gen_binary, \
-                  '--output_file={0}'.format(context_snapshot_path) ]
+                  f'--output_file={context_snapshot_path}' ]
       print('running: ' + ' '.join(genargs))
       subprocess.check_call(genargs)
       print('ok v8_context_snapshot_generator successfully created ' \
@@ -59,15 +60,15 @@ def main():

   if sys.platform == 'darwin':
     bin_files = glob.glob(os.path.join(app_path, '*.bin'))
-    app_dir = os.path.join(app_path, '{0}.app'.format(PRODUCT_NAME))
+    app_dir = os.path.join(app_path, f'{PRODUCT_NAME}.app')
     electron = os.path.join(app_dir, 'Contents', 'MacOS', PRODUCT_NAME)
     bin_out_path = os.path.join(app_dir, 'Contents', 'Frameworks',
-                                '{0} Framework.framework'.format(PROJECT_NAME),
+                                f'{PROJECT_NAME} Framework.framework',
                                 'Resources')
     for bin_file in bin_files:
       shutil.copy2(bin_file, bin_out_path)
   elif sys.platform == 'win32':
-    electron = os.path.join(app_path, '{0}.exe'.format(PROJECT_NAME))
+    electron = os.path.join(app_path, f'{PROJECT_NAME}.exe')
   else:
     electron = os.path.join(app_path, PROJECT_NAME)

@@ -81,7 +82,7 @@ def main():
   except KeyboardInterrupt:
     print('Other error')
     returncode = 0
-  print('Returning with error code: {0}'.format(returncode))
+  print(f'Returning with error code: {returncode}')
   return returncode

@@ -98,7 +99,7 @@ def create_app_copy(initial_app_path):

 def get_binary_path(binary_name, root_path):
   if sys.platform == 'win32':
-    binary_path = os.path.join(root_path, '{0}.exe'.format(binary_name))
+    binary_path = os.path.join(root_path, f'{binary_name}.exe')
   else:
     binary_path = os.path.join(root_path, binary_name)
   return binary_path
@@ -4,7 +4,7 @@ import zipfile
 import sys

 def main(zip_path, manifest_in):
-  with open(manifest_in, 'r') as manifest, \
+  with open(manifest_in, 'r', encoding='utf-8') as manifest, \
       zipfile.ZipFile(zip_path, 'r', allowZip64=True) as z:
     files_in_zip = set(z.namelist())
     files_in_manifest = {l.strip() for l in manifest.readlines()}