Mirror of https://github.com/electron/electron.git

chore: fix linter errors (#25996)

Parent: 05b5c197ae
Commit: c27e5fdbb6
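Editor's note: every hunk below is a mechanical line-length fix; no behavior changes. The commit leans on three standard wrapping idioms, sketched here with placeholder names (some_call, the messages, and the values are illustrative, not identifiers from the diff):

def some_call(args):
  # Placeholder standing in for calls like subprocess.check_output or execute.
  return args

# 1. Hanging indent inside brackets (used for the subprocess/execute/
#    os.path.join/myzip.write calls below):
result = some_call([
  'first-argument', 'second-argument',
  'third-argument'])

# 2. Backslash continuation after an assert's comma (used for the
#    release-existence asserts):
assert result is not None, \
      'placeholder error message kept under the line-length limit'

# 3. Implicit concatenation of adjacent string literals (used for the
#    long exception message in validate_sha):
message = ("first half of a long message, "
           "second half of the same message")
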
@@ -12,7 +12,8 @@ def read_patch(patch_dir, patch_filename):
   patch_path = os.path.join(patch_dir, patch_filename)
   with codecs.open(patch_path, encoding='utf-8') as f:
     for l in f.readlines():
-      if not added_filename_line and (l.startswith('diff -') or l.startswith('---')):
+      line_has_correct_start = l.startswith('diff -') or l.startswith('---')
+      if not added_filename_line and line_has_correct_start:
         ret.append('Patch-Filename: {}\n'.format(patch_filename))
         added_filename_line = True
       ret.append(l)

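The hunk above takes a fourth approach: instead of wrapping, it extracts the compound condition into a named variable, which both shortens the line and documents intent. A minimal standalone illustration (the sample string is made up):

l = '--- a/some/patched/file.cc'  # fabricated sample patch line
line_has_correct_start = l.startswith('diff -') or l.startswith('---')
if line_has_correct_start:
  print('patch header found')
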
@@ -46,7 +46,9 @@ def main():
 
   for symbol_file in files:
     print("Generating Sentry src bundle for: " + symbol_file)
-    subprocess.check_output([NPX_CMD, '@sentry/cli@1.51.1', 'difutil', 'bundle-sources', symbol_file])
+    subprocess.check_output([
+      NPX_CMD, '@sentry/cli@1.51.1', 'difutil', 'bundle-sources',
+      symbol_file])
 
   files += glob.glob(SYMBOLS_DIR + '/*/*/*.src.zip')
 

@@ -36,7 +36,8 @@ SYMBOLS_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'symbols')
 DSYM_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'dsym')
 PDB_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'pdb')
 DEBUG_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'debug')
-TOOLCHAIN_PROFILE_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'toolchain-profile')
+TOOLCHAIN_PROFILE_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION,
+                                      'toolchain-profile')
 
 
 def main():

@@ -59,7 +60,8 @@ def main():
     tag_exists = True
 
   if not args.upload_to_s3:
-    assert release['exists'], 'Release does not exist; cannot upload to GitHub!'
+    assert release['exists'], \
+          'Release does not exist; cannot upload to GitHub!'
     assert tag_exists == args.overwrite, \
           'You have to pass --overwrite to overwrite a published release'
 

@@ -114,7 +116,8 @@ def main():
 
   if PLATFORM == 'linux' and get_target_arch() == 'x64':
     # Upload the hunspell dictionaries only from the linux x64 build
-    hunspell_dictionaries_zip = os.path.join(OUT_DIR, 'hunspell_dictionaries.zip')
+    hunspell_dictionaries_zip = os.path.join(
+      OUT_DIR, 'hunspell_dictionaries.zip')
     upload_electron(release, hunspell_dictionaries_zip, args)
 
   if not tag_exists and not args.upload_to_s3:

@@ -126,7 +129,9 @@ def main():
   if PLATFORM == 'win32':
     toolchain_profile_zip = os.path.join(OUT_DIR, TOOLCHAIN_PROFILE_NAME)
     with ZipFile(toolchain_profile_zip, 'w') as myzip:
-      myzip.write(os.path.join(OUT_DIR, 'windows_toolchain_profile.json'), 'toolchain_profile.json')
+      myzip.write(
+        os.path.join(OUT_DIR, 'windows_toolchain_profile.json'),
+        'toolchain_profile.json')
     upload_electron(release, toolchain_profile_zip, args)
 
 

@@ -170,12 +175,10 @@ class NonZipFileError(ValueError):
 def zero_zip_date_time(fname):
   """ Wrap strip-zip zero_zip_date_time within a file opening operation """
   try:
-    zip = open(fname, 'r+b')
-    _zero_zip_date_time(zip)
+    with open(fname, 'r+b') as f:
+      _zero_zip_date_time(f)
   except:
     raise NonZipFileError(fname)
-  finally:
-    zip.close()
 
 
 def _zero_zip_date_time(zip_):

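Beyond trimming lines, the rewrite above is strictly safer: the with statement closes the file even when _zero_zip_date_time raises, it stops shadowing the zip builtin, and it removes a latent bug in the old code, where finally: zip.close() would itself raise NameError whenever open() failed before zip was bound. A self-contained sketch of the closing guarantee (file name and failure are simulated, not the real upload-script code):

import tempfile

# Create a throwaway file to open, then fail while it is open.
with tempfile.NamedTemporaryFile(delete=False) as tmp:
  tmp.write(b'not a zip')

try:
  with open(tmp.name, 'r+b') as f:  # closed on success *and* on error
    raise ValueError('simulated parse failure')
except ValueError:
  print(f.closed)  # True -- no `finally: f.close()` needed
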
@@ -208,7 +211,7 @@ def _zero_zip_date_time(zip_):
       _, header_length = values
       extra_struct = Struct("<HH" + "B" * header_length)
       values = list(extra_struct.unpack_from(mm, offset))
-      header_id, header_length, rest = values[0], values[1], values[2:]
+      header_id, header_length = values[:2]
 
       if header_id in (EXTENDED_TIME_DATA, UNIX_EXTRA_DATA):
         values[0] = STRIPZIP_OPTION_HEADER

@@ -218,7 +221,7 @@ def _zero_zip_date_time(zip_):
       if header_id == ZIP64_EXTRA_HEADER:
         assert header_length == 16
         values = list(zip64_extra_struct.unpack_from(mm, offset))
-        header_id, header_length, uncompressed_size, compressed_size = values
+        header_id, header_length, _, compressed_size = values
 
       offset += extra_header_struct.size + header_length
 

@@ -266,7 +269,7 @@ def _zero_zip_date_time(zip_):
     if signature_struct.unpack_from(mm, offset) != (FILE_HEADER_SIGNATURE,):
       break
     values = list(local_file_header_struct.unpack_from(mm, offset))
-    _, _, _, _, _, _, _, compressed_size, _, name_length, extra_field_length = values
+    compressed_size, _, name_length, extra_field_length = values[7:11]
     # reset last_mod_time
     values[4] = 0
     # reset last_mod_date

@@ -274,20 +277,22 @@ def _zero_zip_date_time(zip_):
     local_file_header_struct.pack_into(mm, offset, *values)
     offset += local_file_header_struct.size + name_length
     if extra_field_length != 0:
-      compressed_size = purify_extra_data(mm, offset, extra_field_length, compressed_size)
+      compressed_size = purify_extra_data(mm, offset, extra_field_length,
+                                          compressed_size)
     offset += compressed_size + extra_field_length
 
   while offset < archive_size:
     if signature_struct.unpack_from(mm, offset) != (CENDIR_HEADER_SIGNATURE,):
       break
     values = list(central_directory_header_struct.unpack_from(mm, offset))
-    _, _, _, _, _, _, _, _, _, _, file_name_length, extra_field_length, file_comment_length, _, _, _, _ = values
+    file_name_length, extra_field_length, file_comment_length = values[10:13]
     # reset last_mod_time
     values[5] = 0
     # reset last_mod_date
     values[6] = 0x21
     central_directory_header_struct.pack_into(mm, offset, *values)
-    offset += central_directory_header_struct.size + file_name_length + extra_field_length + file_comment_length
+    offset += central_directory_header_struct.size
+    offset += file_name_length + extra_field_length + file_comment_length
     if extra_field_length != 0:
       purify_extra_data(mm, offset - extra_field_length, extra_field_length)
 

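The two tuple-unpacking changes above swap long underscore chains for slices of the list returned by Struct.unpack_from: values[7:11] selects the compressed size, uncompressed size, name length, and extra-field length of a ZIP local file header, and values[10:13] the three length fields of a central-directory header. A runnable sketch of the pattern (the field layout follows the ZIP spec, but the record contents are fabricated):

from struct import Struct

# ZIP local file header: signature, version/flags/method, mod time,
# mod date, crc32, compressed size, uncompressed size, name length,
# extra-field length.
local_file_header_struct = Struct("<IHHHHHIIIHH")
record = local_file_header_struct.pack(
  0x04034b50, 20, 0, 0, 0, 0, 0, 1234, 5678, 8, 0)
values = list(local_file_header_struct.unpack_from(record, 0))

# Before: one throwaway underscore per ignored field.
# After: index straight into the fields that matter.
compressed_size, _, name_length, extra_field_length = values[7:11]
print(compressed_size, name_length, extra_field_length)  # 1234 8 0
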
@@ -22,7 +22,9 @@ def strip_binary(binary_path, target_cpu):
     strip = 'mips64el-redhat-linux-strip'
   else:
     strip = 'strip'
-  execute([strip, '--discard-all', '--strip-debug', '--preserve-dates', binary_path])
+  execute([
+    strip, '--discard-all', '--strip-debug', '--preserve-dates',
+    binary_path])
 
 def main():
   args = parse_args()

@@ -20,7 +20,10 @@ def parse_args():
 
   parser.add_argument('--base-url', required=False,
                       help="Base URL for all downloads")
-  parser.add_argument('--force', action='store_true', default=False, required=False)
+  parser.add_argument('--force',
+                      action='store_true',
+                      default=False,
+                      required=False)
 
   return parser.parse_args()
 

@@ -111,7 +114,9 @@ def download_binary(base_url, sha, binary_name, attempt=3):
 def validate_sha(file_path, sha):
   downloaded_sha = sha256(file_path)
   if downloaded_sha != sha:
-    raise Exception("SHA for external binary file {} does not match expected '{}' != '{}'".format(file_path, downloaded_sha, sha))
+    raise Exception("SHA for external binary file {} does not match "
+                    "expected '{}' != '{}'".format(
+                      file_path, downloaded_sha, sha))
 
 
 def download_to_temp_dir(url, filename, sha):

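The wrapped message above relies on Python joining adjacent string literals at compile time, so the two-literal version is byte-for-byte identical to the original one-liner. A quick self-check with fabricated values:

file_path, downloaded_sha, sha = 'electron.zip', 'abc123', 'def456'

wrapped = ("SHA for external binary file {} does not match "
           "expected '{}' != '{}'".format(file_path, downloaded_sha, sha))
one_liner = (
  "SHA for external binary file {} does not match expected '{}' != '{}'"
  .format(file_path, downloaded_sha, sha))
assert wrapped == one_liner
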
@@ -33,7 +33,9 @@ def main():
 
   returncode = 0
   match = re.search(
-    '^Starting ChromeDriver [0-9]+.[0-9]+.[0-9]+.[0-9]+ .* on port [0-9]+$', output)
+    '^Starting ChromeDriver [0-9]+.[0-9]+.[0-9]+.[0-9]+ .* on port [0-9]+$',
+    output
+  )
 
   if match is None:
     returncode = 1