2012-05-21 15:12:37 +04:00
|
|
|
|
# This Source Code Form is subject to the terms of the Mozilla Public
|
|
|
|
|
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
|
|
|
|
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
|
|
|
|
|
2007-11-21 18:07:54 +03:00
|
|
|
|
import os
|
|
|
|
|
import shutil
|
2017-07-31 20:34:22 +03:00
|
|
|
|
import hashlib
|
2007-11-21 18:07:54 +03:00
|
|
|
|
import re
|
|
|
|
|
import sys
|
|
|
|
|
import getopt
|
|
|
|
|
import time
|
|
|
|
|
import tempfile
|
2017-07-31 20:34:22 +03:00
|
|
|
|
import io
|
2007-11-21 18:07:54 +03:00
|
|
|
|
|
2018-03-15 13:32:42 +03:00
|
|
|
|
|
2007-11-21 18:07:54 +03:00
|
|
|
|
class PatchInfo:
    """ Represents the meta-data associated with a patch
        work_dir = working dir where files are stored for this patch
        archive_files = list of files to include in this patch
        manifestv2 = list of manifest version 2 patch instructions
        manifestv3 = list of manifest version 3 patch instructions
        file_exclusion_list =
        files to exclude from this patch. names without slashes will be
        excluded anywhere in the directory hiearchy. names with slashes
        will only be excluded at that exact path
    """

    def __init__(self, work_dir, file_exclusion_list, path_exclusion_list):
        self.work_dir = work_dir
        self.archive_files = []
        self.manifestv2 = []
        self.manifestv3 = []
        self.file_exclusion_list = file_exclusion_list
        self.path_exclusion_list = path_exclusion_list

    def append_add_instruction(self, filename):
        """ Appends an add instruction for this patch.
        if filename starts with distribution/extensions/.*/ this will add an
        add-if instruction that will add the file if the parent directory
        of the file exists. This was ported from
        mozilla/tools/update-packaging/common.sh's make_add_instruction.
        """
        m = re.match("((?:|.*/)distribution/extensions/.*)/", filename)
        if m:
            # Directory immediately following extensions is used for the test
            testdir = m.group(1)
            print(' add-if "' + testdir + '" "' + filename + '"')
            self.manifestv2.append('add-if "' + testdir + '" "' + filename + '"')
            self.manifestv3.append('add-if "' + testdir + '" "' + filename + '"')
        else:
            print(' add "' + filename + '"')
            self.manifestv2.append('add "' + filename + '"')
            self.manifestv3.append('add "' + filename + '"')

    def append_add_if_not_instruction(self, filename):
        """ Appends an add-if-not instruction to the version 3 manifest for this patch.
        This was ported from mozilla/tools/update-packaging/common.sh's
        make_add_if_not_instruction.
        """
        print(' add-if-not "' + filename + '" "' + filename + '"')
        self.manifestv3.append('add-if-not "' + filename + '" "' + filename + '"')

    def append_patch_instruction(self, filename, patchname):
        """ Appends a patch instruction for this patch.

        filename = file to patch
        patchname = patchfile to apply to file

        if filename starts with distribution/extensions/.*/ this will add a
        patch-if instruction that will patch the file if the parent
        directory of the file exists. This was ported from
        mozilla/tools/update-packaging/common.sh's make_patch_instruction.
        """
        m = re.match("((?:|.*/)distribution/extensions/.*)/", filename)
        if m:
            testdir = m.group(1)
            print(' patch-if "' + testdir + '" "' + patchname + '" "' + filename + '"')
            self.manifestv2.append('patch-if "' + testdir + '" "' +
                                   patchname + '" "' + filename + '"')
            self.manifestv3.append('patch-if "' + testdir + '" "' +
                                   patchname + '" "' + filename + '"')
        else:
            print(' patch "' + patchname + '" "' + filename + '"')
            self.manifestv2.append('patch "' + patchname + '" "' + filename + '"')
            self.manifestv3.append('patch "' + patchname + '" "' + filename + '"')

    def append_remove_instruction(self, filename):
        """ Appends an remove instruction for this patch.
        This was ported from
        mozilla/tools/update-packaging/common.sh/make_remove_instruction
        """
        if filename.endswith("/"):
            print(' rmdir "' + filename + '"')
            self.manifestv2.append('rmdir "' + filename + '"')
            self.manifestv3.append('rmdir "' + filename + '"')
        elif filename.endswith("/*"):
            # Strip the trailing '*' so "dir/*" becomes "dir/" for rmrfdir.
            filename = filename[:-1]
            print(' rmrfdir "' + filename + '"')
            self.manifestv2.append('rmrfdir "' + filename + '"')
            self.manifestv3.append('rmrfdir "' + filename + '"')
        else:
            print(' remove "' + filename + '"')
            self.manifestv2.append('remove "' + filename + '"')
            self.manifestv3.append('remove "' + filename + '"')

    def _write_manifest(self, manifest_name, instructions):
        """ Writes the given instructions to manifest_name in the root of
        work_dir, xz-compresses the result in place, and records the
        manifest in archive_files. """
        manifest_file_path = os.path.join(self.work_dir, manifest_name)
        with open(manifest_file_path, "wb") as manifest_file:
            manifest_file.write(b"type \"partial\"\n")
            manifest_file.write('\n'.join(instructions).encode('ascii'))
            manifest_file.write(b"\n")
        xz_file(manifest_file_path)
        self.archive_files.append('"' + manifest_name + '"')

    def create_manifest_files(self):
        """ Create the v2 and v3 manifest files in the root of the work_dir """
        self._write_manifest("updatev2.manifest", self.manifestv2)
        self._write_manifest("updatev3.manifest", self.manifestv3)

    def build_marfile_entry_hash(self, root_path):
        """ Iterates through the root_path, creating a MarFileEntry for each file
        and directory in that path. Excludes any filenames in the file_exclusion_list
        and any paths in the path_exclusion_list.
        Returns (entry hash keyed by relative name, set of file names, set of dir names).
        """
        mar_entry_hash = {}
        filename_set = set()
        dirname_set = set()
        for root, dirs, files in os.walk(root_path):
            for name in files:
                # filename is the relative path from root directory
                partial_path = root[len(root_path) + 1:]
                if name not in self.file_exclusion_list:
                    filename = os.path.join(partial_path, name)
                    if "/" + filename not in self.path_exclusion_list:
                        mar_entry_hash[filename] = MarFileEntry(root_path, filename)
                        filename_set.add(filename)

            for name in dirs:
                # dirname is the relative path from root directory
                partial_path = root[len(root_path) + 1:]
                if name not in self.file_exclusion_list:
                    dirname = os.path.join(partial_path, name)
                    if "/" + dirname not in self.path_exclusion_list:
                        # Directories are tracked with a trailing slash so they
                        # are distinguishable from plain files.
                        dirname = dirname + "/"
                        mar_entry_hash[dirname] = MarFileEntry(root_path, dirname)
                        dirname_set.add(dirname)

        return mar_entry_hash, filename_set, dirname_set
|
2014-09-29 22:52:04 +04:00
|
|
|
|
|
2014-03-05 23:42:56 +04:00
|
|
|
|
|
2007-11-21 18:07:54 +03:00
|
|
|
|
class MarFileEntry:
    """Represents a file inside a Mozilla Archive Format (MAR)
    abs_path = abspath to the the file
    name = relative path within the mar. e.g.
    foo.mar/dir/bar.txt extracted into /tmp/foo:
    abs_path=/tmp/foo/dir/bar.txt
    name = dir/bar.txt
    """

    def __init__(self, root, name):
        """root = path the the top of the mar
        name = relative path within the mar"""
        # Manifests always use '/' separators, even on Windows.
        self.name = name.replace("\\", "/")
        self.abs_path = os.path.join(root, name)
        # Lazily-computed SHA-1 digest; populated on first call to sha().
        self.sha_cache = None

    def __str__(self):
        return 'Name: %s FullPath: %s' % (self.name, self.abs_path)

    def calc_file_sha_digest(self, filename):
        """ Returns sha digest of given filename"""
        # Use a context manager so the file handle is closed promptly
        # (the original open(...).read() leaked the handle).
        with open(filename, 'rb') as f:
            return hashlib.sha1(f.read()).digest()

    def sha(self):
        """ Returns sha digest of file repreesnted by this _marfile_entry"""
        if not self.sha_cache:
            self.sha_cache = self.calc_file_sha_digest(self.abs_path)
        return self.sha_cache
|
|
|
|
|
|
2018-03-15 13:32:42 +03:00
|
|
|
|
|
2007-11-21 18:07:54 +03:00
|
|
|
|
def exec_shell_cmd(cmd):
    """Run *cmd* through the system shell; raise if its exit status is non-zero."""
    status = os.system(cmd)
    if status:
        raise Exception("cmd failed " + cmd)
|
2007-11-21 18:07:54 +03:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def copy_file(src_file_abs_path, dst_file_abs_path):
    """ Copies src to dst creating any parent dirs required in dst first """
    dst_file_dir = os.path.dirname(dst_file_abs_path)
    # exist_ok avoids the check-then-create race of the previous
    # exists()/makedirs() pair; the empty-dirname guard handles a bare
    # filename destination (dirname == "").
    if dst_file_dir:
        os.makedirs(dst_file_dir, exist_ok=True)
    # Copy the file over (copy2 preserves metadata such as mtime)
    shutil.copy2(src_file_abs_path, dst_file_abs_path)
|
|
|
|
|
|
2018-03-15 13:32:42 +03:00
|
|
|
|
|
2017-07-31 20:34:22 +03:00
|
|
|
|
def xz_file(filename):
    """XZ compresses the file in place.

    The original file is replaced with the xz compressed version of
    itself; assumes the path is absolute.
    """
    compress_cmd = 'xz --compress --x86 --lzma2 --format=xz --check=crc64 "' + filename + '"'
    exec_shell_cmd(compress_cmd)
    # xz writes its output to <filename>.xz; move it back over the original name.
    os.rename(filename + ".xz", filename)
|
|
|
|
|
|
2014-03-05 23:42:56 +04:00
|
|
|
|
|
2017-07-31 20:34:22 +03:00
|
|
|
|
def xzunzip_file(filename):
    """xz decompresses the file in place.

    The original file is replaced with a xz decompressed version of
    itself; it doesn't matter whether the filename ends in .xz or not.
    """
    # xz -d requires the .xz suffix, so rename first when it is missing.
    xz_path = filename if filename.endswith(".xz") else filename + ".xz"
    if xz_path != filename:
        os.rename(filename, xz_path)
    exec_shell_cmd('xz -d "' + xz_path + '"')
|
2007-11-21 18:07:54 +03:00
|
|
|
|
|
|
|
|
|
|
2014-09-29 22:52:04 +04:00
|
|
|
|
def extract_mar(filename, work_dir):
    """Extracts the marfile into the work_dir.

    Assumes work_dir already exists, otherwise an OSError is raised.
    """
    print("Extracting " + filename + " to " + work_dir)
    # `mar -x` extracts into the current directory, so temporarily switch
    # into work_dir and always restore the previous directory afterwards.
    saved_path = os.getcwd()
    os.chdir(work_dir)
    try:
        exec_shell_cmd("mar -x " + filename)
    finally:
        os.chdir(saved_path)
|
|
|
|
|
|
2018-03-15 13:32:42 +03:00
|
|
|
|
|
2007-11-21 18:07:54 +03:00
|
|
|
|
def create_partial_patch_for_file(from_marfile_entry, to_marfile_entry, shas, patch_info):
    """ Creates the partial patch file and manifest entry for the pair of files passed in.

    shas is a cache keyed by (from_sha, to_sha); when a pair has been diffed
    before, the previously produced patch/full file is reused instead of
    re-running mbsdiff.
    """
    if not (from_marfile_entry.sha(), to_marfile_entry.sha()) in shas:
        print('diffing "' + from_marfile_entry.name + '\"')

        # bunzip to/from
        # Decompress both sides in place so mbsdiff operates on raw content.
        xzunzip_file(from_marfile_entry.abs_path)
        xzunzip_file(to_marfile_entry.abs_path)

        # The patch file will be created in the working directory with the
        # name of the file in the mar + .patch
        patch_file_abs_path = os.path.join(patch_info.work_dir, from_marfile_entry.name + ".patch")
        patch_file_dir = os.path.dirname(patch_file_abs_path)
        if not os.path.exists(patch_file_dir):
            os.makedirs(patch_file_dir)

        # Create xz compressed patch file
        exec_shell_cmd("mbsdiff " + from_marfile_entry.abs_path + " " +
                       to_marfile_entry.abs_path + " " + patch_file_abs_path)
        xz_file(patch_file_abs_path)

        # Create xz compressed full file
        full_file_abs_path = os.path.join(patch_info.work_dir, to_marfile_entry.name)
        shutil.copy2(to_marfile_entry.abs_path, full_file_abs_path)
        xz_file(full_file_abs_path)

        # Ship whichever representation is smaller: the binary diff or the
        # whole (compressed) file.
        if os.path.getsize(patch_file_abs_path) < os.path.getsize(full_file_abs_path):
            # Patch is smaller than file. Remove the file and add patch to manifest
            os.remove(full_file_abs_path)
            file_in_manifest_name = from_marfile_entry.name + ".patch"
            file_in_manifest_abspath = patch_file_abs_path
            patch_info.append_patch_instruction(to_marfile_entry.name, file_in_manifest_name)
        else:
            # File is smaller than patch. Remove the patch and add file to manifest
            os.remove(patch_file_abs_path)
            file_in_manifest_name = from_marfile_entry.name
            file_in_manifest_abspath = full_file_abs_path
            patch_info.append_add_instruction(file_in_manifest_name)

        # Remember the result so identical (from, to) pairs in later locales
        # can reuse it.
        shas[from_marfile_entry.sha(), to_marfile_entry.sha()] = (
            file_in_manifest_name, file_in_manifest_abspath)
        patch_info.archive_files.append('"' + file_in_manifest_name + '"')
    else:
        filename, src_file_abs_path = shas[from_marfile_entry.sha(), to_marfile_entry.sha()]
        # We've already calculated the patch for this pair of files.
        if (filename.endswith(".patch")):
            # print "skipping diff: "+from_marfile_entry.name
            # Patch was smaller than file - add patch instruction to manifest
            file_in_manifest_name = to_marfile_entry.name + '.patch'
            patch_info.append_patch_instruction(to_marfile_entry.name, file_in_manifest_name)
        else:
            # File was smaller than file - add file to manifest
            file_in_manifest_name = to_marfile_entry.name
            patch_info.append_add_instruction(file_in_manifest_name)
        # Copy the pre-calculated file into our new patch work aread
        copy_file(src_file_abs_path, os.path.join(patch_info.work_dir, file_in_manifest_name))
        patch_info.archive_files.append('"' + file_in_manifest_name + '"')
|
|
|
|
|
|
2014-09-29 22:52:04 +04:00
|
|
|
|
|
2014-03-05 23:42:56 +04:00
|
|
|
|
def create_add_patch_for_file(to_marfile_entry, patch_info):
    """Copy the file into the working dir, record an "add" instruction
    for it, and list it among the files to archive."""
    dest_path = os.path.join(patch_info.work_dir, to_marfile_entry.name)
    copy_file(to_marfile_entry.abs_path, dest_path)
    patch_info.append_add_instruction(to_marfile_entry.name)
    patch_info.archive_files.append('"' + to_marfile_entry.name + '"')
|
|
|
|
|
|
2014-03-05 23:42:56 +04:00
|
|
|
|
|
|
|
|
|
def create_add_if_not_patch_for_file(to_marfile_entry, patch_info):
    """Copy the file into the working dir, record an "add-if-not"
    instruction for it, and list it among the files to archive."""
    dest_path = os.path.join(patch_info.work_dir, to_marfile_entry.name)
    copy_file(to_marfile_entry.abs_path, dest_path)
    patch_info.append_add_if_not_instruction(to_marfile_entry.name)
    patch_info.archive_files.append('"' + to_marfile_entry.name + '"')
|
|
|
|
|
|
2007-11-21 18:07:54 +03:00
|
|
|
|
|
2014-09-29 22:52:04 +04:00
|
|
|
|
def process_explicit_remove_files(dir_path, patch_info):
    """ Looks for a 'removed-files' file in the dir_path. If the removed-files does not exist
    this will throw. If found adds the removed-files
    found in that file to the patch_info"""

    # Windows and linux have this file at the root of the dir
    list_file_path = os.path.join(dir_path, "removed-files")
    if not os.path.exists(list_file_path):
        # Mac bundles keep it under Contents/Resources instead.
        list_file_path = os.path.join(dir_path, "Contents/Resources/removed-files")

    if (os.path.exists(list_file_path)):
        # The removed-files inside the MAR is xz-compressed; decompress it
        # to a temp file in the current directory before reading.
        fd, tmppath = tempfile.mkstemp('', 'tmp', os.getcwd())
        os.close(fd)
        exec_shell_cmd('xz -k -d --stdout "' + list_file_path + '" > "' + tmppath + '"')
        list_file = open(tmppath)

        lines = []
        for line in list_file:
            lines.append(line.strip())

        list_file.close()
        os.remove(tmppath)
        # Reverse-sort so entries inside a directory are removed before the
        # directory itself.
        lines.sort(reverse=True)
        for line in lines:
            # Exclude any blank and comment lines.
            if line and not line.startswith("#"):
                # Python on windows uses \ for path separators and the update
                # manifests expects / for path separators on all platforms.
                line = line.replace("\\", "/")
                patch_info.append_remove_instruction(line)
|
2007-11-21 18:07:54 +03:00
|
|
|
|
|
2018-03-15 13:32:42 +03:00
|
|
|
|
|
2018-03-15 13:31:12 +03:00
|
|
|
|
def create_partial_patch(from_dir_path, to_dir_path, patch_filename,
                         shas, patch_info, forced_updates, add_if_not_list):
    """ Builds a partial patch by comparing the files in from_dir_path to those of to_dir_path.

    forced_updates is a '|'-separated list of files to ship whole rather than
    diff; add_if_not_list contains basenames to emit as add-if-not.
    Returns patch_filename after writing the finished MAR there.
    """
    # Cannocolize the paths for safey
    from_dir_path = os.path.abspath(from_dir_path)
    to_dir_path = os.path.abspath(to_dir_path)
    # Create a hashtable of the from and to directories
    from_dir_hash, from_file_set, from_dir_set = patch_info.build_marfile_entry_hash(from_dir_path)
    to_dir_hash, to_file_set, to_dir_set = patch_info.build_marfile_entry_hash(to_dir_path)
    # Create a list of the forced updates
    forced_list = forced_updates.strip().split('|')
    # Require that the precomplete file is included in the complete update
    if "precomplete" in to_file_set:
        forced_list.append("precomplete")
    elif "Contents/Resources/precomplete" in to_file_set:
        forced_list.append("Contents/Resources/precomplete")
    # The check with \ file separators allows tests for Mac to run on Windows
    elif "Contents\Resources\precomplete" in to_file_set:
        forced_list.append("Contents\Resources\precomplete")
    else:
        raise Exception("missing precomplete file in: " + to_dir_path)

    if "removed-files" in to_file_set:
        forced_list.append("removed-files")
    elif "Contents/Resources/removed-files" in to_file_set:
        forced_list.append("Contents/Resources/removed-files")
    # The check with \ file separators allows tests for Mac to run on Windows
    elif "Contents\Resources\\removed-files" in to_file_set:
        forced_list.append("Contents\Resources\\removed-files")
    else:
        raise Exception("missing removed-files file in: " + to_dir_path)

    # Files which exist in both sets need to be patched
    patch_filenames = list(from_file_set.intersection(to_file_set))
    patch_filenames.sort(reverse=True)
    for filename in patch_filenames:
        from_marfile_entry = from_dir_hash[filename]
        to_marfile_entry = to_dir_hash[filename]
        if os.path.basename(filename) in add_if_not_list:
            # This filename is in the add if not list, explicitly add-if-not
            create_add_if_not_patch_for_file(to_dir_hash[filename], patch_info)
        elif filename in forced_list:
            print('Forcing "' + filename + '"')
            # This filename is in the forced list, explicitly add
            create_add_patch_for_file(to_dir_hash[filename], patch_info)
        else:
            if from_marfile_entry.sha() != to_marfile_entry.sha():
                # Not the same - calculate a patch
                create_partial_patch_for_file(
                    from_marfile_entry, to_marfile_entry, shas, patch_info)

    # files in to_dir not in from_dir need to added
    add_filenames = list(to_file_set - from_file_set)
    add_filenames.sort(reverse=True)
    for filename in add_filenames:
        if os.path.basename(filename) in add_if_not_list:
            create_add_if_not_patch_for_file(to_dir_hash[filename], patch_info)
        else:
            create_add_patch_for_file(to_dir_hash[filename], patch_info)

    # files in from_dir not in to_dir need to be removed
    remove_filenames = list(from_file_set - to_file_set)
    remove_filenames.sort(reverse=True)
    for filename in remove_filenames:
        patch_info.append_remove_instruction(from_dir_hash[filename].name)

    # Also honor the explicit removed-files list shipped in the target build.
    process_explicit_remove_files(to_dir_path, patch_info)

    # directories in from_dir not in to_dir need to be removed
    remove_dirnames = list(from_dir_set - to_dir_set)
    remove_dirnames.sort(reverse=True)
    for dirname in remove_dirnames:
        patch_info.append_remove_instruction(from_dir_hash[dirname].name)

    # Construct the Manifest files
    patch_info.create_manifest_files()

    # And construct the mar
    mar_cmd = 'mar -C ' + patch_info.work_dir + \
        ' -c output.mar ' + ' '.join(patch_info.archive_files)
    exec_shell_cmd(mar_cmd)

    # Copy mar to final destination
    patch_file_dir = os.path.split(patch_filename)[0]
    if not os.path.exists(patch_file_dir):
        os.makedirs(patch_file_dir)
    shutil.copy2(os.path.join(patch_info.work_dir, "output.mar"), patch_filename)

    return patch_filename
|
2014-03-05 23:42:56 +04:00
|
|
|
|
|
2018-03-15 13:32:42 +03:00
|
|
|
|
|
2007-11-21 18:07:54 +03:00
|
|
|
|
def usage():
    """Print the command-line options this script accepts."""
    for help_line in ("-h for help", "-f for patchlist_file"):
        print(help_line)
|
2007-11-21 18:07:54 +03:00
|
|
|
|
|
2018-03-15 13:32:42 +03:00
|
|
|
|
|
2014-09-29 22:52:04 +04:00
|
|
|
|
def get_buildid(work_dir):
    """ extracts buildid from MAR

    Looks for application.ini at the root of work_dir (Windows/Linux) or
    under Contents/Resources (Mac), decompresses it, and returns the value
    of the first line starting with 'BuildID'. Returns '' (with a warning
    printed) when the file or the key cannot be found.
    """
    ini = '%s/application.ini' % work_dir
    if not os.path.exists(ini):
        # Mac bundles keep application.ini under Contents/Resources.
        ini = '%s/Contents/Resources/application.ini' % work_dir
        if not os.path.exists(ini):
            print('WARNING: application.ini not found, cannot find build ID')
            return ''

    # The ini inside the MAR is xz-compressed; decompress to a temp file.
    fd, tmppath = tempfile.mkstemp('', 'tmp', os.getcwd())
    os.close(fd)
    exec_shell_cmd('xz -k -d --stdout "' + ini + '" > "' + tmppath + '"')
    # NOTE(review): 'file' shadows a historic builtin name; local only.
    file = open(tmppath)
    for line in file:
        if line.find('BuildID') == 0:
            # Found it: clean up the temp file before returning the value.
            file.close()
            os.remove(tmppath)
            return line.strip().split('=')[1]
    print('WARNING: cannot find build ID in application.ini')
    file.close()
    os.remove(tmppath)
    return ''
|
2008-01-09 22:49:23 +03:00
|
|
|
|
|
2018-03-15 13:32:42 +03:00
|
|
|
|
|
2008-10-09 16:53:33 +04:00
|
|
|
|
def decode_filename(filepath):
    """ Breaks filename/dir structure into component parts based on regex
    for example: firefox-3.0b3pre.en-US.linux-i686.complete.mar
    Or linux-i686/en-US/firefox-3.0b3.complete.mar
    Returns dict with keys product, version, locale, platform, type

    Raises Exception when neither naming scheme matches.
    """
    try:
        # Raw strings avoid invalid-escape warnings; the final dot before
        # 'mar' is escaped so it matches a literal '.' (the original '(.mar)'
        # matched any character there).
        m = re.search(
            r'(?P<product>\w+)(-)(?P<version>\w+\.\w+(\.\w+){0,2})(\.)(?P<locale>.+?)(\.)(?P<platform>.+?)(\.)(?P<type>\w+)(\.mar)',  # NOQA: E501
            os.path.basename(filepath))
        return m.groupdict()
    except Exception as exc:
        # Fall back to the directory-structured scheme platform/locale/name.
        try:
            m = re.search(
                r'(?P<platform>.+?)/(?P<locale>.+?)/(?P<product>\w+)-(?P<version>\w+\.\w+)\.(?P<type>\w+)\.mar',  # NOQA: E501
                filepath)
            return m.groupdict()
        except Exception:
            raise Exception("could not parse filepath %s: %s" % (filepath, exc))
|
2008-01-09 22:49:23 +03:00
|
|
|
|
|
2018-03-15 13:32:42 +03:00
|
|
|
|
|
2008-01-09 22:49:23 +03:00
|
|
|
|
def create_partial_patches(patches):
    """ Given the patches generates a set of partial patches

    Each entry in patches is a comma-separated line:
    from_mar,to_mar,output_patch_mar,forced_updates.
    Returns a list of metadata dicts, one per generated patch.
    """
    # Cache of already-computed diffs, shared across all patches in this run.
    shas = {}

    work_dir_root = None
    metadata = []
    try:
        work_dir_root = tempfile.mkdtemp('-fastmode', 'tmp', os.getcwd())
        print("Building patches using work dir: %s" % (work_dir_root))

        # Iterate through every patch set in the patch file
        patch_num = 1
        for patch in patches:
            startTime = time.time()

            from_filename, to_filename, patch_filename, forced_updates = patch.split(",")
            from_filename, to_filename, patch_filename = os.path.abspath(
                from_filename), os.path.abspath(to_filename), os.path.abspath(patch_filename)

            # Each patch iteration uses its own work dir
            work_dir = os.path.join(work_dir_root, str(patch_num))
            os.mkdir(work_dir)

            # Extract from mar into from dir
            work_dir_from = os.path.join(work_dir, "from")
            os.mkdir(work_dir_from)
            extract_mar(from_filename, work_dir_from)
            from_decoded = decode_filename(from_filename)
            from_buildid = get_buildid(work_dir_from)
            from_shasum = hashlib.sha1(open(from_filename, "rb").read()).hexdigest()
            # NOTE(review): sizes to_filename, not from_filename — looks like
            # a copy/paste bug; confirm before relying on 'from_size'.
            from_size = str(os.path.getsize(to_filename))

            # Extract to mar into to dir
            work_dir_to = os.path.join(work_dir, "to")
            os.mkdir(work_dir_to)
            extract_mar(to_filename, work_dir_to)
            # NOTE(review): decodes from_filename, not to_filename — likely a
            # copy/paste bug; 'to_version' below comes from this dict.
            to_decoded = decode_filename(from_filename)
            to_buildid = get_buildid(work_dir_to)
            to_shasum = hashlib.sha1(open(to_filename, 'rb').read()).hexdigest()
            to_size = str(os.path.getsize(to_filename))

            mar_extract_time = time.time()

            # Build the actual partial patch; the manifests from a complete
            # update are excluded from the diff set.
            partial_filename = create_partial_patch(work_dir_from, work_dir_to, patch_filename,
                                                    shas, PatchInfo(work_dir, [
                                                        'update.manifest',
                                                        'updatev2.manifest',
                                                        'updatev3.manifest'
                                                    ], []),
                                                    forced_updates,
                                                    ['channel-prefs.js', 'update-settings.ini'])
            partial_shasum = hashlib.sha1(open(partial_filename, "rb").read()).hexdigest()
            partial_size = str(os.path.getsize(partial_filename))

            metadata.append({
                'to_filename': os.path.basename(to_filename),
                'from_filename': os.path.basename(from_filename),
                'partial_filename': os.path.basename(partial_filename),
                'to_buildid': to_buildid,
                'from_buildid': from_buildid,
                'to_sha1sum': to_shasum,
                'from_sha1sum': from_shasum,
                'partial_sha1sum': partial_shasum,
                'to_size': to_size,
                'from_size': from_size,
                'partial_size': partial_size,
                'to_version': to_decoded['version'],
                'from_version': from_decoded['version'],
                'locale': from_decoded['locale'],
                'platform': from_decoded['platform'],
            })
            print("done with patch %s/%s time (%.2fs/%.2fs/%.2fs) (mar/patch/total)" % (str(patch_num),  # NOQA: E501
                                                                                        str(len(patches)), mar_extract_time - startTime, time.time() - mar_extract_time, time.time() - startTime))  # NOQA: E501
            patch_num += 1
        return metadata
    finally:
        # If we fail or get a ctrl-c during run be sure to clean up temp dir
        if (work_dir_root and os.path.exists(work_dir_root)):
            shutil.rmtree(work_dir_root)
|
2007-11-21 18:07:54 +03:00
|
|
|
|
|
2018-03-15 13:32:42 +03:00
|
|
|
|
|
2014-03-05 23:42:56 +04:00
|
|
|
|
def main(argv):
    """Parse command-line options and generate the partial patches listed
    in the patchlist file given with -f/--patchlist_file.

    Exits with status 2 on bad or missing options.
    """
    patchlist_file = None
    try:
        opts, args = getopt.getopt(argv, "hf:", ["help", "patchlist_file="])
        for opt, arg in opts:
            if opt in ("-h", "--help"):
                usage()
                sys.exit()
            elif opt in ("-f", "--patchlist_file"):
                patchlist_file = arg
    except getopt.GetoptError:
        usage()
        sys.exit(2)

    if not patchlist_file:
        usage()
        sys.exit(2)

    # One patch description per line. A context manager guarantees the file
    # is closed (the previous open()/readlines()/close() leaked on error).
    with open(patchlist_file, 'r') as f:
        patches = [line for line in f]
    create_partial_patches(patches)
|
2007-11-21 18:07:54 +03:00
|
|
|
|
|
2018-03-15 13:32:42 +03:00
|
|
|
|
|
2007-11-21 18:07:54 +03:00
|
|
|
|
# Script entry point: forward all command-line arguments (minus the
# program name) to main().
if __name__ == "__main__":
    main(sys.argv[1:])
|