#!/bin/python3
assert __name__ == '__main__'

'''
To update ANGLE in Gecko, use Windows with git-bash, and set up depot_tools and
python3.

Upstream: https://chromium.googlesource.com/angle/angle

Our repo: https://github.com/mozilla/angle
It has branches like 'firefox-60', which is the branch we use for pulling into
Gecko with this script.

This script leaves a record of the merge-base and cherry-picks that we pull into
Gecko. (gfx/angle/cherries.log)
'''
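# Example invocation (illustrative only; assumes the script is run from within
# the ANGLE checkout, since REPO_DIR below is the current working directory,
# and that the mozilla/angle branch to pull is reachable as 'origin/firefox-60'):
#   python3 /path/to/gecko/gfx/angle/update-angle.py origin/firefox-60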
import json
import os
from pathlib import *
import re
import shutil
import subprocess
import sys

REPO_DIR = Path.cwd()
GECKO_ANGLE_DIR = Path(__file__).parent
GECKO_DIR = GECKO_ANGLE_DIR.parent.parent

OUT_DIR = 'out'

COMMON_HEADER = [
    '# Generated by update-angle.py',
    '',
    "include('../../moz.build.common')",
]

# --

def print_now(*args):
    print(*args)
    sys.stdout.flush()


def run_checked(*args, **kwargs):
    print_now(' ', args)
    return subprocess.run(args, check=True, shell=True, **kwargs)


def sorted_items(x):
    for k in sorted(x.keys()):
        yield (k, x[k])

# --

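# Record the merge-base and the cherry-picks sitting on top of it, so the state
# of this import stays reproducible from gfx/angle/cherries.log.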
CHERRIES_PATH = Path(GECKO_ANGLE_DIR, 'cherries.log')
print_now('Logging cherries ({})'.format(CHERRIES_PATH))

MERGE_BASE = sys.argv[1]
MERGE_BASE = run_checked('git', 'merge-base', 'HEAD', MERGE_BASE,
                         stdout=subprocess.PIPE).stdout.decode().strip()

mb_info = run_checked('git', 'log', '{}~1..{}'.format(MERGE_BASE, MERGE_BASE),
                      stdout=subprocess.PIPE).stdout
cherries = run_checked('git', 'log', MERGE_BASE + '..', stdout=subprocess.PIPE).stdout

with open(CHERRIES_PATH, 'wb') as f:
    f.write(cherries)
    f.write(b'\nCherries picked')
    f.write(b'\n' + (b'=' * 80))
    f.write(b'\nMerge base')
    f.write(b'\n\n')
    f.write(mb_info)

# --

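# Generate a GN build directory and query it for the target graph. ANGLE's GN
# build is the source of truth for sources, defines, and flags; the graph is
# translated into moz.build files further down.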
print_now('Importing graph')

shutil.rmtree(OUT_DIR, True)

run_checked('gn', 'gen', OUT_DIR)

GN_ARGS = '''
# Build arguments go here.
# See "gn args <out_dir> --list" for available build arguments.
is_clang = false
angle_enable_gl = false
angle_enable_gl_null = false
angle_enable_null = false
angle_enable_vulkan = false
'''[1:]
with open(OUT_DIR + '/args.gn', 'wb') as f:
    f.write(GN_ARGS.encode())

# --

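# 'gn desc --format=json <out_dir> *' dumps one JSON object keyed by target
# label (e.g. '//:libGLESv2'), with per-target fields such as 'type', 'deps',
# 'sources', 'defines', and 'include_dirs'.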
p = run_checked('gn', 'desc', '--format=json', OUT_DIR, '*', stdout=subprocess.PIPE)

print_now('Processing graph')
descs = json.loads(p.stdout.decode())

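# The hack below force-adds the Linux/Mac system_utils sources to
# //:angle_common, presumably because they don't appear in a graph generated on
# Windows but still need to land in gfx/angle/checkout/ for non-Windows builds.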
# HACKHACKHACK
common = descs['//:angle_common']
common['sources'] += [
    '//src/common/system_utils_linux.cpp',
    '//src/common/system_utils_mac.cpp',
]

# --

# Inject node key and child links into desc dicts
for (k, v) in descs.items():
    v['target_name'] = k
    v['dep_children'] = [descs[x] for x in v['deps']]
    assert v['public'] == '*', k

    v['includes'] = []
    v['just_sources'] = []

    def fn(x):
        (_, e) = x.rsplit('.', 1)
        if e in ['h', 'inl']:
            v['includes'].append(x)
            return
        elif e in ['cc', 'cpp']:
            v['just_sources'].append(x)
            return

    list(map(fn, v.get('sources', [])))
    if v['type'] == 'action':
        list(map(fn, v['outputs']))

# --
# Ready to traverse

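# Depth-first traversal over target descriptions. pre_recurse_func returns
# either (children,) or (children, post_recurse_func); post_recurse_func runs
# after all children have been visited. key_func deduplicates visits.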
def traverse(roots, pre_recurse_func, key_func=None):
    visited = set()

    def identity(x):
        return x

    if not key_func:
        key_func = identity

    def recurse(cur):
        key = key_func(cur)
        if key in visited:
            return
        visited.add(key)

        t = pre_recurse_func(cur)
        post_recurse_func = None
        try:
            (children, post_recurse_func) = t
        except ValueError:
            (children,) = t

        for x in children:
            recurse(x)

        if post_recurse_func:
            post_recurse_func(cur)
        return

    for x in roots:
        recurse(x)
    return

ROOTS = ['//:translator', '//:libEGL', '//:libGLESv2']
ROOTS = list(map(descs.get, ROOTS))

# Gather real targets:
real_targets = []

def desc_key(x):
    return x['target_name']

def gather_includable_includes_post(x):
    x['includable_includes'] = x['includes']
    x['includable_sources'] = x['just_sources']
    x['all_include_dirs'] = x.get('include_dirs', [])
    for y in x['dep_children']:
        x['includable_includes'] += y['includable_includes']
        x['all_include_dirs'] += y['all_include_dirs']
        if y['type'] == 'source_set':
            x['includable_sources'] += y['includable_sources']

def gather_real_targets(cur):
    print_now('  ' + cur['type'], cur['target_name'])
    if cur['type'] in ['shared_library', 'static_library']:
        real_targets.append(cur)

    return (cur['dep_children'], gather_includable_includes_post)

traverse(ROOTS, gather_real_targets, desc_key)

# --

print_now('Running required actions')

# Build the ':commit_id' 'action' target to generate 'commit.h'.
run_checked('ninja', '-C', OUT_DIR, ':commit_id')

# --

print_now('Export targets')

# Clear our dest directories
targets_dir = Path(GECKO_ANGLE_DIR, 'targets')
checkout_dir = Path(GECKO_ANGLE_DIR, 'checkout')
shutil.rmtree(targets_dir, True)
shutil.rmtree(checkout_dir, True)
targets_dir.mkdir(exist_ok=True)
checkout_dir.mkdir(exist_ok=True)

# Export our targets

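# Helpers for emitting moz.build lists: append_arr() renders a
# "NAME += [ ... ]" block with case-insensitively sorted entries, and skips the
# block entirely when the list is empty.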
def sortedi(x):
    return sorted(x, key=str.lower)

def append_arr(dest, name, vals):
    if not vals:
        return

    dest.append('')
    dest.append('{} += ['.format(name))
    for x in sortedi(vals):
        dest.append("    '{}',".format(x))
    dest.append(']')
    return

INCLUDE_REGEX = re.compile('# *include *"(.+)"')

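# Headers for backends we deliberately don't export (Vulkan, GLX, CGL, Ozone,
# Android, WinRT). The include checker below treats them as valid so it doesn't
# warn about every reference to them.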
IGNORED_INCLUDES = {
    'compiler/translator/TranslatorVulkan.h',
    'libANGLE/renderer/d3d/d3d11/winrt/NativeWindow11WinRT.h',
    'libANGLE/renderer/gl/glx/DisplayGLX.h',
    'libANGLE/renderer/gl/cgl/DisplayCGL.h',
    'libANGLE/renderer/gl/egl/ozone/DisplayOzone.h',
    'libANGLE/renderer/gl/egl/android/DisplayAndroid.h',
    'libANGLE/renderer/vulkan/win32/DisplayVkWin32.h',
    'libANGLE/renderer/vulkan/xcb/DisplayVkXcb.h',
}

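# Defines we expect 'gn desc' to report. True: passed through as DEFINES in the
# generated moz.build. False: emitted commented-out. Anything not listed here
# gets flagged as unrecognized during export.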
REGISTERED_DEFINES = {
    'ANGLE_ENABLE_D3D11': True,
    'ANGLE_ENABLE_D3D9': True,
    'ANGLE_ENABLE_DEBUG_ANNOTATIONS': True,
    'ANGLE_ENABLE_NULL': False,
    'ANGLE_ENABLE_OPENGL': False,
    'ANGLE_ENABLE_OPENGL_NULL': False,
    'ANGLE_ENABLE_ESSL': True,
    'ANGLE_ENABLE_GLSL': True,
    'ANGLE_ENABLE_HLSL': True,
    'ANGLE_GENERATE_SHADER_DEBUG_INFO': True,
    'ANGLE_IS_64_BIT_CPU': False,
    'ANGLE_PRELOADED_D3DCOMPILER_MODULE_NAMES': False,
    'CERT_CHAIN_PARA_HAS_EXTRA_FIELDS': False,
    'CHROMIUM_BUILD': False,
    'COMPONENT_BUILD': False,
    'constexpr14': False,
    'DYNAMIC_ANNOTATIONS_ENABLED': True,
    'EGL_EGLEXT_PROTOTYPES': True,
    'EGLAPI': True,
    'FIELDTRIAL_TESTING_ENABLED': False,
    'FULL_SAFE_BROWSING': False,
    'GL_API': True,
    'GL_APICALL': True,
    'GL_GLEXT_PROTOTYPES': True,
    'GPU_INFO_USE_SETUPAPI': True,
    'LIBANGLE_IMPLEMENTATION': True,
    'LIBEGL_IMPLEMENTATION': True,
    'LIBGLESV2_IMPLEMENTATION': True,
    'NOMINMAX': True,
    'NO_TCMALLOC': False,

    # Else: gfx/angle/checkout/src/libANGLE/renderer/d3d/d3d11/win32/NativeWindow11Win32.cpp(89): error C2787: 'IDCompositionDevice': no GUID has been associated with this object
    'NTDDI_VERSION': True,

    'PSAPI_VERSION': False,
    'SAFE_BROWSING_CSD': False,
    'SAFE_BROWSING_DB_LOCAL': False,
    'UNICODE': True,
    'USE_AURA': False,
    'V8_DEPRECATION_WARNINGS': False,
    'WIN32': False,
    'WIN32_LEAN_AND_MEAN': False,
    'WINVER': False,
    'WTF_USE_DYNAMIC_ANNOTATIONS': False,
    '_ATL_NO_OPENGL': True,
    '_CRT_RAND_S': True,
    '_CRT_SECURE_NO_DEPRECATE': True,
    '_DEBUG': False,
    '_HAS_EXCEPTIONS': True,
    '_SCL_SECURE_NO_DEPRECATE': True,
    '_SECURE_ATL': True,
    '_UNICODE': True,
    '_USING_V110_SDK71_': False,
    '_WIN32_WINNT': False,
    '_WINDOWS': False,
    '__STD_C': False,
}

def is_used_file_name(x):
    (_, e) = x.rsplit('.', 1)
    if e in ['h', 'cc', 'cpp', 'inl']:
        return True
    return False

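# Scan an exported file for '#include "..."' directives and warn when the
# included file is neither reachable through the target's include dirs nor
# listed in IGNORED_INCLUDES. This stands in for `gn check`, which seems to be
# broken here (see export_target below).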
def check_includes(target_name, cur, avail_files, include_dirs):
    assert cur.startswith('//'), cur
    cur = cur[2:]

    if not is_used_file_name(cur):
        return

    (cur_dir, _) = os.path.split(cur)
    include_dirs = [
        '//',
        '//' + cur_dir + '/',
    ] + list(include_dirs)

    def is_valid_include(inc):
        if inc in IGNORED_INCLUDES:
            return True

        for inc_dir in include_dirs:
            inc_path = inc_dir + inc
            if inc_path in avail_files:
                return True

        return False

    line_num = 0
    with open(cur, 'rb') as f:
        for line in f:
            line = line.decode()
            line_num += 1
            m = INCLUDE_REGEX.match(line)
            if not m:
                continue
            inc = m.group(1)
            if not is_valid_include(inc):
                print('Warning in {}: {}:{}: Invalid include: {}'.format(target_name, cur, line_num, inc))


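# export_target() flattens one shared/static library target plus everything it
# pulls in (source_sets, groups, actions) into a single moz.build under
# targets/<name>/, and records which files must be copied into checkout/.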
total_used_files = set()

def export_target(root):
    name = root['target_name']
    assert name.startswith('//:')
    name = name[3:]

    accum_desc = dict(root)
    use_libs = set()

    checkable_sources = set()

    target_includable_files = set()

    def pre(cur):
        assert cur.get('allow_circular_includes_from', []) == [], cur['target_name']
        children = cur['dep_children']

        if cur != root:
            if cur['type'] in ['shared_library', 'static_library']:
                children = []

                name = cur['target_name']
                assert name.startswith('//:')
                name = name[3:]
                use_libs.add(name)
            elif cur['type'] in ('source_set', 'group', 'action'):
                for (k, v) in cur.items():
                    if type(v) == list:
                        vs = accum_desc.setdefault(k, [])
                        vs += v
                    else:
                        accum_desc.setdefault(k, v)

        return (children,)

    traverse([root], pre, desc_key)

    # Check includes, since `gn check` seems to be broken
    includable = set(root['includable_sources'] + root['includable_includes'])
    for x in includable:
        check_includes(name, x, includable, set(accum_desc['all_include_dirs']))

    total_used_files.update(includable, root['sources'])  # With 'sources' to get rc/defs.

    # --

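    # Emit the moz.build: start from COMMON_HEADER, then DEFINES (commented out
    # when REGISTERED_DEFINES says False), then flags, sources, and libraries.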
    target_dir = Path(targets_dir, name)
    target_dir.mkdir(exist_ok=True)

    lines = COMMON_HEADER[:]

    lines.append('')
    for x in sorted(set(accum_desc['defines'])):
        try:
            (k, v) = x.split('=', 1)
            v = "'{}'".format(v)
        except ValueError:
            (k, v) = (x, 'True')
        try:
            line = "DEFINES['{}'] = {}".format(k, v)
            if REGISTERED_DEFINES[k] == False:
                line = '#' + line
            lines.append(line)
        except KeyError:
            print('[{}] Unrecognized define: {}'.format(name, k))

    cxxflags = set(accum_desc['cflags'] + accum_desc['cflags_cc'])

    def fixup_paths(listt):
        for x in set(listt):
            assert x.startswith('//'), x
            yield '../../checkout/' + x[2:]

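    # Partition the sources: headers and generator inputs are skipped,
    # .cpp/.cc files go into SOURCES (split per OS when suffixed
    # _win/_linux/_mac), and .rc/.def files become RCFILE/DEFFILE.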
    sources = []
    sources_by_os_arch = {}
    extras = dict()
    for x in fixup_paths(accum_desc['sources']):
        (b, e) = x.rsplit('.', 1)
        if e in ['h', 'y', 'l', 'inl']:
            continue
        elif e in ['cpp', 'cc']:
            if b.endswith('_win'):
                sources_by_os_arch.setdefault('WINNT', []).append(x)
            elif b.endswith('_linux'):
                sources_by_os_arch.setdefault('Linux', []).append(x)
            elif b.endswith('_mac'):
                sources_by_os_arch.setdefault('Darwin', []).append(x)
            else:
                sources.append(x)
            continue
        elif e == 'rc':
            assert 'RCFILE' not in extras
            extras['RCFILE'] = "'{}'".format(x)
            continue
        elif e == 'def':
            assert 'DEFFILE' not in extras
            extras['DEFFILE'] = "SRCDIR + '/{}'".format(x)
            continue
        else:
            assert False, x

    ldflags = filter(lambda x: not x.startswith('/DEF:'), set(accum_desc['ldflags']))
    os_libs = list(map(lambda x: x[:-len('.lib')], set(accum_desc.get('libs', []))))

    def append_arr_commented(dest, name, src):
        lines = []
        append_arr(lines, name, src)
        lines = map(lambda x: '#' + x, lines)
        dest += lines

    append_arr(lines, 'LOCAL_INCLUDES', fixup_paths(accum_desc['include_dirs']))
    append_arr_commented(lines, 'CXXFLAGS', cxxflags)
    append_arr(lines, 'SOURCES', sources)

    for (os_arch, v) in sorted_items(sources_by_os_arch):
        lines += [
            "if CONFIG['OS_ARCH'] == '{}':".format(os_arch),
            "    SOURCES += [",
        ]
        lines += ("{}'{}',".format(' ' * 8, x) for x in sorted(set(v)))
        lines += ["    ]"]

    append_arr(lines, 'USE_LIBS', use_libs)
    append_arr(lines, 'DIRS', ['../' + x for x in use_libs])
    append_arr(lines, 'OS_LIBS', os_libs)
    append_arr_commented(lines, 'LDFLAGS', ldflags)

    for (k, v) in sorted(extras.items()):
        lines.append('{} = {}'.format(k, v))

    lib_type = root['type']
    if lib_type == 'shared_library':
        lines.append("\nGeckoSharedLibrary('{}', linkage=None)".format(name))
    elif lib_type == 'static_library':
        lines.append("\nLibrary('{}')".format(name))
    else:
        assert False, lib_type

    # Write it out

    mozbuild = Path(target_dir, 'moz.build')
    print_now('  Writing {}'.format(mozbuild))
    with mozbuild.open('w', newline='\n') as f:
        for x in lines:
            f.write(x + '\n')

    return


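# For reference, a generated targets/<name>/moz.build has roughly this shape
# (illustrative sketch only, not actual output):
#
#   # Generated by update-angle.py
#
#   include('../../moz.build.common')
#
#   DEFINES['LIBGLESV2_IMPLEMENTATION'] = True
#   ...
#
#   LOCAL_INCLUDES += [
#       '../../checkout/include/',
#       '../../checkout/src/',
#   ]
#
#   SOURCES += [
#       '../../checkout/src/...',
#   ]
#
#   USE_LIBS += [
#       ...
#   ]
#
#   GeckoSharedLibrary('libGLESv2', linkage=None)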
for x in real_targets:
    export_target(x)

# Copy all the files

print_now('Migrate files')

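# Copy every referenced file from the ANGLE checkout into gfx/angle/checkout/,
# normalizing CRLF line endings to LF along the way.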
total_used_files = sorted(set(total_used_files))
i = 0
for x in total_used_files:
    i += 1
    sys.stdout.write('\r  Copying {}/{}'.format(i, len(total_used_files)))
    sys.stdout.flush()
    assert x.startswith('//'), x
    x = x[2:]

    src = Path(REPO_DIR, x)
    dest = Path(checkout_dir, x)
    dest.parent.mkdir(parents=True, exist_ok=True)
    data = src.read_bytes()
    data = data.replace(b'\r\n', b'\n')
    dest.write_bytes(data)

print('\nDone')