gecko-dev/gfx/angle/update-angle.py

#! /usr/bin/env python3
assert __name__ == '__main__'
'''
To update ANGLE in Gecko, use Windows with git-bash, and set up depot_tools, python2, and
python3. Because depot_tools expects `python` to be `python2` (shame!), python2 must come
before python3 in your PATH.
Upstream: https://chromium.googlesource.com/angle/angle
Our repo: https://github.com/mozilla/angle
It has branches like 'firefox-60'; these are the branches we pull into Gecko with
this script.
This script leaves a record of the merge-base and cherry-picks that we pull into
Gecko. (gfx/angle/cherries.log)
ANGLE<->Chrome version mappings are here: https://omahaproxy.appspot.com/
An easy choice is to grab the ANGLE branch used by the current Chrome Beta.
## Usage
Prepare your env:
~~~
export PATH="$PATH:/path/to/depot_tools"
~~~
If this is a new repo, don't forget:
~~~
# In the angle repo:
./scripts/bootstrap.py
gclient sync
~~~
Update: (in the angle repo)
~~~
# In the angle repo:
/path/to/gecko/gfx/angle/update-angle.py origin/chromium/XXXX
git push moz # Push the firefox-XX branch to github.com/mozilla/angle
~~~
'''
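# Rough flow of this script (the code below is authoritative):
#   1. Record the merge-base and cherry-picks into gfx/angle/cherries.log
#      (skipped with --dry).
#   2. Run `gn gen` + `gn desc` on the ANGLE checkout to import the target graph.
#   3. For each root target (translator, libEGL, libGLESv2), accumulate its
#      non-library deps and write gfx/angle/targets/<name>/moz.build.
#   4. Copy every referenced file into gfx/angle/checkout/.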
import json
import os
from pathlib import *
import re
import shutil
import subprocess
import sys
from vendor_from_git import * # Provides print_now, run_checked, record_cherry_picks.
REPO_DIR = Path.cwd()
GECKO_ANGLE_DIR = Path(__file__).parent
OUT_DIR = 'out'
COMMON_HEADER = [
    '# Generated by update-angle.py',
    '',
    "include('../../moz.build.common')",
]
VENDOR_PREREQ_TARGETS = [
    '//:commit_id', # Generate 'commit.h'.
]
ROOTS = ['//:translator', '//:libEGL', '//:libGLESv2']
DRY_RUN = '--dry' in sys.argv
ACTION_PREFIX = ''
if DRY_RUN:
    ACTION_PREFIX = '(not) '
GN_ENV = dict(os.environ)
GN_ENV['DEPOT_TOOLS_WIN_TOOLCHAIN'] = '0'
(MERGE_BASE_ORIGIN, ) = sys.argv[1:] # Not always 'origin'!
# --------------------------------------
def sorted_items(x):
    for k in sorted(x.keys()):
        yield (k, x[k])
def collapse_dotdots(path):
    split = path.split('/')
    ret = []
    for x in split:
        if x == '..' and ret:
            ret.pop()
            continue
        ret.append(x)
        continue
    return '/'.join(ret)
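# Added sanity-check sketch: collapse_dotdots() resolves '..' segments purely
# textually, without touching the filesystem. (The paths here are made up.)
assert collapse_dotdots('out/../gen/angle/commit.h') == 'gen/angle/commit.h'
assert collapse_dotdots('a/b/../../c') == 'c'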
def traverse(roots, pre_recurse_func, key_func=id):
    visited = set()

    def recurse(cur):
        key = key_func(cur)
        if key in visited:
            return
        visited.add(key)

        t = pre_recurse_func(cur)
        post_recurse_func = None
        try:
            (children, post_recurse_func) = t
        except ValueError:
            (children,) = t

        for x in children:
            recurse(x)

        if post_recurse_func:
            post_recurse_func(cur)
        return

    for x in roots:
        recurse(x)
    return
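# Added sanity-check sketch: traverse() calls pre_recurse_func(node) and expects
# either `(children,)` or `(children, post_func)` back, visiting each node at most
# once (deduplicated by key_func). The tiny synthetic graph below is illustrative.
_ex_leaf = {'name': 'leaf', 'deps': []}
_ex_root = {'name': 'root', 'deps': [_ex_leaf, _ex_leaf]}
_ex_order = []
traverse([_ex_root],
         lambda n: (n['deps'], lambda m: _ex_order.append(m['name'])),
         key_func=lambda n: n['name'])
assert _ex_order == ['leaf', 'root']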
# --------------------------------------
if not DRY_RUN:
    record_cherry_picks(GECKO_ANGLE_DIR, MERGE_BASE_ORIGIN)
# --
print_now('Importing graph')
shutil.rmtree(OUT_DIR, True)
run_checked('gn', 'gen', OUT_DIR, shell=True, env=GN_ENV)
GN_ARGS = '''
# Build arguments go here.
# See "gn args <out_dir> --list" for available build arguments.
is_clang = false
angle_enable_gl = false
angle_enable_gl_null = false
angle_enable_null = false
angle_enable_vulkan = false
'''[1:]
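# These args disable the GL, NULL, and Vulkan backends and select MSVC
# (is_clang=false), so the graph we import (and thus what we vendor) is the
# Windows D3D9/D3D11 configuration.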
with open(OUT_DIR + '/args.gn', 'wb') as f:
    f.write(GN_ARGS.encode())
# --
p = run_checked('gn', 'desc', '--format=json', OUT_DIR, '*', stdout=subprocess.PIPE,
                shell=True, env=GN_ENV)
print_now('Processing graph')
descs = json.loads(p.stdout.decode())
# HACKHACKHACK: Add the Linux and Mac system_utils sources to angle_common by hand,
# so they get vendored even though this script runs the Windows gn config.
common = descs['//:angle_common']
common['sources'] += [
    '//src/common/system_utils_linux.cpp',
    '//src/common/system_utils_mac.cpp',
]
# --
for (k, v) in descs.items():
    for (k2, v2) in v.items():
        if type(v2) == list:
            v[k2] = tuple(v2) # Freeze lists

    v['target_name'] = k
    v['dep_nodes'] = tuple([descs[x] for x in v['deps']])
    assert v['public'] == '*', k
# --
# Ready to traverse
ROOTS = [descs[k] for k in ROOTS]
# Gather real targets:
real_targets = []
def gather_real_targets(cur):
    print_now(' ' + cur['type'], cur['target_name'])
    if cur['type'] in ['shared_library', 'static_library']:
        real_targets.append(cur)

    def post(x):
        x['sources_with_deps'] = x.get('sources', ())
        x['include_dirs_with_deps'] = x.get('include_dirs', ())
        for y in x['dep_nodes']:
            x['sources_with_deps'] += y['sources_with_deps']
            x['include_dirs_with_deps'] += y['include_dirs_with_deps']

    return (cur['dep_nodes'], post)
traverse(ROOTS, gather_real_targets)
# --
def sortedi(x):
    return sorted(x, key=str.lower)
def append_arr(dest, name, vals, indent=0):
    if not vals:
        return

    dest.append('{}{} += ['.format(' '*4*indent, name))
    for x in sortedi(vals):
        dest.append("{}'{}',".format(' '*4*(indent+1), x))
    dest.append('{}]'.format(' '*4*indent))
    dest.append('')
    return
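# Added sanity-check sketch (illustrative file names): append_arr() emits a
# case-insensitively sorted moz.build list assignment plus a trailing blank line.
_ex_lines = []
append_arr(_ex_lines, 'SOURCES', ['b.cpp', 'A.cpp'])
assert _ex_lines == ['SOURCES += [', "    'A.cpp',", "    'b.cpp',", ']', '']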
INCLUDE_REGEX = re.compile('# *include +([<"])([^>"]+)[>"]')
IGNORED_INCLUDES = {
    'compiler/translator/TranslatorVulkan.h',
    'libANGLE/renderer/d3d/d3d11/winrt/NativeWindow11WinRT.h',
    'libANGLE/renderer/gl/glx/DisplayGLX.h',
    'libANGLE/renderer/gl/cgl/DisplayCGL.h',
    'libANGLE/renderer/gl/egl/ozone/DisplayOzone.h',
    'libANGLE/renderer/gl/egl/android/DisplayAndroid.h',
    'libANGLE/renderer/gl/wgl/DisplayWGL.h',
    'libANGLE/renderer/null/DisplayNULL.h',
    'libANGLE/renderer/vulkan/android/DisplayVkAndroid.h',
    'libANGLE/renderer/vulkan/win32/DisplayVkWin32.h',
    'libANGLE/renderer/vulkan/xcb/DisplayVkXcb.h',
    'kernel/image.h',
}
IGNORED_INCLUDE_PREFIXES = {
    'android/',
    'Carbon/',
    'CoreFoundation/',
    'CoreServices/',
    'IOSurface/',
    'mach/',
    'mach-o/',
    'OpenGL/',
    'pci/',
    'sys/',
    'wrl/',
    'X11/',
}
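# REGISTERED_DEFINES below controls how defines reported by gn are written into
# the generated moz.build files: True emits an active DEFINES[...] line, False
# emits the line commented out, and anything missing from this table is printed
# as an "Unrecognized define".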
REGISTERED_DEFINES = {
    'ANGLE_ENABLE_D3D11': True,
    'ANGLE_ENABLE_D3D9': True,
    'ANGLE_ENABLE_DEBUG_ANNOTATIONS': True,
    'ANGLE_ENABLE_NULL': False,
    'ANGLE_ENABLE_OPENGL': False,
    'ANGLE_ENABLE_OPENGL_NULL': False,
    'ANGLE_ENABLE_ESSL': True,
    'ANGLE_ENABLE_GLSL': True,
    'ANGLE_ENABLE_HLSL': True,
    'ANGLE_GENERATE_SHADER_DEBUG_INFO': True,
    'ANGLE_IS_64_BIT_CPU': False,
    'ANGLE_PRELOADED_D3DCOMPILER_MODULE_NAMES': False,
    'CERT_CHAIN_PARA_HAS_EXTRA_FIELDS': False,
    'CHROMIUM_BUILD': False,
    'COMPONENT_BUILD': False,
    'constexpr14': False,
    'DYNAMIC_ANNOTATIONS_ENABLED': True,
    'EGL_EGLEXT_PROTOTYPES': True,
    'EGLAPI': True,
    'FIELDTRIAL_TESTING_ENABLED': False,
    'FULL_SAFE_BROWSING': False,
    'GL_API': True,
    'GL_APICALL': True,
    'GL_GLEXT_PROTOTYPES': True,
    'GPU_INFO_USE_SETUPAPI': True,
    'LIBANGLE_IMPLEMENTATION': True,
    'LIBEGL_IMPLEMENTATION': True,
    'LIBGLESV2_IMPLEMENTATION': True,
    'NOMINMAX': True,
    'NO_TCMALLOC': False,
    # Else: gfx/angle/checkout/src/libANGLE/renderer/d3d/d3d11/win32/NativeWindow11Win32.cpp(89): error C2787: 'IDCompositionDevice': no GUID has been associated with this object
    'NTDDI_VERSION': True,
    'PSAPI_VERSION': False,
    'SAFE_BROWSING_CSD': False,
    'SAFE_BROWSING_DB_LOCAL': False,
    'UNICODE': True,
    'USE_AURA': False,
    'V8_DEPRECATION_WARNINGS': False,
    'WIN32': False,
    'WIN32_LEAN_AND_MEAN': False,
    'WINAPI_FAMILY': False,
    'WINVER': False,
    'WTF_USE_DYNAMIC_ANNOTATIONS': False,
    '_ATL_NO_OPENGL': True,
    '_CRT_RAND_S': True,
    '_CRT_SECURE_NO_DEPRECATE': True,
    '_DEBUG': False,
    '_HAS_EXCEPTIONS': True,
    '_HAS_ITERATOR_DEBUGGING': False,
    '_SCL_SECURE_NO_DEPRECATE': True,
    '_SECURE_ATL': True,
    '_UNICODE': True,
    '_USING_V110_SDK71_': False,
    '_WIN32_WINNT': False,
    '_WINDOWS': False,
    '__STD_C': False,
}
SOURCE_FILE_EXTS = frozenset(['h', 'hpp', 'inc', 'inl', 'c', 'cc', 'cpp'])
def is_source_file(x):
    e = x.split('.')[-1]
    return e in SOURCE_FILE_EXTS
def assert_valid_includes(target_name, cur, avail_files, include_dirs):
    assert cur.startswith('//'), cur
    cur = PurePosixPath(cur[2:])
    (cur_dir, _) = os.path.split(cur)
    include_dirs = [
        '//',
        '//' + cur_dir + '/',
    ] + list(include_dirs)

    def assert_one(inc, line_num):
        attempts = []
        for inc_dir in include_dirs:
            assert inc_dir[-1] == '/'
            inc_path = inc_dir + inc
            inc_path = collapse_dotdots(inc_path)
            attempts.append(inc_path)
            if inc_path in avail_files:
                return
        print('Warning in {}: {}:{}: Invalid include: {}'.format(target_name, cur, line_num, inc))
        print(' Tried:')
        for x in attempts:
            print(' {}'.format(x))
        #print()
        #print(avail_files)
        exit(1)

    line_num = 0
    with open(cur, 'rb') as f:
        for line in f:
            line = line.decode()
            line_num += 1
            m = INCLUDE_REGEX.match(line)
            if not m:
                continue
            inc = m.group(2)
            if inc in IGNORED_INCLUDES:
                continue
            if m.group(1) == '<':
                if '/' not in inc:
                    continue
                if any((inc.startswith(x) for x in IGNORED_INCLUDE_PREFIXES)):
                    continue
            assert_one(inc, line_num)
total_used_files = set()
vendor_prereq_outputs = set()
# --
print_now('Running prerequisite actions')
for k in VENDOR_PREREQ_TARGETS:
    assert k.startswith('//')
    run_checked('ninja', '-C', OUT_DIR, k[2:])
    vendor_prereq_outputs |= set(descs[k]['outputs'])
total_used_files |= vendor_prereq_outputs
# --
# Export our targets
print_now('Export targets')
# Clear our dest directories
targets_dir = Path(GECKO_ANGLE_DIR, 'targets')
checkout_dir = Path(GECKO_ANGLE_DIR, 'checkout')
if not DRY_RUN:
    shutil.rmtree(targets_dir, True)
    shutil.rmtree(checkout_dir, True)
    targets_dir.mkdir(exist_ok=True)
    checkout_dir.mkdir(exist_ok=True)
def export_target(root):
    name = root['target_name']
    assert name.startswith('//:')
    name = name[3:]

    used_files = root['sources_with_deps']
    used_files = [x for x in used_files if x.split('.')[-1] not in ['dll']]
    global total_used_files
    total_used_files |= set(used_files)

    # Check includes, since `gn check` seems to be broken.
    includable = set(root['sources_with_deps']) | vendor_prereq_outputs
    for x in includable:
        if is_source_file(x):
            assert_valid_includes(name, x, includable, root['include_dirs_with_deps'])

    # Accumulate a combined dict for the target including non-lib deps.
    accum_desc = dict(root)
    del accum_desc['dep_nodes']

    use_libs = set()
    checkable_sources = set()
    target_includable_files = set()

    def pre(cur):
        assert not cur.get('allow_circular_includes_from', ()), cur['target_name']

        deps = cur['dep_nodes']
        if cur != root:
            if cur['type'] in ['shared_library', 'static_library']:
                deps = []
                name = cur['target_name']
                assert name.startswith('//:')
                name = name[3:]
                use_libs.add(name)
            elif cur['type'] in ('source_set', 'group', 'action'):
                for (k,v) in cur.items():
                    if k in ('dep_nodes', 'sources_with_deps', 'include_dirs_with_deps'):
                        continue
                    if type(v) in (list, tuple):
                        # Concatenate onto the accumulated tuple; plain `+=` on the
                        # looked-up value would not write back into accum_desc.
                        accum_desc[k] = accum_desc.setdefault(k, ()) + tuple(v)
                    else:
                        accum_desc.setdefault(k, v)
        return (deps,)

    traverse([root], pre)
    # Create our manifest lines

    target_dir = Path(targets_dir, name)
    target_dir.mkdir(exist_ok=True)

    lines = COMMON_HEADER[:]
    lines.append('')

    for x in sorted(set(accum_desc['defines'])):
        try:
            (k, v) = x.split('=', 1)
            v = "'{}'".format(v)
        except ValueError:
            (k, v) = (x, 'True')
        try:
            line = "DEFINES['{}'] = {}".format(k, v)
            if REGISTERED_DEFINES[k] == False:
                line = '#' + line
            lines.append(line)
        except KeyError:
            print('[{}] Unrecognized define: {}'.format(name, k))
    lines.append('')

    cxxflags = set(accum_desc['cflags'] + accum_desc['cflags_cc'])

    def fixup_paths(listt):
        for x in set(listt):
            assert x.startswith('//'), x
            yield '../../checkout/' + x[2:]

    sources_by_config = {}
    extras = dict()
    for x in fixup_paths(accum_desc['sources']):
        (b, e) = x.rsplit('.', 1)
        if e in ['h', 'y', 'l', 'inc', 'inl']:
            continue
        elif e in ['cpp', 'cc', 'c']:
            if b.endswith('_win'):
                config = "CONFIG['OS_ARCH'] == 'WINNT'"
            elif b.endswith('_linux'):
                # Include these on BSDs too.
                config = "CONFIG['OS_ARCH'] not in ('Darwin', 'WINNT')"
            elif b.endswith('_mac'):
                config = "CONFIG['OS_ARCH'] == 'Darwin'"
            else:
                config = '' # None can't compare against str.
            sources_by_config.setdefault(config, []).append(x)
            continue
        elif e == 'rc':
            assert 'RCFILE' not in extras
            extras['RCFILE'] = "'{}'".format(x)
            continue
        else:
            assert False, "Unhandled ext: {}".format(x)

    ldflags = set(accum_desc['ldflags'])

    DEF_PREFIX = '/DEF:'
    for x in set(ldflags):
        if x.startswith(DEF_PREFIX):
            assert 'DEFFILE' not in extras
            ldflags.remove(x)
            def_path = OUT_DIR + '/' + x[len(DEF_PREFIX):]
            def_path = '//' + collapse_dotdots(def_path)
            total_used_files.add(def_path)
            def_rel_path = list(fixup_paths([def_path]))[0]
            extras['DEFFILE'] = "'{}'".format(def_rel_path)
    os_libs = list(map( lambda x: x[:-len('.lib')], set(accum_desc.get('libs', [])) ))

    def append_arr_commented(dest, name, src):
        lines = []
        append_arr(lines, name, src)

        def comment(x):
            if x:
                x = '#' + x
            return x

        lines = map(comment, lines)
        dest += lines

    append_arr(lines, 'LOCAL_INCLUDES', fixup_paths(accum_desc['include_dirs']))
    append_arr_commented(lines, 'CXXFLAGS', cxxflags)
    for (config,v) in sorted_items(sources_by_config):
        indent = 0
        if config:
            lines.append("if {}:".format(config))
            indent = 1
        append_arr(lines, 'SOURCES', v, indent=indent)
    append_arr(lines, 'USE_LIBS', use_libs)
    append_arr(lines, 'DIRS', ['../' + x for x in use_libs])
    append_arr(lines, 'OS_LIBS', os_libs)
    append_arr_commented(lines, 'LDFLAGS', ldflags)
    for (k,v) in sorted(extras.items()):
        lines.append('{} = {}'.format(k, v))

    lib_type = root['type']
    if lib_type == 'shared_library':
        lines.append("GeckoSharedLibrary('{}', linkage=None)".format(name))
    elif lib_type == 'static_library':
        lines.append("Library('{}')".format(name))
    else:
        assert False, lib_type

    # Write it out
    mozbuild = Path(target_dir, 'moz.build')
    print_now(' {}Writing {}'.format(ACTION_PREFIX, mozbuild))
    if not DRY_RUN:
        with mozbuild.open('w', newline='\n') as f:
            for x in lines:
                f.write(x + '\n')
    return
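# For orientation, the moz.build written by export_target() above has roughly
# this shape (illustrative values, not real output):
#
#   # Generated by update-angle.py
#
#   include('../../moz.build.common')
#
#   DEFINES['ANGLE_ENABLE_D3D11'] = True
#   #DEFINES['_DEBUG'] = True
#
#   LOCAL_INCLUDES += [
#       '../../checkout/include/',
#   ]
#
#   SOURCES += [
#       '../../checkout/src/libANGLE/Display.cpp',
#   ]
#
#   USE_LIBS += [
#       'angle_common',
#   ]
#
#   GeckoSharedLibrary('libEGL', linkage=None)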
for x in real_targets:
    export_target(x)
# Copy all the files
print_now('Migrate files')
total_used_files = sorted(total_used_files)
i = 0
for x in total_used_files:
    i += 1
    sys.stdout.write('\r {}Copying {}/{}'.format(ACTION_PREFIX, i, len(total_used_files)))
    sys.stdout.flush()

    assert x.startswith('//'), x
    x = x[2:]
    src = Path(REPO_DIR, x)
    dest = Path(checkout_dir, x)
    if not DRY_RUN:
        dest.parent.mkdir(parents=True, exist_ok=True)
        data = src.read_bytes()
        data = data.replace(b'\r\n', b'\n')
        dest.write_bytes(data)
print('\nDone')