#! /usr/bin/env python3

# This is a script, not a library: refuse to be imported.
assert __name__ == '__main__'

'''
To update ANGLE in Gecko, use Windows with git-bash, and setup depot_tools, python2, and
python3. Because depot_tools expects `python` to be `python2` (shame!), python2 must come
before python3 in your path.

Upstream: https://chromium.googlesource.com/angle/angle

Our repo: https://github.com/mozilla/angle
It has branches like 'firefox-60' which is the branch we use for pulling into
Gecko with this script.

This script leaves a record of the merge-base and cherry-picks that we pull into
Gecko. (gfx/angle/cherries.log)

ANGLE<->Chrome version mappings are here: https://omahaproxy.appspot.com/
An easy choice is to grab Chrome's Beta's ANGLE branch.

## Usage

Prepare your env:

~~~
export PATH="$PATH:/path/to/depot_tools"
~~~

If this is a new repo, don't forget:

~~~
# In the angle repo:
./scripts/bootstrap.py
gclient sync
~~~

Update: (in the angle repo)

~~~
# In the angle repo:
/path/to/gecko/gfx/angle/update-angle.py origin/chromium/XXXX
git push moz # Push the firefox-XX branch to github.com/mozilla/angle
~~~~
'''
import json
import os
from pathlib import *
import re
import shutil
import subprocess
import sys
2018-05-08 03:37:47 +03:00
from vendor_from_git import *
2018-02-23 00:59:06 +03:00
# The ANGLE repo is the cwd; this script itself lives in gfx/angle in Gecko.
REPO_DIR = Path.cwd()
GECKO_ANGLE_DIR = Path(__file__).parent

OUT_DIR = 'out'

# Header prepended to every generated moz.build.
COMMON_HEADER = [
    '# Generated by update-angle.py',
    '',
    "include('../../moz.build.common')",
]
2018-06-30 00:20:22 +03:00
# Targets that must be built before vendoring.
VENDOR_PREREQ_TARGETS = [
    '//:commit_id', # Generate 'commit.h'.
]
# Root targets whose dependency trees we export.
ROOTS = ['//:translator', '//:libEGL', '//:libGLESv2']

DRY_RUN = '--dry' in sys.argv

ACTION_PREFIX = ''
if DRY_RUN:
    ACTION_PREFIX = '(not) '

# Keep gn/ninja from trying to fetch Google's internal toolchain.
GN_ENV = dict(os.environ)
GN_ENV['DEPOT_TOOLS_WIN_TOOLCHAIN'] = '0'

# Exactly one positional arg: the merge-base ref. Not always 'origin'!
# BUG FIX: strip '--dry' before unpacking; previously passing '--dry'
# together with the ref made `( MERGE_BASE_ORIGIN , ) = sys.argv[1:]`
# raise ValueError, so dry runs were impossible.
(MERGE_BASE_ORIGIN,) = [x for x in sys.argv[1:] if x != '--dry']
2018-06-30 00:20:22 +03:00
# --------------------------------------
2018-02-23 00:59:06 +03:00
def sorted_items(x):
    '''Yield the (key, value) pairs of dict `x` in sorted-key order.'''
    for key in sorted(x):
        yield (key, x[key])
2018-06-30 00:20:22 +03:00
def collapse_dotdots(path):
    '''Resolve '..' segments in a '/'-delimited path string.

    A '..' removes the preceding segment when one exists; leading '..'
    segments (with nothing left to pop) are kept verbatim.
    '''
    out = []
    for part in path.split('/'):
        if part == '..' and out:
            out.pop()
        else:
            out.append(part)
    return '/'.join(out)
def traverse(roots, pre_recurse_func, key_func=id):
    '''Depth-first walk over `roots`, visiting each node at most once.

    `pre_recurse_func(node)` must return `(children,)` or
    `(children, post_recurse_func)`. Children are recursed into, and the
    optional post function runs after the node's subtree has been visited.
    Node identity is determined by `key_func` (object identity by default).
    '''
    seen = set()

    def walk(node):
        key = key_func(node)
        if key in seen:
            return
        seen.add(key)

        result = pre_recurse_func(node)
        post_func = None
        try:
            (children, post_func) = result
        except ValueError:
            # A 1-tuple: children only, no post hook.
            (children,) = result

        for child in children:
            walk(child)
        if post_func:
            post_func(node)

    for root in roots:
        walk(root)
# --------------------------------------
2018-02-23 00:59:06 +03:00
2018-06-30 00:20:22 +03:00
if not DRY_RUN:
    # Record the merge-base and cherry-picks into gfx/angle/cherries.log.
    record_cherry_picks(GECKO_ANGLE_DIR, MERGE_BASE_ORIGIN)

# --

print_now('Importing graph')

shutil.rmtree(OUT_DIR, True)
run_checked('gn', 'gen', OUT_DIR, shell=True, env=GN_ENV)

GN_ARGS = '''
# Build arguments go here.
# See "gn args <out_dir> --list" for available build arguments.
is_clang = false
angle_enable_gl = false
angle_enable_gl_null = false
angle_enable_null = false
angle_enable_vulkan = false
'''[1:]

with open(OUT_DIR + '/args.gn', 'wb') as f:
    f.write(GN_ARGS.encode())
# --
2018-05-08 03:37:47 +03:00
p = run_checked('gn', 'desc', '--format=json', OUT_DIR, '*', stdout=subprocess.PIPE,
                shell=True, env=GN_ENV)

print_now('Processing graph')
descs = json.loads(p.stdout.decode())

# HACKHACKHACK: force the per-OS system_utils files into angle_common's
# sources, since only the host OS's file is in the gn graph.
common = descs['//:angle_common']
common['sources'] += [
    '//src/common/system_utils_linux.cpp',
    '//src/common/system_utils_mac.cpp',
]

# --
for (target_name, desc) in descs.items():
    for (field, value) in desc.items():
        if isinstance(value, list):
            desc[field] = tuple(value) # Freeze lists
    desc['target_name'] = target_name
    # Entries mutate in place, so forward references still resolve.
    desc['dep_nodes'] = tuple(descs[dep] for dep in desc['deps'])
    assert desc['public'] == '*', target_name

# --
# Ready to traverse: resolve root target names into their descs.
ROOTS = [descs[k] for k in ROOTS]
2018-02-23 00:59:06 +03:00
# Gather real targets:
real_targets = []

def gather_real_targets(cur):
    '''traverse() hook: collect library targets and roll sources/include_dirs
    up into each node after its deps have been visited.'''
    print_now(' ' + cur['type'], cur['target_name'])
    if cur['type'] in ['shared_library', 'static_library']:
        real_targets.append(cur)

    def post(node):
        node['sources_with_deps'] = node.get('sources', ())
        node['include_dirs_with_deps'] = node.get('include_dirs', ())
        for dep in node['dep_nodes']:
            node['sources_with_deps'] += dep['sources_with_deps']
            node['include_dirs_with_deps'] += dep['include_dirs_with_deps']

    return (cur['dep_nodes'], post)

traverse(ROOTS, gather_real_targets)
2018-02-23 00:59:06 +03:00
# --
def sortedi(x):
    '''Sort case-insensitively.'''
    return sorted(x, key=str.lower)


def append_arr(dest, name, vals, indent=0):
    '''Append moz.build lines for `name += [...]` to `dest`.

    No-op when `vals` is empty; entries are sorted case-insensitively and a
    trailing blank line is emitted.
    '''
    if not vals:
        return

    pad = ' ' * 4 * indent
    dest.append('{}{} += ['.format(pad, name))
    for val in sortedi(vals):
        dest.append("{}'{}',".format(pad + '    ', val))
    dest.append('{}]'.format(pad))
    dest.append('')
2018-06-30 00:20:22 +03:00
# Matches `#include <path>` / `#include "path"`, capturing the opening
# bracket and the include path.
INCLUDE_REGEX = re.compile('# *include +([<"])([^>"]+)[>"]')

# Includes we knowingly cannot resolve against the vendored file set —
# presumably headers for configs we don't vendor. (TODO confirm)
IGNORED_INCLUDES = {
    'compiler/translator/TranslatorVulkan.h',
    'libANGLE/renderer/d3d/d3d11/winrt/NativeWindow11WinRT.h',
    'libANGLE/renderer/gl/glx/DisplayGLX.h',
    'libANGLE/renderer/gl/cgl/DisplayCGL.h',
    'libANGLE/renderer/gl/egl/ozone/DisplayOzone.h',
    'libANGLE/renderer/gl/egl/android/DisplayAndroid.h',
    'libANGLE/renderer/gl/wgl/DisplayWGL.h',
    'libANGLE/renderer/null/DisplayNULL.h',
    'libANGLE/renderer/vulkan/android/DisplayVkAndroid.h',
    'libANGLE/renderer/vulkan/win32/DisplayVkWin32.h',
    'libANGLE/renderer/vulkan/xcb/DisplayVkXcb.h',
    'kernel/image.h',
}

# System-header directory prefixes that are never vendored.
IGNORED_INCLUDE_PREFIXES = {
    'android/',
    'Carbon/',
    'CoreFoundation/',
    'CoreServices/',
    'IOSurface/',
    'mach/',
    'mach-o/',
    'OpenGL/',
    'pci/',
    'sys/',
    'wrl/',
    'X11/',
}
2018-02-23 00:59:06 +03:00
# Known #defines from the gn graph. True: emit the DEFINES[...] line as-is;
# False: emit it commented out. Unknown defines are reported by export_target.
REGISTERED_DEFINES = {
    'ANGLE_ENABLE_D3D11': True,
    'ANGLE_ENABLE_D3D9': True,
    'ANGLE_ENABLE_DEBUG_ANNOTATIONS': True,
    'ANGLE_ENABLE_NULL': False,
    'ANGLE_ENABLE_OPENGL': False,
    'ANGLE_ENABLE_OPENGL_NULL': False,
    'ANGLE_ENABLE_ESSL': True,
    'ANGLE_ENABLE_GLSL': True,
    'ANGLE_ENABLE_HLSL': True,
    'ANGLE_GENERATE_SHADER_DEBUG_INFO': True,
    'ANGLE_IS_64_BIT_CPU': False,
    'ANGLE_PRELOADED_D3DCOMPILER_MODULE_NAMES': False,
    'CERT_CHAIN_PARA_HAS_EXTRA_FIELDS': False,
    'CHROMIUM_BUILD': False,
    'COMPONENT_BUILD': False,
    'constexpr14': False,
    'DYNAMIC_ANNOTATIONS_ENABLED': True,
    'EGL_EGLEXT_PROTOTYPES': True,
    'EGLAPI': True,
    'FIELDTRIAL_TESTING_ENABLED': False,
    'FULL_SAFE_BROWSING': False,
    'GL_API': True,
    'GL_APICALL': True,
    'GL_GLEXT_PROTOTYPES': True,
    'GPU_INFO_USE_SETUPAPI': True,
    'LIBANGLE_IMPLEMENTATION': True,
    'LIBEGL_IMPLEMENTATION': True,
    'LIBGLESV2_IMPLEMENTATION': True,
    'NOMINMAX': True,
    'NO_TCMALLOC': False,
    # Else: gfx/angle/checkout/src/libANGLE/renderer/d3d/d3d11/win32/NativeWindow11Win32.cpp(89): error C2787: 'IDCompositionDevice': no GUID has been associated with this object
    'NTDDI_VERSION': True,
    'PSAPI_VERSION': False,
    'SAFE_BROWSING_CSD': False,
    'SAFE_BROWSING_DB_LOCAL': False,
    'UNICODE': True,
    'USE_AURA': False,
    'V8_DEPRECATION_WARNINGS': False,
    'WIN32': False,
    'WIN32_LEAN_AND_MEAN': False,
    'WINAPI_FAMILY': False,
    'WINVER': False,
    'WTF_USE_DYNAMIC_ANNOTATIONS': False,
    '_ATL_NO_OPENGL': True,
    '_CRT_RAND_S': True,
    '_CRT_SECURE_NO_DEPRECATE': True,
    '_DEBUG': False,
    '_HAS_EXCEPTIONS': True,
    '_HAS_ITERATOR_DEBUGGING': False,
    '_SCL_SECURE_NO_DEPRECATE': True,
    '_SECURE_ATL': True,
    '_UNICODE': True,
    '_USING_V110_SDK71_': False,
    '_WIN32_WINNT': False,
    '_WINDOWS': False,
    '__STD_C': False,
}
2018-06-30 00:20:22 +03:00
SOURCE_FILE_EXTS = frozenset(['h', 'hpp', 'inc', 'inl', 'c', 'cc', 'cpp'])


def is_source_file(x):
    '''True if `x` ends with a C/C++ source or header extension.'''
    return x.rsplit('.', 1)[-1] in SOURCE_FILE_EXTS
2018-02-23 00:59:06 +03:00
2018-06-30 00:20:22 +03:00
def assert_valid_includes(target_name, cur, avail_files, include_dirs):
    '''Verify every #include in gn-rooted source file `cur` ('//...') resolves
    to a file in `avail_files`, trying the repo root, the file's own directory,
    and each of `include_dirs`. Prints the failed attempts and exits the
    script on the first unresolvable include.
    '''
    assert cur.startswith('//'), cur
    cur = PurePosixPath(cur[2:])
    (cur_dir, _) = os.path.split(cur)

    # The repo root and the including file's own dir are implicit search dirs.
    include_dirs = [
        '//',
        '//' + cur_dir + '/',
    ] + list(include_dirs)

    def assert_one(inc, line_num):
        attempts = []
        for inc_dir in include_dirs:
            assert inc_dir[-1] == '/'
            inc_path = collapse_dotdots(inc_dir + inc)
            attempts.append(inc_path)
            if inc_path in avail_files:
                return

        print('Warning in {}:{}:{}: Invalid include: {}'.format(target_name, cur, line_num, inc))
        print('Tried:')
        for attempt in attempts:
            print('   {}'.format(attempt))
        #print()
        #print(avail_files)
        exit(1)

    line_num = 0
    with open(cur, 'rb') as f:
        for raw in f:
            line = raw.decode()
            line_num += 1
            m = INCLUDE_REGEX.match(line)
            if not m:
                continue

            inc = m.group(2)
            if inc in IGNORED_INCLUDES:
                continue
            if m.group(1) == '<':
                # Path-less angle includes are system headers; ditto the
                # known system directory prefixes.
                if '/' not in inc:
                    continue
                if any((inc.startswith(p) for p in IGNORED_INCLUDE_PREFIXES)):
                    continue

            assert_one(inc, line_num)
2018-02-23 00:59:06 +03:00
total_used_files = set()
vendor_prereq_outputs = set()

# --

print_now('Running prerequisite actions')
for k in VENDOR_PREREQ_TARGETS:
    assert k.startswith('//')
    # ninja wants the target name without the '//' prefix.
    run_checked('ninja', '-C', OUT_DIR, k[2:])
    vendor_prereq_outputs |= set(descs[k]['outputs'])
total_used_files |= vendor_prereq_outputs

# --
# Export our targets

print_now('Export targets')

# Clear our dest directories
targets_dir = Path(GECKO_ANGLE_DIR, 'targets')
checkout_dir = Path(GECKO_ANGLE_DIR, 'checkout')
if not DRY_RUN:
    shutil.rmtree(targets_dir, True)
    shutil.rmtree(checkout_dir, True)

targets_dir.mkdir(exist_ok=True)
checkout_dir.mkdir(exist_ok=True)
2018-02-23 00:59:06 +03:00
def export_target(root):
    '''Write targets/<name>/moz.build for library target `root`, folding its
    non-library deps (source_sets, groups, actions) into one combined desc,
    and record every vendored file in `total_used_files`.
    '''
    name = root['target_name']
    assert name.startswith('//:')
    name = name[3:]

    # Record this target's files for vendoring (prebuilt .dlls excluded).
    used_files = root['sources_with_deps']
    used_files = [x for x in used_files if x.split('.')[-1] not in ['dll']]
    global total_used_files
    total_used_files |= set(used_files)

    # Check includes, since `gn check` seems to be broken.
    includable = set(root['sources_with_deps']) | vendor_prereq_outputs
    for x in includable:
        if is_source_file(x):
            assert_valid_includes(name, x, includable, root['include_dirs_with_deps'])

    # Accumulate a combined dict for the target including non-lib deps.
    accum_desc = dict(root)
    del accum_desc['dep_nodes']

    use_libs = set()

    def pre(cur):
        assert not cur.get('allow_circular_includes_from', ()), cur['target_name']
        deps = cur['dep_nodes']
        if cur is not root:
            if cur['type'] in ['shared_library', 'static_library']:
                # Library deps become USE_LIBS; don't recurse into them.
                deps = []
                dep_name = cur['target_name']
                assert dep_name.startswith('//:')
                use_libs.add(dep_name[3:])
            elif cur['type'] in ('source_set', 'group', 'action'):
                for (k, v) in cur.items():
                    if k in ('dep_nodes', 'sources_with_deps', 'include_dirs_with_deps'):
                        continue
                    if type(v) in (list, tuple):
                        # BUG FIX: values were frozen to tuples above, so the
                        # old `vs = setdefault(...); vs += v` rebound a local
                        # and never merged dep values into accum_desc. Write
                        # the concatenation back explicitly.
                        accum_desc[k] = accum_desc.setdefault(k, ()) + tuple(v)
                    else:
                        accum_desc.setdefault(k, v)
        return (deps,)

    traverse([root], pre)

    # Create our manifest lines
    target_dir = Path(targets_dir, name)
    target_dir.mkdir(exist_ok=True)

    lines = COMMON_HEADER[:]
    lines.append('')

    # Emit DEFINES, commenting out the ones registered as unwanted.
    for x in sorted(set(accum_desc['defines'])):
        try:
            (k, v) = x.split('=', 1)
            v = "'{}'".format(v)
        except ValueError:
            (k, v) = (x, 'True')
        try:
            line = "DEFINES['{}'] = {}".format(k, v)
            if REGISTERED_DEFINES[k] == False:
                line = '#' + line
            lines.append(line)
        except KeyError:
            print('[{}] Unrecognized define: {}'.format(name, k))
    lines.append('')

    cxxflags = set(accum_desc['cflags'] + accum_desc['cflags_cc'])

    def fixup_paths(listt):
        # '//foo' (gn source-root relative) -> '../../checkout/foo'.
        for x in set(listt):
            assert x.startswith('//'), x
            yield '../../checkout/' + x[2:]

    # Bucket compilable sources by the moz.build config condition they need.
    sources_by_config = {}
    extras = dict()
    for x in fixup_paths(accum_desc['sources']):
        (b, e) = x.rsplit('.', 1)
        if e in ['h', 'y', 'l', 'inc', 'inl']:
            continue
        elif e in ['cpp', 'cc', 'c']:
            if b.endswith('_win'):
                config = "CONFIG['OS_ARCH'] == 'WINNT'"
            elif b.endswith('_linux'):
                # Include these on BSDs too.
                config = "CONFIG['OS_ARCH'] not in ('Darwin', 'WINNT')"
            elif b.endswith('_mac'):
                config = "CONFIG['OS_ARCH'] == 'Darwin'"
            else:
                config = '' # None can't compare against str.
            sources_by_config.setdefault(config, []).append(x)
            continue
        elif e == 'rc':
            assert 'RCFILE' not in extras
            extras['RCFILE'] = "'{}'".format(x)
            continue
        else:
            assert False, 'Unhandled ext: {}'.format(x)

    # Pull any '/DEF:...' out of ldflags into DEFFILE, vendoring the .def file.
    ldflags = set(accum_desc['ldflags'])
    DEF_PREFIX = '/DEF:'
    for x in set(ldflags):
        if x.startswith(DEF_PREFIX):
            assert 'DEFFILE' not in extras
            ldflags.remove(x)
            def_path = OUT_DIR + '/' + x[len(DEF_PREFIX):]
            def_path = '//' + collapse_dotdots(def_path)
            total_used_files.add(def_path)
            def_rel_path = list(fixup_paths([def_path]))[0]
            extras['DEFFILE'] = "'{}'".format(def_rel_path)

    os_libs = [x[:-len('.lib')] for x in set(accum_desc.get('libs', []))]

    def append_arr_commented(dest, arr_name, src):
        # Like append_arr, but emits the lines commented out.
        temp = []
        append_arr(temp, arr_name, src)
        dest += ['#' + x if x else x for x in temp]

    append_arr(lines, 'LOCAL_INCLUDES', fixup_paths(accum_desc['include_dirs']))
    append_arr_commented(lines, 'CXXFLAGS', cxxflags)

    for (config, v) in sorted_items(sources_by_config):
        indent = 0
        if config:
            lines.append('if {}:'.format(config))
            indent = 1
        append_arr(lines, 'SOURCES', v, indent=indent)

    append_arr(lines, 'USE_LIBS', use_libs)
    append_arr(lines, 'DIRS', ['../' + x for x in use_libs])
    append_arr(lines, 'OS_LIBS', os_libs)
    append_arr_commented(lines, 'LDFLAGS', ldflags)

    for (k, v) in sorted(extras.items()):
        lines.append('{} = {}'.format(k, v))

    lib_type = root['type']
    if lib_type == 'shared_library':
        lines.append("GeckoSharedLibrary('{}', linkage=None)".format(name))
    elif lib_type == 'static_library':
        lines.append("Library('{}')".format(name))
    else:
        assert False, lib_type

    # Write it out
    mozbuild = Path(target_dir, 'moz.build')
    print_now('{}Writing {}'.format(ACTION_PREFIX, mozbuild))
    if not DRY_RUN:
        with mozbuild.open('w', newline='\n') as f:
            for x in lines:
                f.write(x + '\n')
    return
for target in real_targets:
    export_target(target)

# Copy all the files
print_now('Migrate files')

total_used_files = sorted(total_used_files)
for (i, x) in enumerate(total_used_files, 1):
    sys.stdout.write('\r{}Copying {}/{}'.format(ACTION_PREFIX, i, len(total_used_files)))
    sys.stdout.flush()

    assert x.startswith('//'), x
    rel = x[2:]
    src = Path(REPO_DIR, rel)
    dest = Path(checkout_dir, rel)
    if not DRY_RUN:
        dest.parent.mkdir(parents=True, exist_ok=True)
        # Normalize CRLF to LF while vendoring.
        data = src.read_bytes()
        data = data.replace(b'\r\n', b'\n')
        dest.write_bytes(data)

print('\nDone')