#!/usr/bin/env python2

'''
You should normally never use this! Use emcc instead.

This is a small wrapper script around the core JS compiler. This calls that
compiler with the settings given to it. It can also read data from C/C++
header files (so that the JS compiler can see the constants in those
headers, for the libc implementation in JS).
'''

import os, sys, json, optparse, subprocess, re, time, multiprocessing, string, logging

from tools import jsrun, cache as cache_module, tempfiles
from tools.response_file import read_response_file

__rootpath__ = os.path.abspath(os.path.dirname(__file__))
def path_from_root(*pathelems):
  """Returns the absolute path for which the given path elements are
  relative to the emscripten root.
  """
  return os.path.join(__rootpath__, *pathelems)

def get_configuration():
  if hasattr(get_configuration, 'configuration'):
    return get_configuration.configuration

  from tools import shared
  configuration = shared.Configuration(environ=os.environ)
  get_configuration.configuration = configuration
  return configuration

def scan(ll, settings):
  # blockaddress(@main, %23)
  blockaddrs = []
  for blockaddr in re.findall('blockaddress\([^)]*\)', ll):
    b = blockaddr.split('(')[1][:-1].split(', ')
    blockaddrs.append(b)
  if len(blockaddrs) > 0:
    settings['NECESSARY_BLOCKADDRS'] = blockaddrs

NUM_CHUNKS_PER_CORE = 1.0
MIN_CHUNK_SIZE = 1024*1024
MAX_CHUNK_SIZE = float(os.environ.get('EMSCRIPT_MAX_CHUNK_SIZE') or 'inf') # configuring this is just for debugging purposes

STDERR_FILE = os.environ.get('EMCC_STDERR_FILE')
if STDERR_FILE:
  STDERR_FILE = os.path.abspath(STDERR_FILE)
  logging.info('logging stderr in js compiler phase into %s' % STDERR_FILE)
  STDERR_FILE = open(STDERR_FILE, 'w')

def process_funcs((i, funcs, meta, settings_file, compiler, forwarded_file, libraries, compiler_engine, temp_files, DEBUG)):
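  """Runs compiler.js in 'funcs' mode on one chunk of function code; the arguments
  arrive packed in a single tuple so this can be passed directly to multiprocessing.Pool.map."""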
  try:
    funcs_file = temp_files.get('.func_%d.ll' % i).name
    f = open(funcs_file, 'w')
    f.write(funcs)
    funcs = None
    f.write('\n')
    f.write(meta)
    f.close()

    #print >> sys.stderr, 'running', str([settings_file, funcs_file, 'funcs', forwarded_file] + libraries).replace("'/", "'") # can use this in src/compiler_funcs.html arguments,
    #                                                                                                                         # just copy temp dir to under this one
    out = jsrun.run_js(
      compiler,
      engine=compiler_engine,
      args=[settings_file, funcs_file, 'funcs', forwarded_file] + libraries,
      stdout=subprocess.PIPE,
      stderr=STDERR_FILE,
      cwd=path_from_root('src'))
  except KeyboardInterrupt:
    # Python 2.7 seems to lock up when a child process throws KeyboardInterrupt
    raise Exception()
  if DEBUG: logging.debug('.')
  return out

def emscript(infile, settings, outfile, libraries=[], compiler_engine=None,
             jcache=None, temp_files=None, DEBUG=None, DEBUG_CACHE=None):
  """Runs the emscripten LLVM-to-JS compiler. We parallelize as much as possible.

  Args:
    infile: The path to the input LLVM assembly file.
    settings: JSON-formatted settings that override the values
      defined in src/settings.js.
    outfile: The file where the output is written.
  """
  compiler = path_from_root('src', 'compiler.js')

  # Parallelization: We run 3 phases:
  #   1 aka 'pre'  : Process types and metadata and so forth, and generate the preamble.
  #   2 aka 'funcs': Process functions. We can parallelize this, working on each function independently.
  #   3 aka 'post' : Process globals, generate postamble and finishing touches.

  if DEBUG: logging.debug('emscript: ll=>js')

  if jcache: jcache.ensure()

  # Pre-scan ll and alter settings as necessary
  if DEBUG: t = time.time()
  ll = open(infile).read()
  scan(ll, settings)
  total_ll_size = len(ll)
  ll = None # allow collection
  if DEBUG: logging.debug('  emscript: scan took %s seconds' % (time.time() - t))

  # Split input into the relevant parts for each phase
  pre = []
  funcs = [] # split up functions here, for parallelism later
  meta = [] # needed by each function XXX

  if DEBUG: t = time.time()
  in_func = False
  ll_lines = open(infile).readlines()
  curr_func = None
  for line in ll_lines:
    if in_func:
      curr_func.append(line)
      if line.startswith('}'):
        in_func = False
        funcs.append((curr_func[0], ''.join(curr_func))) # use the entire line as the identifier
        # pre needs to know about all implemented functions, even for non-pre func
        pre.append(curr_func[0])
        pre.append(line)
        curr_func = None
    else:
      if line.startswith(';'): continue
      if line.startswith('define '):
        in_func = True
        curr_func = [line]
      elif line.find(' = type { ') > 0:
        pre.append(line) # type
      elif line.startswith('!'):
        if line.startswith('!llvm.module'): continue # we can ignore that
        meta.append(line) # metadata
      else:
        pre.append(line) # pre needs it so we know about globals in pre and funcs. So emit globals there
  ll_lines = None
  meta = ''.join(meta)
  if DEBUG and len(meta) > 1024*1024: logging.debug('emscript warning: large amounts of metadata, will slow things down')
  if DEBUG: logging.debug('  emscript: split took %s seconds' % (time.time() - t))

  if len(funcs) == 0:
    logging.error('No functions to process. Make sure you prevented LLVM from eliminating them as dead (use EXPORTED_FUNCTIONS if necessary, see the FAQ)')

  #if DEBUG:
  #  logging.debug('========= pre ================\n')
  #  logging.debug(''.join(pre))
  #  logging.debug('========== funcs ===============\n')
  #  for func in funcs:
  #    logging.debug('\n// ===\n\n' + ''.join(func))
  #  logging.debug('=========================\n')

  # Save settings to a file to work around v8 issue 1579
  settings_file = temp_files.get('.txt').name
  def save_settings():
    global settings_text
    settings_text = json.dumps(settings, sort_keys=True)
    s = open(settings_file, 'w')
    s.write(settings_text)
    s.close()
  save_settings()

  # Phase 1 - pre

  if DEBUG: t = time.time()
  pre_file = temp_files.get('.pre.ll').name
  pre_input = ''.join(pre) + '\n' + meta
  out = None
  if jcache:
    keys = [pre_input, settings_text, ','.join(libraries)]
    shortkey = jcache.get_shortkey(keys)
    if DEBUG_CACHE: logging.debug('shortkey %s' % shortkey)

    out = jcache.get(shortkey, keys)

    if DEBUG_CACHE and not out:
      dfpath = os.path.join(get_configuration().TEMP_DIR, "ems_" + shortkey)
      dfp = open(dfpath, 'w')
      dfp.write(pre_input)
      dfp.write("\n\n========================== settings_text\n\n")
      dfp.write(settings_text)
      dfp.write("\n\n========================== libraries\n\n")
      dfp.write("\n".join(libraries))
      dfp.close()
      logging.debug('  cache miss, key data dumped to %s' % dfpath)

    if out and DEBUG: logging.debug('  loading pre from jcache')
  if not out:
    open(pre_file, 'w').write(pre_input)
    #print >> sys.stderr, 'running', str([settings_file, pre_file, 'pre'] + libraries).replace("'/", "'") # see funcs
    out = jsrun.run_js(compiler, compiler_engine, [settings_file, pre_file, 'pre'] + libraries, stdout=subprocess.PIPE, stderr=STDERR_FILE,
                       cwd=path_from_root('src'))
    assert '//FORWARDED_DATA:' in out, 'Did not receive forwarded data in pre output - process failed?'
    if jcache:
      if DEBUG: logging.debug('  saving pre to jcache')
      jcache.set(shortkey, keys, out)
  pre, forwarded_data = out.split('//FORWARDED_DATA:')
  forwarded_file = temp_files.get('.json').name
  open(forwarded_file, 'w').write(forwarded_data)
  if DEBUG: logging.debug('  emscript: phase 1 took %s seconds' % (time.time() - t))

  indexed_functions = set()
  forwarded_json = json.loads(forwarded_data)
  for key in forwarded_json['Functions']['indexedFunctions'].iterkeys():
    indexed_functions.add(key)

  # Phase 2 - func

  cores = int(os.environ.get('EMCC_CORES') or multiprocessing.cpu_count())
  assert cores >= 1
  if cores > 1:
    intended_num_chunks = int(round(cores * NUM_CHUNKS_PER_CORE))
    chunk_size = max(MIN_CHUNK_SIZE, total_ll_size / intended_num_chunks)
    chunk_size += 3*len(meta) # keep ratio of lots of function code to meta (expensive to process, and done in each parallel task)
    chunk_size = min(MAX_CHUNK_SIZE, chunk_size)
  else:
    chunk_size = MAX_CHUNK_SIZE # if 1 core, just use the max chunk size

  if DEBUG: t = time.time()

  if settings.get('ASM_JS'):
    settings['EXPORTED_FUNCTIONS'] = forwarded_json['EXPORTED_FUNCTIONS']
    save_settings()

  chunks = cache_module.chunkify(
    funcs, chunk_size,
    jcache.get_cachename('emscript_files') if jcache else None)

  #sys.exit(1)
  #chunks = [chunks[0]] # pick specific chunks for debugging/profiling

  funcs = None

  if jcache:
    # load chunks from cache where we can # TODO: ignore small chunks
    cached_outputs = []
    def load_from_cache(chunk):
      keys = [settings_text, forwarded_data, chunk]
      shortkey = jcache.get_shortkey(keys) # TODO: share shortkeys with later code
      out = jcache.get(shortkey, keys) # this is relatively expensive (pickling?)
      if out:
        cached_outputs.append(out)
        return False
      return True
    chunks = filter(load_from_cache, chunks)
    if len(cached_outputs) > 0:
      if out and DEBUG: logging.debug('  loading %d funcchunks from jcache' % len(cached_outputs))
  else:
    cached_outputs = []

  # TODO: minimize size of forwarded data from funcs to what we actually need
  if len(chunks) > 0:
    if cores == 1 and total_ll_size < MAX_CHUNK_SIZE:
      assert len(chunks) == 1, 'no point in splitting up without multiple cores'

    if DEBUG: logging.debug('  emscript: phase 2 working on %d chunks %s (intended chunk size: %.2f MB, meta: %.2f MB, forwarded: %.2f MB, total: %.2f MB)' % (len(chunks), ('using %d cores' % cores) if len(chunks) > 1 else '', chunk_size/(1024*1024.), len(meta)/(1024*1024.), len(forwarded_data)/(1024*1024.), total_ll_size/(1024*1024.)))

    commands = [
      (i, chunk, meta, settings_file, compiler, forwarded_file, libraries, compiler_engine, # + ['--prof'],
       temp_files, DEBUG)
      for i, chunk in enumerate(chunks)
    ]

    if len(chunks) > 1:
      pool = multiprocessing.Pool(processes=cores)
      outputs = pool.map(process_funcs, commands, chunksize=1)
    elif len(chunks) == 1:
      outputs = [process_funcs(commands[0])]

    commands = None

  else:
    outputs = []

  if jcache:
    # save chunks to cache
    for i in range(len(chunks)):
      chunk = chunks[i]
      keys = [settings_text, forwarded_data, chunk]
      shortkey = jcache.get_shortkey(keys)
      jcache.set(shortkey, keys, outputs[i])
    if out and DEBUG and len(chunks) > 0: logging.debug('  saving %d funcchunks to jcache' % len(chunks))

  chunks = None

  if jcache: outputs += cached_outputs # TODO: preserve order

  outputs = [output.split('//FORWARDED_DATA:') for output in outputs]
  for output in outputs:
    assert len(output) == 2, 'Did not receive forwarded data in an output - process failed? We only got: ' + output[0][-3000:]

  if DEBUG: logging.debug('  emscript: phase 2 took %s seconds' % (time.time() - t))
  if DEBUG: t = time.time()

  # merge forwarded data
  if settings.get('ASM_JS'):
    all_exported_functions = set(settings['EXPORTED_FUNCTIONS']) # both asm.js and otherwise
    for additional_export in settings['DEFAULT_LIBRARY_FUNCS_TO_INCLUDE']: # additional functions to export from asm, if they are implemented
      all_exported_functions.add('_' + additional_export)
    exported_implemented_functions = set()
  for func_js, curr_forwarded_data in outputs:
    curr_forwarded_json = json.loads(curr_forwarded_data)
    forwarded_json['Types']['hasInlineJS'] = forwarded_json['Types']['hasInlineJS'] or curr_forwarded_json['Types']['hasInlineJS']
    forwarded_json['Types']['preciseI64MathUsed'] = forwarded_json['Types']['preciseI64MathUsed'] or curr_forwarded_json['Types']['preciseI64MathUsed']
    for key, value in curr_forwarded_json['Functions']['blockAddresses'].iteritems():
      forwarded_json['Functions']['blockAddresses'][key] = value
    for key in curr_forwarded_json['Functions']['indexedFunctions'].iterkeys():
      indexed_functions.add(key)
    if settings.get('ASM_JS'):
      export_bindings = settings['EXPORT_BINDINGS']
      export_all = settings['EXPORT_ALL']
      for key in curr_forwarded_json['Functions']['implementedFunctions'].iterkeys():
        if key in all_exported_functions or export_all or (export_bindings and key.startswith('_emscripten_bind')):
          exported_implemented_functions.add(key)
    for key, value in curr_forwarded_json['Functions']['unimplementedFunctions'].iteritems():
      forwarded_json['Functions']['unimplementedFunctions'][key] = value
    for key, value in curr_forwarded_json['Functions']['neededTables'].iteritems():
      forwarded_json['Functions']['neededTables'][key] = value

  if settings.get('ASM_JS'):
    parts = pre.split('// ASM_LIBRARY FUNCTIONS\n')
    if len(parts) > 1:
      pre = parts[0]
      outputs.append([parts[1]])
  funcs_js = [output[0] for output in outputs]

  outputs = None
  if DEBUG: logging.debug('  emscript: phase 2b took %s seconds' % (time.time() - t))
  if DEBUG: t = time.time()

  # calculations on merged forwarded data
  forwarded_json['Functions']['indexedFunctions'] = {}
  i = settings['FUNCTION_POINTER_ALIGNMENT'] # universal counter
  if settings['ASM_JS']: i += settings['RESERVED_FUNCTION_POINTERS'] * settings['FUNCTION_POINTER_ALIGNMENT']
  base_fp = i
  table_counters = {} # table-specific counters
  alias = settings['ASM_JS'] and settings['ALIASING_FUNCTION_POINTERS']
  sig = None
  for indexed in indexed_functions:
    if alias:
      sig = forwarded_json['Functions']['implementedFunctions'].get(indexed) or forwarded_json['Functions']['unimplementedFunctions'].get(indexed)
      assert sig, indexed
      if sig not in table_counters:
        table_counters[sig] = base_fp
      curr = table_counters[sig]
      table_counters[sig] += settings['FUNCTION_POINTER_ALIGNMENT']
    else:
      curr = i
      i += settings['FUNCTION_POINTER_ALIGNMENT']
    #logging.debug('function indexing', indexed, curr, sig)
    forwarded_json['Functions']['indexedFunctions'][indexed] = curr # make sure not to modify this python object later - we use it in indexize

  def split_32(x):
    x = int(x)
    return '%d,%d,%d,%d' % (x&255, (x >> 8)&255, (x >> 16)&255, (x >> 24)&255)
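  # e.g. split_32(0x12345678) == '120,86,52,18' (little-endian byte order), the form used
  # when splicing a 32-bit value into the flat ",0,0,0"-padded memory initializer below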

  indexing = forwarded_json['Functions']['indexedFunctions']
  def indexize_mem(js):
    return re.sub(r"\"?'?{{ FI_([\w\d_$]+) }}'?\"?,0,0,0", lambda m: split_32(indexing.get(m.groups(0)[0]) or 0), js)
  def indexize(js):
    return re.sub(r"'{{ FI_([\w\d_$]+) }}'", lambda m: str(indexing.get(m.groups(0)[0]) or 0), js)
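  # e.g. indexize("'{{ FI__main }}'") becomes the numeric slot assigned to _main above
  # (or 0 if it was never indexed); indexize_mem handles the same placeholder in its
  # ",0,0,0" memory-initializer form via split_32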
  blockaddrs = forwarded_json['Functions']['blockAddresses']
  def blockaddrsize_mem(js):
    return re.sub(r'"?{{{ BA_([\w\d_$]+)\|([\w\d_$]+) }}}"?,0,0,0', lambda m: split_32(blockaddrs[m.groups(0)[0]][m.groups(0)[1]]), js)
  def blockaddrsize(js):
    return re.sub(r'"?{{{ BA_([\w\d_$]+)\|([\w\d_$]+) }}}"?', lambda m: str(blockaddrs[m.groups(0)[0]][m.groups(0)[1]]), js)

  pre = blockaddrsize(blockaddrsize_mem(indexize(indexize_mem(pre))))

  if settings.get('ASM_JS'):
    # move postsets into the asm module
    class PostSets: js = ''
    def handle_post_sets(m):
      PostSets.js = m.group(0)
      return '\n'
    pre = re.sub(r'function runPostSets[^}]+}', handle_post_sets, pre)

  #if DEBUG: outfile.write('// pre\n')
  outfile.write(pre)
  pre = None
  #if DEBUG: outfile.write('// funcs\n')

  # forward
  forwarded_data = json.dumps(forwarded_json)
  forwarded_file = temp_files.get('.2.json').name
  open(forwarded_file, 'w').write(indexize(forwarded_data))
  if DEBUG: logging.debug('  emscript: phase 2c took %s seconds' % (time.time() - t))

  # Phase 3 - post

  if DEBUG: t = time.time()
  post_file = temp_files.get('.post.ll').name
  open(post_file, 'w').write('\n') # no input, just processing of forwarded data
  out = jsrun.run_js(compiler, compiler_engine, [settings_file, post_file, 'post', forwarded_file] + libraries, stdout=subprocess.PIPE, stderr=STDERR_FILE,
                     cwd=path_from_root('src'))
  post, last_forwarded_data = out.split('//FORWARDED_DATA:') # if this fails, perhaps the process failed prior to printing forwarded data?
  last_forwarded_json = json.loads(last_forwarded_data)

  if settings.get('ASM_JS'):
    post_funcs, post_rest = post.split('// EMSCRIPTEN_END_FUNCS\n')
    post = post_rest

    # Move preAsms to their right place
    def move_preasm(m):
      contents = m.groups(0)[0]
      outfile.write(contents + '\n')
      return ''
    post_funcs = re.sub(r'/\* PRE_ASM \*/(.*)\n', lambda m: move_preasm(m), post_funcs)
    funcs_js += ['\n' + post_funcs + '// EMSCRIPTEN_END_FUNCS\n']

    simple = os.environ.get('EMCC_SIMPLE_ASM')
    class Counter:
      i = 0
      j = 0
    pre_tables = last_forwarded_json['Functions']['tables']['pre']
    del last_forwarded_json['Functions']['tables']['pre']

    def make_table(sig, raw):
      i = Counter.i
      Counter.i += 1
      bad = 'b' + str(i)
      params = ','.join(['p%d' % p for p in range(len(sig)-1)])
      coercions = ';'.join(['p%d = %sp%d%s' % (p, '+' if sig[p+1] != 'i' else '', p, '' if sig[p+1] != 'i' else '|0') for p in range(len(sig)-1)]) + ';'
      ret = '' if sig[0] == 'v' else ('return %s0' % ('+' if sig[0] != 'i' else ''))
      start = raw.index('[')
      end = raw.rindex(']')
      body = raw[start+1:end].split(',')
      for j in range(settings['RESERVED_FUNCTION_POINTERS']):
        body[settings['FUNCTION_POINTER_ALIGNMENT'] * (1 + j)] = 'jsCall_%s_%s' % (sig, j)
      Counter.j = 0
      def fix_item(item):
        Counter.j += 1
        newline = Counter.j % 30 == 29
        if item == '0': return bad if not newline else (bad + '\n')
        return item if not newline else (item + '\n')
      body = ','.join(map(fix_item, body))
      return ('function %s(%s) { %s %s(%d); %s }' % (bad, params, coercions, 'abort' if not settings['ASSERTIONS'] else 'nullFunc', i, ret), ''.join([raw[:start+1], body, raw[end:]]))
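    # e.g. for sig 'ii' with ASSERTIONS off, make_table's filler for empty slots looks
    # roughly like: function b0(p0) { p0 = p0|0; abort(0); return 0 }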

    infos = [make_table(sig, raw) for sig, raw in last_forwarded_json['Functions']['tables'].iteritems()]

    function_tables_defs = '\n'.join([info[0] for info in infos]) + '\n// EMSCRIPTEN_END_FUNCS\n' + '\n'.join([info[1] for info in infos])

    asm_setup = ''
    maths = ['Math.' + func for func in ['floor', 'abs', 'sqrt', 'pow', 'cos', 'sin', 'tan', 'acos', 'asin', 'atan', 'atan2', 'exp', 'log', 'ceil', 'imul']]
    fundamentals = ['Math', 'Int8Array', 'Int16Array', 'Int32Array', 'Uint8Array', 'Uint16Array', 'Uint32Array', 'Float32Array', 'Float64Array']
    math_envs = ['Math.min'] # TODO: move min to maths
    asm_setup += '\n'.join(['var %s = %s;' % (f.replace('.', '_'), f) for f in math_envs])

    if settings['TO_FLOAT32']: maths += ['Math.toFloat32']

    basic_funcs = ['abort', 'assert', 'asmPrintInt', 'asmPrintFloat'] + [m.replace('.', '_') for m in math_envs]
    if settings['RESERVED_FUNCTION_POINTERS'] > 0: basic_funcs.append('jsCall')
    if settings['SAFE_HEAP']: basic_funcs += ['SAFE_HEAP_LOAD', 'SAFE_HEAP_STORE', 'SAFE_HEAP_CLEAR']
    if settings['CHECK_HEAP_ALIGN']: basic_funcs += ['CHECK_ALIGN_2', 'CHECK_ALIGN_4', 'CHECK_ALIGN_8']
    if settings['ASSERTIONS']:
      basic_funcs += ['nullFunc']
      asm_setup += 'function nullFunc(x) { Module["printErr"]("Invalid function pointer called. Perhaps a miscast function pointer (check compilation warnings) or bad vtable lookup (maybe due to derefing a bad pointer, like NULL)?"); abort(x) }\n'

    basic_vars = ['STACKTOP', 'STACK_MAX', 'tempDoublePtr', 'ABORT']
    basic_float_vars = ['NaN', 'Infinity']

    if forwarded_json['Types']['preciseI64MathUsed'] or \
       forwarded_json['Functions']['libraryFunctions'].get('llvm_cttz_i32') or \
       forwarded_json['Functions']['libraryFunctions'].get('llvm_ctlz_i32'):
      basic_vars += ['cttz_i8', 'ctlz_i8']

    if settings.get('DLOPEN_SUPPORT'):
      for sig in last_forwarded_json['Functions']['tables'].iterkeys():
        basic_vars.append('F_BASE_%s' % sig)
        asm_setup += 'var F_BASE_%s = %s;\n' % (sig, 'FUNCTION_TABLE_OFFSET' if settings.get('SIDE_MODULE') else '0') + '\n'

    asm_runtime_funcs = ['stackAlloc', 'stackSave', 'stackRestore', 'setThrew'] + ['setTempRet%d' % i for i in range(10)]

    # function tables
    def asm_coerce(value, sig):
      if sig == 'v': return value
      return ('+' if sig != 'i' else '') + value + ('|0' if sig == 'i' else '')
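    # e.g. asm_coerce('x', 'i') -> 'x|0', asm_coerce('x', 'd') -> '+x', asm_coerce('x', 'v') -> 'x'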

    function_tables = ['dynCall_' + table for table in last_forwarded_json['Functions']['tables']]
    function_tables_impls = []

    for sig in last_forwarded_json['Functions']['tables'].iterkeys():
      args = ','.join(['a' + str(i) for i in range(1, len(sig))])
      arg_coercions = ' '.join(['a' + str(i) + '=' + asm_coerce('a' + str(i), sig[i]) + ';' for i in range(1, len(sig))])
      coerced_args = ','.join([asm_coerce('a' + str(i), sig[i]) for i in range(1, len(sig))])
      ret = ('return ' if sig[0] != 'v' else '') + asm_coerce('FUNCTION_TABLE_%s[index&{{{ FTM_%s }}}](%s)' % (sig, sig, coerced_args), sig[0])
      function_tables_impls.append('''
  function dynCall_%s(index%s%s) {
    index = index|0;
    %s
    %s;
  }
''' % (sig, ',' if len(sig) > 1 else '', args, arg_coercions, ret))

      for i in range(settings['RESERVED_FUNCTION_POINTERS']):
        jsret = ('return ' if sig[0] != 'v' else '') + asm_coerce('jsCall(%d%s%s)' % (i, ',' if coerced_args else '', coerced_args), sig[0])
        function_tables_impls.append('''
  function jsCall_%s_%s(%s) {
    %s
    %s;
  }

''' % (sig, i, args, arg_coercions, jsret))
      from tools import shared
      shared.Settings.copy(settings)
      asm_setup += '\n' + shared.JS.make_invoke(sig) + '\n'
      basic_funcs.append('invoke_%s' % sig)
      if settings.get('DLOPEN_SUPPORT'):
        asm_setup += '\n' + shared.JS.make_extcall(sig) + '\n'
        basic_funcs.append('extCall_%s' % sig)

    # calculate exports
    exported_implemented_functions = list(exported_implemented_functions)
    exported_implemented_functions.append('runPostSets')
    exports = []
    if not simple:
      for export in exported_implemented_functions + asm_runtime_funcs + function_tables:
        exports.append("%s: %s" % (export, export))
      exports = '{ ' + ', '.join(exports) + ' }'
    else:
      exports = '_main'

    # calculate globals
    try:
      del forwarded_json['Variables']['globals']['_llvm_global_ctors'] # not a true variable
    except:
      pass
    # If no named globals, only need externals
    global_vars = map(lambda g: g['name'], filter(lambda g: settings['NAMED_GLOBALS'] or g.get('external') or g.get('unIndexable'), forwarded_json['Variables']['globals'].values()))
    global_funcs = ['_' + key for key, value in forwarded_json['Functions']['libraryFunctions'].iteritems() if value != 2]
    def math_fix(g):
      return g if not g.startswith('Math_') else g.split('_')[1]
    asm_global_funcs = ''.join(['  var ' + g.replace('.', '_') + '=global.' + g + ';\n' for g in maths]) + \
                       ''.join(['  var ' + g + '=env.' + math_fix(g) + ';\n' for g in basic_funcs + global_funcs])
    asm_global_vars = ''.join(['  var ' + g + '=env.' + g + '|0;\n' for g in basic_vars + global_vars]) + \
                      ''.join(['  var ' + g + '=+env.' + g + ';\n' for g in basic_float_vars])

    # In linkable modules, we need to add some explicit globals for global variables that can be linked and used across modules
    if settings.get('MAIN_MODULE') or settings.get('SIDE_MODULE'):
      assert settings.get('TARGET_LE32'), 'TODO: support x86 target when linking modules (needs offset of 4 and not 8 here)'
      for key, value in forwarded_json['Variables']['globals'].iteritems():
        if value.get('linkable'):
          init = forwarded_json['Variables']['indexedGlobals'][key] + 8 # 8 is Runtime.GLOBAL_BASE / STATIC_BASE
          if settings.get('SIDE_MODULE'): init = '(H_BASE+' + str(init) + ')|0'
          asm_global_vars += '  var %s=%s;\n' % (key, str(init))

    # sent data
    the_global = '{ ' + ', '.join(['"' + math_fix(s) + '": ' + s for s in fundamentals]) + ' }'
    sending = '{ ' + ', '.join(['"' + math_fix(s) + '": ' + s for s in basic_funcs + global_funcs + basic_vars + basic_float_vars + global_vars]) + ' }'
    # received
    if not simple:
      receiving = ';\n'.join(['var ' + s + ' = Module["' + s + '"] = asm["' + s + '"]' for s in exported_implemented_functions + function_tables])
    else:
      receiving = 'var _main = Module["_main"] = asm;'

    # finalize

    if DEBUG: logging.debug('asm text sizes' + str([map(len, funcs_js), len(asm_setup), len(asm_global_vars), len(asm_global_funcs), len(pre_tables), len('\n'.join(function_tables_impls)), len(function_tables_defs.replace('\n', '\n  ')), len(exports), len(the_global), len(sending), len(receiving)]))

    funcs_js = ['''
%s
function asmPrintInt(x, y) {
  Module.print('int ' + x + ',' + y);// + ' ' + new Error().stack);
}
function asmPrintFloat(x, y) {
  Module.print('float ' + x + ',' + y);// + ' ' + new Error().stack);
}
// EMSCRIPTEN_START_ASM
var asm = (function(global, env, buffer) {
  %s
  var HEAP8 = new global.Int8Array(buffer);
  var HEAP16 = new global.Int16Array(buffer);
  var HEAP32 = new global.Int32Array(buffer);
  var HEAPU8 = new global.Uint8Array(buffer);
  var HEAPU16 = new global.Uint16Array(buffer);
  var HEAPU32 = new global.Uint32Array(buffer);
  var HEAPF32 = new global.Float32Array(buffer);
  var HEAPF64 = new global.Float64Array(buffer);
''' % (asm_setup, "'use asm';" if not forwarded_json['Types']['hasInlineJS'] and not settings['SIDE_MODULE'] else "'almost asm';") + '\n' + asm_global_vars + '''
  var __THREW__ = 0;
  var threwValue = 0;
  var setjmpId = 0;
  var undef = 0;
  var tempInt = 0, tempBigInt = 0, tempBigIntP = 0, tempBigIntS = 0, tempBigIntR = 0.0, tempBigIntI = 0, tempBigIntD = 0, tempValue = 0, tempDouble = 0.0;
''' + ''.join(['''
  var tempRet%d = 0;''' % i for i in range(10)]) + '\n' + asm_global_funcs + '''
// EMSCRIPTEN_START_FUNCS
  function stackAlloc(size) {
    size = size|0;
    var ret = 0;
    ret = STACKTOP;
    STACKTOP = (STACKTOP + size)|0;
''' + ('    STACKTOP = (STACKTOP + 3)&-4;' if settings['TARGET_X86'] else '    STACKTOP = (STACKTOP + 7)&-8;') + '''
    return ret|0;
  }
  function stackSave() {
    return STACKTOP|0;
  }
  function stackRestore(top) {
    top = top|0;
    STACKTOP = top;
  }
  function setThrew(threw, value) {
    threw = threw|0;
    value = value|0;
    if ((__THREW__|0) == 0) {
      __THREW__ = threw;
      threwValue = value;
    }
  }
  function copyTempFloat(ptr) {
    ptr = ptr|0;
    HEAP8[tempDoublePtr] = HEAP8[ptr];
    HEAP8[tempDoublePtr+1|0] = HEAP8[ptr+1|0];
    HEAP8[tempDoublePtr+2|0] = HEAP8[ptr+2|0];
    HEAP8[tempDoublePtr+3|0] = HEAP8[ptr+3|0];
  }
  function copyTempDouble(ptr) {
    ptr = ptr|0;
    HEAP8[tempDoublePtr] = HEAP8[ptr];
    HEAP8[tempDoublePtr+1|0] = HEAP8[ptr+1|0];
    HEAP8[tempDoublePtr+2|0] = HEAP8[ptr+2|0];
    HEAP8[tempDoublePtr+3|0] = HEAP8[ptr+3|0];
    HEAP8[tempDoublePtr+4|0] = HEAP8[ptr+4|0];
    HEAP8[tempDoublePtr+5|0] = HEAP8[ptr+5|0];
    HEAP8[tempDoublePtr+6|0] = HEAP8[ptr+6|0];
    HEAP8[tempDoublePtr+7|0] = HEAP8[ptr+7|0];
  }
''' + ''.join(['''
  function setTempRet%d(value) {
    value = value|0;
    tempRet%d = value;
  }
''' % (i, i) for i in range(10)])] + [PostSets.js + '\n'] + funcs_js + ['''
  %s

  return %s;
})
// EMSCRIPTEN_END_ASM
(%s, %s, buffer);
%s;
''' % (pre_tables + '\n'.join(function_tables_impls) + '\n' + function_tables_defs.replace('\n', '\n  '), exports, the_global, sending, receiving)]
    if not settings.get('SIDE_MODULE'):
      funcs_js.append('''
Runtime.stackAlloc = function(size) { return asm['stackAlloc'](size) };
Runtime.stackSave = function() { return asm['stackSave']() };
Runtime.stackRestore = function(top) { asm['stackRestore'](top) };
''')

    # Set function table masks
    masks = {}
    max_mask = 0
    for sig, table in last_forwarded_json['Functions']['tables'].iteritems():
      mask = table.count(',')
      masks[sig] = str(mask)
      max_mask = max(mask, max_mask)
    def function_table_maskize(js, masks):
      def fix(m):
        sig = m.groups(0)[0]
        return masks[sig]
      return re.sub(r'{{{ FTM_([\w\d_$]+) }}}', lambda m: fix(m), js) # masks[m.groups(0)[0]]
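    # e.g. a 256-entry FUNCTION_TABLE_ii yields a mask of 255, so the '{{{ FTM_ii }}}'
    # placeholder in the 'index&{{{ FTM_ii }}}' expression above keeps dynCall indices in range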
    funcs_js = map(lambda js: function_table_maskize(js, masks), funcs_js)

    if settings.get('DLOPEN_SUPPORT'):
      funcs_js.append('''
  asm.maxFunctionIndex = %(max_mask)d;
  DLFCN.registerFunctions(asm, %(max_mask)d+1, %(sigs)s, Module);
  Module.SYMBOL_TABLE = SYMBOL_TABLE;
''' % { 'max_mask': max_mask, 'sigs': str(map(str, last_forwarded_json['Functions']['tables'].keys())) })

  else:
    function_tables_defs = '\n'.join([table for table in last_forwarded_json['Functions']['tables'].itervalues()])
    outfile.write(function_tables_defs)
    funcs_js = ['''
// EMSCRIPTEN_START_FUNCS
'''] + funcs_js + ['''
// EMSCRIPTEN_END_FUNCS
''']

  # Create symbol table for self-dlopen
  if settings.get('DLOPEN_SUPPORT'):
    symbol_table = {}
    for k, v in forwarded_json['Variables']['indexedGlobals'].iteritems():
      if forwarded_json['Variables']['globals'][k]['named']:
        symbol_table[k] = str(v + forwarded_json['Runtime']['GLOBAL_BASE'])
    for raw in last_forwarded_json['Functions']['tables'].itervalues():
      if raw == '': continue
      table = map(string.strip, raw[raw.find('[')+1:raw.find(']')].split(","))
      for i in range(len(table)):
        value = table[i]
        if value != '0':
          if settings.get('SIDE_MODULE'):
            symbol_table[value] = 'FUNCTION_TABLE_OFFSET+' + str(i)
          else:
            symbol_table[value] = str(i)
    outfile.write("var SYMBOL_TABLE = %s;" % json.dumps(symbol_table).replace('"', ''))

  for i in range(len(funcs_js)): # do this loop carefully to save memory
    funcs_js_item = funcs_js[i]
    funcs_js[i] = None
    funcs_js_item = indexize(funcs_js_item)
    funcs_js_item = blockaddrsize(funcs_js_item)
    outfile.write(funcs_js_item)
  funcs_js = None

  outfile.write(indexize(post))
  if DEBUG: logging.debug('  emscript: phase 3 took %s seconds' % (time.time() - t))

  outfile.close()

def main(args, compiler_engine, cache, jcache, relooper, temp_files, DEBUG, DEBUG_CACHE):
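  """Converts the -s FOO=BAR settings into a settings dict, resolves the relooper and
  struct_info support files, and then runs emscript() on the input .ll file."""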
  # Prepare settings for serialization to JSON.
  settings = {}
  for setting in args.settings:
    name, value = setting.strip().split('=', 1)
    settings[name] = json.loads(value)

  # libraries
  libraries = args.libraries[0].split(',') if len(args.libraries) > 0 else []

  # Compile the assembly to Javascript.
  if settings.get('RELOOP'):
    if not relooper:
      relooper = cache.get_path('relooper.js')
    settings.setdefault('RELOOPER', relooper)
    if not os.path.exists(relooper):
      from tools import shared
      shared.Building.ensure_relooper(relooper)

  settings.setdefault('STRUCT_INFO', cache.get_path('struct_info.compiled.json'))
  struct_info = settings.get('STRUCT_INFO')
  if not os.path.exists(struct_info):
    from tools import shared
    shared.Building.ensure_struct_info(struct_info)

  emscript(args.infile, settings, args.outfile, libraries, compiler_engine=compiler_engine,
           jcache=jcache, temp_files=temp_files, DEBUG=DEBUG, DEBUG_CACHE=DEBUG_CACHE)

def _main(environ):
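  """Command-line entry point: expands @response-file arguments, parses the options,
  and hands off to main() inside a temporary-file scope."""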
  response_file = True
  while response_file:
    response_file = None
    for index in range(1, len(sys.argv)):
      if sys.argv[index][0] == '@':
        # found one, loop again next time
        response_file = True
        response_file_args = read_response_file(sys.argv[index])
        # slice in extra_args in place of the response file arg
        sys.argv[index:index+1] = response_file_args
        break

  parser = optparse.OptionParser(
    usage='usage: %prog [-h] [-H HEADERS] [-o OUTFILE] [-c COMPILER_ENGINE] [-s FOO=BAR]* infile',
    description=('You should normally never use this! Use emcc instead. '
                 'This is a wrapper around the JS compiler, converting .ll to .js.'),
    epilog='')
  parser.add_option('-H', '--headers',
                    default=[],
                    action='append',
                    help='System headers (comma separated) whose #defines should be exposed to the compiled code.')
  parser.add_option('-L', '--libraries',
                    default=[],
                    action='append',
                    help='Library files (comma separated) to use in addition to those in emscripten src/library_*.')
  parser.add_option('-o', '--outfile',
                    default=sys.stdout,
                    help='Where to write the output; defaults to stdout.')
  parser.add_option('-c', '--compiler',
                    default=None,
                    help='Which JS engine to use to run the compiler; defaults to the one in ~/.emscripten.')
  parser.add_option('--relooper',
                    default=None,
                    help='Which relooper file to use if RELOOP is enabled.')
  parser.add_option('-s', '--setting',
                    dest='settings',
                    default=[],
                    action='append',
                    metavar='FOO=BAR',
                    help=('Overrides for settings defined in settings.js. '
                          'May occur multiple times.'))
  parser.add_option('-j', '--jcache',
                    action='store_true',
                    default=False,
                    help=('Enable jcache (ccache-like caching of compilation results, for faster incremental builds).'))
  parser.add_option('-T', '--temp-dir',
                    default=None,
                    help=('Where to create temporary files.'))
  parser.add_option('-v', '--verbose',
                    action='store_true',
                    dest='verbose',
                    help='Displays debug output')
  parser.add_option('-q', '--quiet',
                    action='store_false',
                    dest='verbose',
                    help='Hides debug output')
  parser.add_option('--suppressUsageWarning',
                    action='store_true',
                    default=environ.get('EMSCRIPTEN_SUPPRESS_USAGE_WARNING'),
                    help=('Suppress usage warning'))

  # Convert to the same format that argparse would have produced.
  keywords, positional = parser.parse_args()

  if not keywords.suppressUsageWarning:
    logging.warning('''
==============================================================
WARNING: You should normally never use this! Use emcc instead.
==============================================================
  ''')

  if len(positional) != 1:
    raise RuntimeError('Must provide exactly one positional argument. Got ' + str(len(positional)) + ': "' + '", "'.join(positional) + '"')
  keywords.infile = os.path.abspath(positional[0])
  if isinstance(keywords.outfile, basestring):
    keywords.outfile = open(keywords.outfile, 'w')

  if keywords.relooper:
    relooper = os.path.abspath(keywords.relooper)
  else:
    relooper = None # use the cache

  if keywords.temp_dir is None:
    temp_files = get_configuration().get_temp_files()
    temp_dir = get_configuration().TEMP_DIR
  else:
    temp_dir = os.path.abspath(keywords.temp_dir)
    if not os.path.exists(temp_dir):
      os.makedirs(temp_dir)
    temp_files = tempfiles.TempFiles(temp_dir)

  if keywords.compiler is None:
    from tools import shared
    keywords.compiler = shared.COMPILER_ENGINE

  if keywords.verbose is None:
    DEBUG = get_configuration().DEBUG
    DEBUG_CACHE = get_configuration().DEBUG_CACHE
  else:
    DEBUG = keywords.verbose
    DEBUG_CACHE = keywords.verbose

  cache = cache_module.Cache()
  temp_files.run_and_clean(lambda: main(
    keywords,
    compiler_engine=keywords.compiler,
    cache=cache,
    jcache=cache_module.JCache(cache) if keywords.jcache else None,
    relooper=relooper,
    temp_files=temp_files,
    DEBUG=DEBUG,
    DEBUG_CACHE=DEBUG_CACHE,
  ))

if __name__ == '__main__':
  _main(environ=os.environ)