#!/usr/bin/env python2

'''
You should normally never use this! Use emcc instead.

This is a small wrapper script around the core JS compiler. This calls that
compiler with the settings given to it. It can also read data from C/C++
header files (so that the JS compiler can see the constants in those
headers, for the libc implementation in JS).
'''

import os, sys, json, optparse, subprocess, re, time, multiprocessing, functools

from tools import shared, jsrun, cache as cache_module

__rootpath__ = os.path.abspath(os.path.dirname(__file__))
def path_from_root(*pathelems):
  """Returns the absolute path for which the given path elements are
  relative to the emscripten root.
  """
  return os.path.join(__rootpath__, *pathelems)

configuration = shared.Configuration(environ=os.environ)
temp_files = shared.make_temp_files()

def scan(ll, settings):
  # blockaddress(@main, %23)
  blockaddrs = []
  for blockaddr in re.findall('blockaddress\([^)]*\)', ll):
    b = blockaddr.split('(')[1][:-1].split(', ')
    blockaddrs.append(b)
  if len(blockaddrs) > 0:
    settings['NECESSARY_BLOCKADDRS'] = blockaddrs
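
# Chunking parameters for phase 2 ('funcs'): function bodies are grouped into
# chunks that are compiled by parallel invocations of the JS compiler.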
NUM_CHUNKS_PER_CORE = 1.25
MIN_CHUNK_SIZE = 1024*1024
MAX_CHUNK_SIZE = float(os.environ.get('EMSCRIPT_MAX_CHUNK_SIZE') or 'inf') # configuring this is just for debugging purposes
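
# process_funcs compiles one chunk of function bodies by running the JS compiler's
# 'funcs' phase in a separate process; it takes a single tuple argument so it can be
# used directly with multiprocessing.Pool.map below.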
def process_funcs((i, funcs, meta, settings_file, compiler, forwarded_file, libraries, compiler_engine)):
  ll = ''.join(funcs) + '\n' + meta
  funcs_file = temp_files.get('.func_%d.ll' % i).name
  open(funcs_file, 'w').write(ll)
  out = jsrun.run_js(
    compiler,
    engine=compiler_engine,
    args=[settings_file, funcs_file, 'funcs', forwarded_file] + libraries,
    stdout=subprocess.PIPE,
    cwd=path_from_root('src'))
  shared.try_delete(funcs_file)
  return out

def emscript(configuration, infile, settings, outfile, libraries=[], compiler_engine=None,
             jcache=None):
  """Runs the emscripten LLVM-to-JS compiler. We parallelize as much as possible

  Args:
    infile: The path to the input LLVM assembly file.
    settings: JSON-formatted settings that override the values
      defined in src/settings.js.
    outfile: The file where the output is written.
  """

  DEBUG = configuration.DEBUG
  DEBUG_CACHE = configuration.DEBUG_CACHE

  compiler = path_from_root('src', 'compiler.js')

  # Parallelization: We run 3 phases:
  #   1 aka 'pre'  : Process types and metadata and so forth, and generate the preamble.
  #   2 aka 'funcs': Process functions. We can parallelize this, working on each function independently.
  #   3 aka 'post' : Process globals, generate postamble and finishing touches.

  configuration.debug_log('emscript: ll=>js')

  if jcache: jcache.ensure()

  # Pre-scan ll and alter settings as necessary
  if DEBUG: t = time.time()
  ll = open(infile).read()
  scan(ll, settings)
  total_ll_size = len(ll)
  ll = None # allow collection
  if DEBUG: print >> sys.stderr, '  emscript: scan took %s seconds' % (time.time() - t)

  # Split input into the relevant parts for each phase
  pre = []
  funcs = [] # split up functions here, for parallelism later
  func_idents = []
  meta = [] # needed by each function XXX

  if DEBUG: t = time.time()
  in_func = False
  ll_lines = open(infile).readlines()
  for line in ll_lines:
    if in_func:
      funcs[-1][1].append(line)
      if line.startswith('}'):
        in_func = False
        funcs[-1] = (funcs[-1][0], ''.join(funcs[-1][1]))
        pre.append(line) # pre needs it too, so we know about all implemented functions
    else:
      if line.startswith(';'): continue
      if line.startswith('define '):
        in_func = True
        funcs.append((line, [line])) # use the entire line as the identifier
        pre.append(line) # pre needs it too, so we know about all implemented functions
      elif line.find(' = type { ') > 0:
        pre.append(line) # type
      elif line.startswith('!'):
        if line.startswith('!llvm.module'): continue # we can ignore that
        meta.append(line) # metadata
      else:
        pre.append(line) # pre needs it so we know about globals in pre and funcs. So emit globals there
  ll_lines = None
  meta = ''.join(meta)
  if DEBUG and len(meta) > 1024*1024: print >> sys.stderr, 'emscript warning: large amounts of metadata, will slow things down'
  if DEBUG: print >> sys.stderr, '  emscript: split took %s seconds' % (time.time() - t)

  #if DEBUG:
  #  print >> sys.stderr, '========= pre ================\n'
  #  print >> sys.stderr, ''.join(pre)
  #  print >> sys.stderr, '========== funcs ===============\n'
  #  for func in funcs:
  #    print >> sys.stderr, '\n// ===\n\n', ''.join(func)
  #  print >> sys.stderr, '=========================\n'

  # Save settings to a file to work around v8 issue 1579
  settings_file = temp_files.get('.txt').name
  def save_settings():
    global settings_text
    settings_text = json.dumps(settings, sort_keys=True)
    s = open(settings_file, 'w')
    s.write(settings_text)
    s.close()
  save_settings()

  # Phase 1 - pre
  if DEBUG: t = time.time()
  pre_file = temp_files.get('.pre.ll').name
  pre_input = ''.join(pre) + '\n' + meta
  out = None
  if jcache:
    keys = [pre_input, settings_text, ','.join(libraries)]
    shortkey = jcache.get_shortkey(keys)
    if DEBUG_CACHE: print >> sys.stderr, 'shortkey', shortkey

    out = jcache.get(shortkey, keys)

    if DEBUG_CACHE and not out:
      dfpath = os.path.join(configuration.TEMP_DIR, "ems_" + shortkey)
      dfp = open(dfpath, 'w')
      dfp.write(pre_input)
      dfp.write("\n\n========================== settings_text\n\n")
      dfp.write(settings_text)
      dfp.write("\n\n========================== libraries\n\n")
      dfp.write("\n".join(libraries))
      dfp.close()
      print >> sys.stderr, '  cache miss, key data dumped to %s' % dfpath

    if out and DEBUG: print >> sys.stderr, '  loading pre from jcache'
  if not out:
    open(pre_file, 'w').write(pre_input)
    out = jsrun.run_js(compiler, compiler_engine, [settings_file, pre_file, 'pre'] + libraries, stdout=subprocess.PIPE, cwd=path_from_root('src'))
    if jcache:
      if DEBUG: print >> sys.stderr, '  saving pre to jcache'
      jcache.set(shortkey, keys, out)
  pre, forwarded_data = out.split('//FORWARDED_DATA:')
  forwarded_file = temp_files.get('.json').name
  open(forwarded_file, 'w').write(forwarded_data)
  if DEBUG: print >> sys.stderr, '  emscript: phase 1 took %s seconds' % (time.time() - t)

  # Phase 2 - func

  cores = int(os.environ.get('EMCC_CORES') or multiprocessing.cpu_count())
  assert cores >= 1
  if cores > 1:
    intended_num_chunks = int(round(cores * NUM_CHUNKS_PER_CORE))
    chunk_size = max(MIN_CHUNK_SIZE, total_ll_size / intended_num_chunks)
    chunk_size += 3*len(meta) + len(forwarded_data)/3 # keep ratio of lots of function code to meta (expensive to process, and done in each parallel task) and forwarded data (less expensive but potentially significant)
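    # Illustrative numbers only: with 4 cores, intended_num_chunks = round(4 * 1.25) = 5,
    # so 50 MB of IR yields a ~10 MB base chunk, which is then bumped by the metadata and
    # forwarded-data terms above and clamped to MAX_CHUNK_SIZE below.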
    chunk_size = min(MAX_CHUNK_SIZE, chunk_size)
  else:
    chunk_size = MAX_CHUNK_SIZE # if 1 core, just use the max chunk size

  if DEBUG: t = time.time()
  forwarded_json = json.loads(forwarded_data)
  indexed_functions = set()
  if settings.get('ASM_JS'):
    settings['EXPORTED_FUNCTIONS'] = forwarded_json['EXPORTED_FUNCTIONS']
    save_settings()

  chunks = cache_module.chunkify(
    funcs, chunk_size,
    jcache.get_cachename('emscript_files') if jcache else None)

  if jcache:
    # load chunks from cache where we can # TODO: ignore small chunks
    cached_outputs = []
    def load_from_cache(chunk):
      keys = [settings_text, forwarded_data, chunk]
      shortkey = jcache.get_shortkey(keys) # TODO: share shortkeys with later code
      out = jcache.get(shortkey, keys) # this is relatively expensive (pickling?)
      if out:
        cached_outputs.append(out)
        return False
      return True
    chunks = filter(load_from_cache, chunks)
    if len(cached_outputs) > 0:
      if out and DEBUG: print >> sys.stderr, '  loading %d funcchunks from jcache' % len(cached_outputs)
  else:
    cached_outputs = []
    # TODO: minimize size of forwarded data from funcs to what we actually need

  if cores == 1 and total_ll_size < MAX_CHUNK_SIZE:
    assert len(chunks) == 1, 'no point in splitting up without multiple cores'

  if len(chunks) > 0:
    if DEBUG: print >> sys.stderr, '  emscript: phase 2 working on %d chunks %s (intended chunk size: %.2f MB, meta: %.2f MB, forwarded: %.2f MB, total: %.2f MB)' % (len(chunks), ('using %d cores' % cores) if len(chunks) > 1 else '', chunk_size/(1024*1024.), len(meta)/(1024*1024.), len(forwarded_data)/(1024*1024.), total_ll_size/(1024*1024.))

    commands = [
      (i, chunk, meta, settings_file, compiler, forwarded_file, libraries, compiler_engine)
      for i, chunk in enumerate(chunks)
    ]

    if len(chunks) > 1:
      pool = multiprocessing.Pool(processes=cores)
      outputs = pool.map(process_funcs, commands, chunksize=1)
    elif len(chunks) == 1:
      outputs = [process_funcs(commands[0])]
  else:
    outputs = []

  if jcache:
    # save chunks to cache
    for i in range(len(chunks)):
      chunk = chunks[i]
      keys = [settings_text, forwarded_data, chunk]
      shortkey = jcache.get_shortkey(keys)
      jcache.set(shortkey, keys, outputs[i])
    if out and DEBUG and len(chunks) > 0: print >> sys.stderr, '  saving %d funcchunks to jcache' % len(chunks)

  if jcache: outputs += cached_outputs # TODO: preserve order

  outputs = [output.split('//FORWARDED_DATA:') for output in outputs]
  for output in outputs:
    assert len(output) == 2, 'Did not receive forwarded data in an output - process failed? We only got: ' + output[0]

  if DEBUG: print >> sys.stderr, '  emscript: phase 2 took %s seconds' % (time.time() - t)
  if DEBUG: t = time.time()

  # merge forwarded data
  if settings.get('ASM_JS'):
    all_exported_functions = set(settings['EXPORTED_FUNCTIONS']) # both asm.js and otherwise
    for additional_export in settings['DEFAULT_LIBRARY_FUNCS_TO_INCLUDE']: # additional functions to export from asm, if they are implemented
      all_exported_functions.add('_' + additional_export)
    exported_implemented_functions = set()
  for func_js, curr_forwarded_data in outputs:
    curr_forwarded_json = json.loads(curr_forwarded_data)
    forwarded_json['Types']['preciseI64MathUsed'] = forwarded_json['Types']['preciseI64MathUsed'] or curr_forwarded_json['Types']['preciseI64MathUsed']
    for key, value in curr_forwarded_json['Functions']['blockAddresses'].iteritems():
      forwarded_json['Functions']['blockAddresses'][key] = value
    for key in curr_forwarded_json['Functions']['indexedFunctions'].iterkeys():
      indexed_functions.add(key)
    if settings.get('ASM_JS'):
      for key in curr_forwarded_json['Functions']['implementedFunctions'].iterkeys():
        if key in all_exported_functions: exported_implemented_functions.add(key)
    for key, value in curr_forwarded_json['Functions']['unimplementedFunctions'].iteritems():
      forwarded_json['Functions']['unimplementedFunctions'][key] = value

  if settings.get('ASM_JS'):
    parts = pre.split('// ASM_LIBRARY FUNCTIONS\n')
    if len(parts) > 1:
      pre = parts[0]
      outputs.append([parts[1]])
  funcs_js = ''.join([output[0] for output in outputs])

  outputs = None
  if DEBUG: print >> sys.stderr, '  emscript: phase 2b took %s seconds' % (time.time() - t)
  if DEBUG: t = time.time()

  # calculations on merged forwarded data
  forwarded_json['Functions']['indexedFunctions'] = {}
  i = 2
  for indexed in indexed_functions:
    #print >> sys.stderr, 'indaxx', indexed, i
    forwarded_json['Functions']['indexedFunctions'][indexed] = i # make sure not to modify this python object later - we use it in indexize
    i += 2
  forwarded_json['Functions']['nextIndex'] = i

  indexing = forwarded_json['Functions']['indexedFunctions']
  def indexize(js):
    return re.sub(r"'{{ FI_([\w\d_$]+) }}'", lambda m: str(indexing.get(m.groups(0)[0]) or 0), js)

  blockaddrs = forwarded_json['Functions']['blockAddresses']
  def blockaddrsize(js):
    return re.sub(r'{{{ BA_([\w\d_$]+)\|([\w\d_$]+) }}}', lambda m: str(blockaddrs[m.groups(0)[0]][m.groups(0)[1]]), js)
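
  # indexize() replaces quoted '{{ FI_name }}' placeholders with the function's index in
  # indexedFunctions (or 0 if the function was never indexed); blockaddrsize() replaces
  # {{{ BA_func|label }}} placeholders with the numeric block address from the forwarded data.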

  #if DEBUG: outfile.write('// pre\n')
  outfile.write(blockaddrsize(indexize(pre)))
  pre = None

  #if DEBUG: outfile.write('// funcs\n')

  # forward
  forwarded_data = json.dumps(forwarded_json)
  forwarded_file = temp_files.get('.2.json').name
  open(forwarded_file, 'w').write(indexize(forwarded_data))
  if DEBUG: print >> sys.stderr, '  emscript: phase 2c took %s seconds' % (time.time() - t)

  # Phase 3 - post
  if DEBUG: t = time.time()
  post_file = temp_files.get('.post.ll').name
  open(post_file, 'w').write('\n') # no input, just processing of forwarded data
  out = jsrun.run_js(compiler, compiler_engine, [settings_file, post_file, 'post', forwarded_file] + libraries, stdout=subprocess.PIPE, cwd=path_from_root('src'))
  post, last_forwarded_data = out.split('//FORWARDED_DATA:') # if this fails, perhaps the process failed prior to printing forwarded data?
  last_forwarded_json = json.loads(last_forwarded_data)

  if settings.get('ASM_JS'):
    simple = os.environ.get('EMCC_SIMPLE_ASM')
    class Counter:
      i = 0
    pre_tables = last_forwarded_json['Functions']['tables']['pre']
    del last_forwarded_json['Functions']['tables']['pre']

    # Find function table calls without function tables generated for them
    for use in set(re.findall(r'{{{ FTM_[\w\d_$]+ }}}', funcs_js)):
      sig = use[8:len(use)-4]
      if sig not in last_forwarded_json['Functions']['tables']:
        if DEBUG: print >> sys.stderr, 'add empty function table', sig
        last_forwarded_json['Functions']['tables'][sig] = 'var FUNCTION_TABLE_' + sig + ' = [0,0];\n'

    def make_table(sig, raw):
      i = Counter.i
      Counter.i += 1
      bad = 'b' + str(i)
      params = ','.join(['p%d' % p for p in range(len(sig)-1)])
      coercions = ';'.join(['p%d = %sp%d%s' % (p, '+' if sig[p+1] != 'i' else '', p, '' if sig[p+1] != 'i' else '|0') for p in range(len(sig)-1)]) + ';'
      ret = '' if sig[0] == 'v' else ('return %s0' % ('+' if sig[0] != 'i' else ''))
      return ('function %s(%s) { %s abort(%d); %s };' % (bad, params, coercions, i, ret), raw.replace('[0,', '[' + bad + ',').replace(',0,', ',' + bad + ',').replace(',0,', ',' + bad + ',').replace(',0]', ',' + bad + ']').replace(',0]', ',' + bad + ']').replace(',0\n', ',' + bad + '\n'))
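    # Each make_table call returns a pair: an abort stub whose parameters and return value
    # are coerced to match the signature, and the table text with its null (0) entries
    # replaced by that stub, so every slot holds a callable of the right asm.js type.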
    infos = [make_table(sig, raw) for sig, raw in last_forwarded_json['Functions']['tables'].iteritems()]
    function_tables_defs = '\n'.join([info[0] for info in infos] + [info[1] for info in infos])

    asm_setup = ''
    maths = ['Math.' + func for func in ['floor', 'abs', 'sqrt', 'pow', 'cos', 'sin', 'tan', 'acos', 'asin', 'atan', 'atan2', 'exp', 'log', 'ceil', 'imul']]
    fundamentals = ['Math', 'Int8Array', 'Int16Array', 'Int32Array', 'Uint8Array', 'Uint16Array', 'Uint32Array', 'Float32Array', 'Float64Array']
    math_envs = ['Runtime.bitshift64', 'Math.min'] # TODO: move min to maths
    asm_setup += '\n'.join(['var %s = %s;' % (f.replace('.', '_'), f) for f in math_envs])
    basic_funcs = ['abort', 'assert', 'asmPrintInt', 'asmPrintFloat', 'copyTempDouble', 'copyTempFloat'] + [m.replace('.', '_') for m in math_envs]
    if settings['SAFE_HEAP']: basic_funcs += ['SAFE_HEAP_LOAD', 'SAFE_HEAP_STORE', 'SAFE_HEAP_CLEAR']
    if settings['CHECK_HEAP_ALIGN']: basic_funcs += ['CHECK_ALIGN_2', 'CHECK_ALIGN_4', 'CHECK_ALIGN_8']
    basic_vars = ['STACKTOP', 'STACK_MAX', 'tempDoublePtr', 'ABORT']
    basic_float_vars = ['NaN', 'Infinity']
    if forwarded_json['Types']['preciseI64MathUsed']:
      basic_funcs += ['i64Math_' + op for op in ['add', 'subtract', 'multiply', 'divide', 'modulo']]
      asm_setup += '''
var i64Math_add = function(a, b, c, d) { i64Math.add(a, b, c, d) };
var i64Math_subtract = function(a, b, c, d) { i64Math.subtract(a, b, c, d) };
var i64Math_multiply = function(a, b, c, d) { i64Math.multiply(a, b, c, d) };
var i64Math_divide = function(a, b, c, d, e) { i64Math.divide(a, b, c, d, e) };
var i64Math_modulo = function(a, b, c, d, e) { i64Math.modulo(a, b, c, d, e) };
'''

    asm_runtime_funcs = ['stackAlloc', 'stackSave', 'stackRestore', 'setThrew'] + ['setTempRet%d' % i for i in range(10)]

    # function tables
    def asm_coerce(value, sig):
      if sig == 'v': return value
      return ('+' if sig != 'i' else '') + value + ('|0' if sig == 'i' else '')
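    # asm_coerce applies asm.js type annotations: 'i' values get '|0', non-integer values
    # get a leading '+', and 'v' (void) is returned unchanged; e.g. ('x', 'i') -> 'x|0'
    # and ('x', 'd') -> '+x'.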

    function_tables = ['dynCall_' + table for table in last_forwarded_json['Functions']['tables']]
    function_tables_impls = []
    for sig in last_forwarded_json['Functions']['tables'].iterkeys():
      args = ','.join(['a' + str(i) for i in range(1, len(sig))])
      arg_coercions = ' '.join(['a' + str(i) + '=' + asm_coerce('a' + str(i), sig[i]) + ';' for i in range(1, len(sig))])
      coerced_args = ','.join([asm_coerce('a' + str(i), sig[i]) for i in range(1, len(sig))])
      ret = ('return ' if sig[0] != 'v' else '') + asm_coerce('FUNCTION_TABLE_%s[index&{{{ FTM_%s }}}](%s)' % (sig, sig, coerced_args), sig[0])
      function_tables_impls.append('''
  function dynCall_%s(index%s%s) {
    index = index|0;
    %s
    %s;
  }
''' % (sig, ',' if len(sig) > 1 else '', args, arg_coercions, ret))
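    # Each generated dynCall_<sig> wrapper coerces its index and arguments, then dispatches
    # through FUNCTION_TABLE_<sig>, masking the index with the {{{ FTM_<sig> }}} placeholder
    # that is resolved to the table mask below.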

    # calculate exports
    exported_implemented_functions = list(exported_implemented_functions)
    exports = []
    if not simple:
      for export in exported_implemented_functions + asm_runtime_funcs + function_tables:
        exports.append("%s: %s" % (export, export))
      exports = '{ ' + ', '.join(exports) + ' }'
    else:
      exports = '_main'

    # calculate globals
    try:
      del forwarded_json['Variables']['globals']['_llvm_global_ctors'] # not a true variable
    except:
      pass
    # If no named globals, only need externals
    global_vars = map(lambda g: g['name'], filter(lambda g: settings['NAMED_GLOBALS'] or g.get('external') or g.get('unIndexable'), forwarded_json['Variables']['globals'].values()))
    global_funcs = ['_' + x for x in forwarded_json['Functions']['libraryFunctions'].keys()]
    def math_fix(g):
      return g if not g.startswith('Math_') else g.split('_')[1]
    asm_global_funcs = ''.join(['  var ' + g.replace('.', '_') + '=global.' + g + ';\n' for g in maths]) + \
                       ''.join(['  var ' + g + '=env.' + math_fix(g) + ';\n' for g in basic_funcs + global_funcs])
    asm_global_vars = ''.join(['  var ' + g + '=env.' + g + '|0;\n' for g in basic_vars + global_vars]) + \
                      ''.join(['  var ' + g + '=+env.' + g + ';\n' for g in basic_float_vars])

    # sent data
    the_global = '{ ' + ', '.join([math_fix(s) + ': ' + s for s in fundamentals]) + ' }'
    sending = '{ ' + ', '.join([math_fix(s) + ': ' + s for s in basic_funcs + global_funcs + basic_vars + basic_float_vars + global_vars]) + ' }'

    # received
    if not simple:
      receiving = ';\n'.join(['var ' + s + ' = Module["' + s + '"] = asm.' + s for s in exported_implemented_functions + function_tables])
    else:
      receiving = 'var _main = Module["_main"] = asm;'

    # finalize
    funcs_js = '''
%s
function asmPrintInt(x, y) {
  Module.print('int ' + x + ',' + y);// + ' ' + new Error().stack);
}
function asmPrintFloat(x, y) {
  Module.print('float ' + x + ',' + y);// + ' ' + new Error().stack);
}
var asm = (function(global, env, buffer) {
  'use asm';
  var HEAP8 = new global.Int8Array(buffer);
  var HEAP16 = new global.Int16Array(buffer);
  var HEAP32 = new global.Int32Array(buffer);
  var HEAPU8 = new global.Uint8Array(buffer);
  var HEAPU16 = new global.Uint16Array(buffer);
  var HEAPU32 = new global.Uint32Array(buffer);
  var HEAPF32 = new global.Float32Array(buffer);
  var HEAPF64 = new global.Float64Array(buffer);
''' % (asm_setup,) + '\n' + asm_global_vars + '''
  var __THREW__ = 0;
  var undef = 0;
  var tempInt = 0, tempBigInt = 0, tempBigIntP = 0, tempBigIntS = 0, tempBigIntR = 0.0, tempBigIntI = 0, tempBigIntD = 0, tempValue = 0, tempDouble = 0.0;
''' + ''.join(['''
  var tempRet%d = 0;''' % i for i in range(10)]) + '\n' + asm_global_funcs + '''
  function stackAlloc(size) {
    size = size|0;
    var ret = 0;
    ret = STACKTOP;
    STACKTOP = (STACKTOP + size)|0;
    STACKTOP = ((STACKTOP + 3)>>2)<<2;
    return ret|0;
  }
  function stackSave() {
    return STACKTOP|0;
  }
  function stackRestore(top) {
    top = top|0;
    STACKTOP = top;
  }
  function setThrew(threw) {
    threw = threw|0;
    __THREW__ = threw;
  }
''' + ''.join(['''
  function setTempRet%d(value) {
    value = value|0;
    tempRet%d = value;
  }
''' % (i, i) for i in range(10)]) + funcs_js + '''

  %s

  return %s;
})(%s, %s, buffer);
%s;
Runtime.stackAlloc = function(size) { return asm.stackAlloc(size) };
Runtime.stackSave = function() { return asm.stackSave() };
Runtime.stackRestore = function(top) { asm.stackRestore(top) };
''' % (pre_tables + '\n'.join(function_tables_impls) + '\n' + function_tables_defs.replace('\n', '\n  '), exports, the_global, sending, receiving)

    # Set function table masks
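    # Each {{{ FTM_<sig> }}} placeholder is replaced with the comma count of that signature's
    # table text (effectively the table length minus one), which dynCall_<sig> uses as its
    # index mask.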
    def function_table_maskize(js):
      masks = {}
      default = None
      for sig, table in last_forwarded_json['Functions']['tables'].iteritems():
        masks[sig] = str(table.count(','))
        default = sig
      def fix(m):
        sig = m.groups(0)[0]
        return masks[sig]
      return re.sub(r'{{{ FTM_([\w\d_$]+) }}}', lambda m: fix(m), js) # masks[m.groups(0)[0]]
    funcs_js = function_table_maskize(funcs_js)
  else:
    function_tables_defs = '\n'.join([table for table in last_forwarded_json['Functions']['tables'].itervalues()])
    outfile.write(function_tables_defs)
  outfile.write(blockaddrsize(indexize(funcs_js)))
  funcs_js = None

  outfile.write(indexize(post))
  if DEBUG: print >> sys.stderr, '  emscript: phase 3 took %s seconds' % (time.time() - t)

  outfile.close()

def main(args, compiler_engine, cache, jcache, relooper):
  # Prepare settings for serialization to JSON.
  settings = {}
  for setting in args.settings:
    name, value = setting.strip().split('=', 1)
    settings[name] = json.loads(value)

  # Add header defines to settings
  defines = {}
  include_root = path_from_root('system', 'include')
  headers = args.headers[0].split(',') if len(args.headers) > 0 else []
  seen_headers = set()
  while len(headers) > 0:
    header = headers.pop(0)
    if not os.path.isabs(header):
      header = os.path.join(include_root, header)
    seen_headers.add(header)
    for line in open(header, 'r'):
      line = line.replace('\t', ' ')
      m = re.match('^ *# *define +(?P<name>[-\w_.]+) +\(?(?P<value>[-\w_.|]+)\)?.*', line)
      if not m:
        # Catch enum defines of a very limited sort
        m = re.match('^ +(?P<name>[A-Z_\d]+) += +(?P<value>\d+).*', line)
      if m:
        if m.group('name') != m.group('value'):
          defines[m.group('name')] = m.group('value')
        #else:
        #  print 'Warning: %s #defined to itself' % m.group('name') # XXX this can happen if we are set to be equal to an enum (with the same name)
      m = re.match('^ *# *include *["<](?P<name>[\w_.-/]+)[">].*', line)
      if m:
        # Find this file
        found = False
        for w in [w for w in os.walk(include_root)]:
          for f in w[2]:
            curr = os.path.join(w[0], f)
            if curr.endswith(m.group('name')) and curr not in seen_headers:
              headers.append(curr)
              found = True
              break
          if found: break
        #assert found, 'Could not find header: ' + m.group('name')
  if len(defines) > 0:
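    # lookup() resolves a define's value: it follows chains of defines until the value is
    # numeric, then tries eval for literals such as 0x300, then handles OR'd combinations
    # like CONST1|CONST2; None means the value could not be resolved and the define is dropped.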
    def lookup(value):
      try:
        while not unicode(value).isnumeric():
          value = defines[value]
        return value
      except:
        pass
      try: # 0x300 etc.
        value = eval(value)
        return value
      except:
        pass
      try: # CONST1|CONST2
        parts = map(lookup, value.split('|'))
        value = reduce(lambda a, b: a|b, map(eval, parts))
        return value
      except:
        pass
      return None
    for key, value in defines.items():
      value = lookup(value)
      if value is not None:
        defines[key] = str(value)
      else:
        del defines[key]
    #print >> sys.stderr, 'new defs:', str(defines).replace(',', ',\n  '), '\n\n'
    settings.setdefault('C_DEFINES', {}).update(defines)

  # libraries
  libraries = args.libraries[0].split(',') if len(args.libraries) > 0 else []

  # Compile the assembly to Javascript.
  if settings.get('RELOOP'):
    if not relooper:
      relooper = cache.get_path('relooper.js')
    settings.setdefault('RELOOPER', relooper)
    shared.Building.ensure_relooper(relooper)

  emscript(configuration, args.infile, settings, args.outfile, libraries,
           compiler_engine=compiler_engine,
           jcache=jcache)

def _main(environ):
  parser = optparse.OptionParser(
    usage='usage: %prog [-h] [-H HEADERS] [-o OUTFILE] [-c COMPILER_ENGINE] [-s FOO=BAR]* infile',
    description=('You should normally never use this! Use emcc instead. '
                 'This is a wrapper around the JS compiler, converting .ll to .js.'),
    epilog='')
  parser.add_option('-H', '--headers',
                    default=[],
                    action='append',
                    help='System headers (comma separated) whose #defines should be exposed to the compiled code.')
  parser.add_option('-L', '--libraries',
                    default=[],
                    action='append',
                    help='Library files (comma separated) to use in addition to those in emscripten src/library_*.')
  parser.add_option('-o', '--outfile',
                    default=sys.stdout,
                    help='Where to write the output; defaults to stdout.')
  parser.add_option('-c', '--compiler',
                    default=shared.COMPILER_ENGINE,
                    help='Which JS engine to use to run the compiler; defaults to the one in ~/.emscripten.')
  parser.add_option('--relooper',
                    default=None,
                    help='Which relooper file to use if RELOOP is enabled')
  parser.add_option('-s', '--setting',
                    dest='settings',
                    default=[],
                    action='append',
                    metavar='FOO=BAR',
                    help=('Overrides for settings defined in settings.js. '
                          'May occur multiple times.'))
  parser.add_option('-j', '--jcache',
                    action='store_true',
                    default=False,
                    help=('Enable jcache (ccache-like caching of compilation results, for faster incremental builds).'))
  parser.add_option('--suppressUsageWarning',
                    action='store_true',
                    default=environ.get('EMSCRIPTEN_SUPPRESS_USAGE_WARNING'),
                    help=('Suppress usage warning'))

  # Convert to the same format that argparse would have produced.
  keywords, positional = parser.parse_args()

  if not keywords.suppressUsageWarning:
    print >> sys.stderr, '''
==============================================================
WARNING: You should normally never use this! Use emcc instead.
==============================================================
'''

  if len(positional) != 1:
    raise RuntimeError('Must provide exactly one positional argument.')
  keywords.infile = os.path.abspath(positional[0])
  if isinstance(keywords.outfile, basestring):
    keywords.outfile = open(keywords.outfile, 'w')

  if keywords.relooper:
    relooper = os.path.abspath(keywords.relooper)
  else:
    relooper = None # use the cache

  cache = cache_module.Cache()
  temp_files.run_and_clean(lambda: main(
    keywords,
    compiler_engine=os.path.abspath(keywords.compiler),
    cache=cache,
    jcache=cache_module.JCache(cache) if keywords.jcache else None,
    relooper=relooper))

if __name__ == '__main__':
  _main(environ=os.environ)