2012-04-25 01:47:33 +04:00
|
|
|
#!/usr/bin/env python
|
2010-09-10 07:03:24 +04:00
|
|
|
|
2011-10-13 22:40:46 +04:00
|
|
|
'''
|
2012-01-22 05:16:50 +04:00
|
|
|
You should normally never use this! Use emcc instead.
|
2011-10-13 22:40:46 +04:00
|
|
|
|
2012-01-22 05:16:50 +04:00
|
|
|
This is a small wrapper script around the core JS compiler. This calls that
|
|
|
|
compiler with the settings given to it. It can also read data from C/C++
|
|
|
|
header files (so that the JS compiler can see the constants in those
|
|
|
|
headers, for the libc implementation in JS).
|
2011-10-13 22:40:46 +04:00
|
|
|
'''
|
|
|
|
|
2012-11-07 04:01:01 +04:00
|
|
|
import os, sys, json, optparse, subprocess, re, time, multiprocessing
|
2010-09-10 07:03:24 +04:00
|
|
|
|
2012-02-05 22:37:40 +04:00
|
|
|
# Warn interactive users away from this low-level driver (emcc is the
# supported front end); the warning can be silenced via the environment.
if not os.environ.get('EMSCRIPTEN_SUPPRESS_USAGE_WARNING'):
  print >> sys.stderr, '''
==============================================================
WARNING: You should normally never use this! Use emcc instead.
==============================================================
'''

from tools import shared

# Truthy when the EMCC_DEBUG env var is set; enables the per-phase timing
# and diagnostic prints throughout this file.
DEBUG = os.environ.get('EMCC_DEBUG')
|
|
|
|
|
2011-10-05 22:12:45 +04:00
|
|
|
__rootpath__ = os.path.abspath(os.path.dirname(__file__))

def path_from_root(*pathelems):
  """Resolve *pathelems* against the emscripten root directory.

  The root is the directory containing this script; the given elements are
  joined onto it and the absolute result is returned.
  """
  components = (__rootpath__,) + pathelems
  return os.path.join(*components)
|
2011-07-07 11:38:35 +04:00
|
|
|
|
2011-12-12 03:24:04 +04:00
|
|
|
# Scratch-file factory; cleaned up by temp_files.run_and_clean() at exit.
temp_files = shared.TempFiles()

# JS engine used to run the compiler in phase-2 workers; overwritten from
# the -c command-line option when run as a script (see __main__ below).
compiler_engine = None
# Whether jcache (ccache-like result caching) is enabled; set from -j.
jcache = False
|
2011-07-08 08:04:38 +04:00
|
|
|
|
2012-11-06 00:24:46 +04:00
|
|
|
def scan(ll, settings):
  """Pre-scan the LLVM assembly and record what the JS compiler must know
  up front.

  Currently this collects blockaddress(@func, %label) constants: the JS
  compiler needs the complete list before processing functions.

  Args:
    ll: The entire LLVM assembly as one string.
    settings: Settings dict, mutated in place; NECESSARY_BLOCKADDRS is
      added only when at least one blockaddress constant is present.
  """
  # blockaddress(@main, %23)
  blockaddrs = []
  # Raw string so the backslash reliably reaches the regex engine.
  for blockaddr in re.findall(r'blockaddress\([^)]*\)', ll):
    # 'blockaddress(@main, %23)' -> ['@main', '%23']
    blockaddrs.append(blockaddr.split('(')[1][:-1].split(', '))
  if blockaddrs:
    settings['NECESSARY_BLOCKADDRS'] = blockaddrs
|
|
|
|
|
2012-11-15 02:58:08 +04:00
|
|
|
# Phase-2 chunking parameters: the function list is split into chunks that
# are compiled in parallel (see emscript below).
NUM_CHUNKS_PER_CORE = 5
MIN_CHUNK_SIZE = 1024*1024
MAX_CHUNK_SIZE = float(os.environ.get('EMSCRIPT_MAX_CHUNK_SIZE') or 'inf') # configuring this is just for debugging purposes
|
2012-11-07 03:43:24 +04:00
|
|
|
|
2012-11-07 01:51:48 +04:00
|
|
|
def process_funcs(args):
  """Phase-2 worker: compile one chunk of LLVM functions to JS.

  Takes a single tuple (so it can be used with multiprocessing.Pool.map):
  (i, funcs, meta, settings_file, compiler, forwarded_file, libraries).
  Writes the chunk plus the shared metadata to a temp .ll file, runs the
  JS compiler on it in 'funcs' mode, and returns the compiler's stdout
  (generated JS followed by a //FORWARDED_DATA: JSON section).
  """
  i, funcs, meta, settings_file, compiler, forwarded_file, libraries = args
  # funcs is joined as-is; presumably a string or list of lines as produced
  # by JCache.chunkify - confirm against tools/shared.
  ll = ''.join(funcs) + '\n' + meta
  funcs_file = temp_files.get('.func_%d.ll' % i).name
  open(funcs_file, 'w').write(ll)
  out = shared.run_js(compiler, compiler_engine, [settings_file, funcs_file, 'funcs', forwarded_file] + libraries, stdout=subprocess.PIPE, cwd=path_from_root('src'))
  # Delete eagerly rather than waiting for exit cleanup: many chunk files
  # may be created during a large build.
  shared.try_delete(funcs_file)
  return out
|
2012-11-07 01:51:48 +04:00
|
|
|
|
2012-04-14 05:57:41 +04:00
|
|
|
def emscript(infile, settings, outfile, libraries=[]):
  """Runs the emscripten LLVM-to-JS compiler. We parallelize as much as possible

  Args:
    infile: The path to the input LLVM assembly file.
    settings: JSON-formatted settings that override the values
      defined in src/settings.js.
    outfile: The file where the output is written.
    libraries: Extra JS library files passed through to the compiler.
      NOTE(review): mutable default argument; harmless as written (never
      mutated here), but a None default would be safer.
  """

  compiler = path_from_root('src', 'compiler.js')

  # Parallelization: We run 3 phases:
  # 1 aka 'pre' : Process types and metadata and so forth, and generate the preamble.
  # 2 aka 'funcs': Process functions. We can parallelize this, working on each function independently.
  # 3 aka 'post' : Process globals, generate postamble and finishing touches.

  if DEBUG: print >> sys.stderr, 'emscript: ll=>js'

  if jcache: shared.JCache.ensure()

  # Pre-scan ll and alter settings as necessary
  if DEBUG: t = time.time()
  ll = open(infile).read()
  scan(ll, settings)
  total_ll_size = len(ll) # remembered for chunk sizing in phase 2
  ll = None # allow collection
  if DEBUG: print >> sys.stderr, ' emscript: scan took %s seconds' % (time.time() - t)

  # Split input into the relevant parts for each phase
  pre = []
  funcs = [] # split up functions here, for parallelism later
  func_idents = [] # NOTE(review): appears unused in this function - verify before removing
  meta = [] # needed by each function XXX

  if DEBUG: t = time.time()
  in_func = False
  ll_lines = open(infile).readlines()
  for line in ll_lines:
    if in_func:
      funcs[-1][1].append(line)
      if line.startswith('}'):
        in_func = False
        # seal the finished function: (identifier line, full text)
        funcs[-1] = (funcs[-1][0], ''.join(funcs[-1][1]))
        pre.append(line) # pre needs it to, so we know about all implemented functions
    else:
      if line.startswith(';'): continue
      if line.startswith('define '):
        in_func = True
        funcs.append((line, [line])) # use the entire line as the identifier
        pre.append(line) # pre needs it to, so we know about all implemented functions
      elif line.find(' = type { ') > 0:
        pre.append(line) # type
      elif line.startswith('!'):
        meta.append(line) # metadata
      else:
        pre.append(line) # pre needs it so we know about globals in pre and funcs. So emit globals there
  ll_lines = None
  meta = ''.join(meta)
  if DEBUG and len(meta) > 1024*1024: print >> sys.stderr, 'emscript warning: large amounts of metadata, will slow things down'
  if DEBUG: print >> sys.stderr, ' emscript: split took %s seconds' % (time.time() - t)

  #if DEBUG:
  #  print >> sys.stderr, '========= pre ================\n'
  #  print >> sys.stderr, ''.join(pre)
  #  print >> sys.stderr, '========== funcs ===============\n'
  #  for func in funcs:
  #    print >> sys.stderr, '\n// ===\n\n', ''.join(func)
  #  print >> sys.stderr, '=========================\n'

  # Save settings to a file to work around v8 issue 1579
  settings_file = temp_files.get('.txt').name
  def save_settings():
    # NOTE(review): settings_text is assigned as a function local here, yet
    # the jcache code below reads settings_text after this returns; upstream
    # declared 'global settings_text' at this point - verify that line was
    # not dropped, otherwise the jcache paths raise NameError.
    settings_text = json.dumps(settings)
    s = open(settings_file, 'w')
    s.write(settings_text)
    s.close()
  save_settings()

  # Phase 1 - pre
  if DEBUG: t = time.time()
  pre_file = temp_files.get('.pre.ll').name
  pre_input = ''.join(pre) + '\n' + meta
  out = None
  if jcache:
    keys = [pre_input, settings_text, ','.join(libraries)]
    shortkey = shared.JCache.get_shortkey(keys)
    out = shared.JCache.get(shortkey, keys)
    if out and DEBUG: print >> sys.stderr, ' loading pre from jcache'
  if not out:
    open(pre_file, 'w').write(pre_input)
    out = shared.run_js(compiler, shared.COMPILER_ENGINE, [settings_file, pre_file, 'pre'] + libraries, stdout=subprocess.PIPE, cwd=path_from_root('src'))
    if jcache:
      if DEBUG: print >> sys.stderr, ' saving pre to jcache'
      shared.JCache.set(shortkey, keys, out)
  # Compiler output is generated JS, then a marker, then JSON data that is
  # forwarded to the later phases.
  pre, forwarded_data = out.split('//FORWARDED_DATA:')
  forwarded_file = temp_files.get('.json').name
  open(forwarded_file, 'w').write(forwarded_data)
  if DEBUG: print >> sys.stderr, ' emscript: phase 1 took %s seconds' % (time.time() - t)

  # Phase 2 - func

  cores = multiprocessing.cpu_count()
  assert cores >= 1
  if cores > 1:
    intended_num_chunks = cores * NUM_CHUNKS_PER_CORE
    chunk_size = max(MIN_CHUNK_SIZE, total_ll_size / intended_num_chunks)
    chunk_size += 3*len(meta) # keep ratio of lots of function code to meta (expensive to process, and done in each parallel task)
    chunk_size = min(MAX_CHUNK_SIZE, chunk_size)
  else:
    chunk_size = MAX_CHUNK_SIZE # if 1 core, just use the max chunk size

  if DEBUG: t = time.time()
  forwarded_json = json.loads(forwarded_data)
  indexed_functions = set()
  if settings.get('ASM_JS'):
    settings['EXPORTED_FUNCTIONS'] = forwarded_json['EXPORTED_FUNCTIONS']
    save_settings()

  chunks = shared.JCache.chunkify(funcs, chunk_size, 'emscript_files' if jcache else None)

  if jcache:
    # load chunks from cache where we can # TODO: ignore small chunks
    cached_outputs = []
    def load_from_cache(chunk):
      keys = [settings_text, forwarded_data, chunk]
      shortkey = shared.JCache.get_shortkey(keys) # TODO: share shortkeys with later code
      out = shared.JCache.get(shortkey, keys) # this is relatively expensive (pickling?)
      if out:
        cached_outputs.append(out)
        return False # cache hit: drop from the work list
      return True # cache miss: keep for compilation
    chunks = filter(load_from_cache, chunks)
    if len(cached_outputs) > 0:
      # NOTE(review): 'out' here is the phase-1 output, not a chunk result;
      # this guard looks like it was meant to be just 'if DEBUG' - verify.
      if out and DEBUG: print >> sys.stderr, ' loading %d funcchunks from jcache' % len(cached_outputs)
  else:
    cached_outputs = []

  # TODO: minimize size of forwarded data from funcs to what we actually need

  if cores == 1 and total_ll_size < MAX_CHUNK_SIZE: assert len(chunks) == 1, 'no point in splitting up without multiple cores'

  if len(chunks) > 0:
    if DEBUG: print >> sys.stderr, ' emscript: phase 2 working on %d chunks %s (intended chunk size: %.2f MB, meta: %.2f MB, forwarded: %.2f MB, total: %.2f MB)' % (len(chunks), ('using %d cores' % cores) if len(chunks) > 1 else '', chunk_size/(1024*1024.), len(meta)/(1024*1024.), len(forwarded_data)/(1024*1024.), total_ll_size/(1024*1024.))

    commands = [(i, chunks[i], meta, settings_file, compiler, forwarded_file, libraries) for i in range(len(chunks))]

    if len(chunks) > 1:
      pool = multiprocessing.Pool(processes=cores)
      outputs = pool.map(process_funcs, commands, chunksize=1)
    elif len(chunks) == 1:
      # single chunk: run in-process, no pool overhead
      outputs = [process_funcs(commands[0])]
  else:
    outputs = []

  if jcache:
    # save chunks to cache
    for i in range(len(chunks)):
      chunk = chunks[i]
      keys = [settings_text, forwarded_data, chunk]
      shortkey = shared.JCache.get_shortkey(keys)
      shared.JCache.set(shortkey, keys, outputs[i])
    # NOTE(review): same suspicious 'out and DEBUG' guard as above - verify.
    if out and DEBUG and len(chunks) > 0: print >> sys.stderr, ' saving %d funcchunks to jcache' % len(chunks)

  if jcache: outputs += cached_outputs # TODO: preserve order

  # Each output becomes [generated_js, forwarded_json_text].
  outputs = [output.split('//FORWARDED_DATA:') for output in outputs]

  if DEBUG: print >> sys.stderr, ' emscript: phase 2 took %s seconds' % (time.time() - t)
  if DEBUG: t = time.time()

  # merge forwarded data
  if settings.get('ASM_JS'):
    all_exported_functions = set(settings['EXPORTED_FUNCTIONS']) # both asm.js and otherwise
    exported_implemented_functions = set()
  for func_js, curr_forwarded_data in outputs:
    curr_forwarded_json = json.loads(curr_forwarded_data)
    forwarded_json['Types']['preciseI64MathUsed'] = forwarded_json['Types']['preciseI64MathUsed'] or curr_forwarded_json['Types']['preciseI64MathUsed']
    for key, value in curr_forwarded_json['Functions']['blockAddresses'].iteritems():
      forwarded_json['Functions']['blockAddresses'][key] = value
    for key in curr_forwarded_json['Functions']['indexedFunctions'].iterkeys():
      indexed_functions.add(key)
    if settings.get('ASM_JS'):
      for key in curr_forwarded_json['Functions']['implementedFunctions'].iterkeys():
        if key in all_exported_functions: exported_implemented_functions.add(key)
    for key, value in curr_forwarded_json['Functions']['unimplementedFunctions'].iteritems():
      forwarded_json['Functions']['unimplementedFunctions'][key] = value

  funcs_js = ''.join([output[0] for output in outputs])

  outputs = None
  if DEBUG: print >> sys.stderr, ' emscript: phase 2b took %s seconds' % (time.time() - t)
  if DEBUG: t = time.time()

  # calculations on merged forwarded data
  forwarded_json['Functions']['indexedFunctions'] = {}
  i = 2 # indexes start at 2 and advance by 2 - presumably so odd values stay invalid; confirm
  for indexed in indexed_functions:
    #print >> sys.stderr, 'indaxx', indexed, i
    forwarded_json['Functions']['indexedFunctions'][indexed] = i # make sure not to modify this python object later - we use it in indexize
    i += 2
  forwarded_json['Functions']['nextIndex'] = i

  indexing = forwarded_json['Functions']['indexedFunctions']
  def indexize(js):
    # Replace {{{ FI_name }}} placeholders with the function's index.
    return re.sub(r'{{{ FI_([\w\d_$]+) }}}', lambda m: str(indexing[m.groups(0)[0]]), js)

  blockaddrs = forwarded_json['Functions']['blockAddresses']
  def blockaddrsize(js):
    # Replace {{{ BA_func|label }}} placeholders with the block address.
    return re.sub(r'{{{ BA_([\w\d_$]+)\|([\w\d_$]+) }}}', lambda m: str(blockaddrs[m.groups(0)[0]][m.groups(0)[1]]), js)

  #if DEBUG: outfile.write('// pre\n')
  outfile.write(blockaddrsize(indexize(pre)))
  pre = None

  #if DEBUG: outfile.write('// funcs\n')

  # forward
  forwarded_data = json.dumps(forwarded_json)
  forwarded_file = temp_files.get('.2.json').name
  open(forwarded_file, 'w').write(indexize(forwarded_data))
  if DEBUG: print >> sys.stderr, ' emscript: phase 2c took %s seconds' % (time.time() - t)

  # Phase 3 - post
  if DEBUG: t = time.time()
  post_file = temp_files.get('.post.ll').name
  open(post_file, 'w').write('\n') # no input, just processing of forwarded data
  out = shared.run_js(compiler, shared.COMPILER_ENGINE, [settings_file, post_file, 'post', forwarded_file] + libraries, stdout=subprocess.PIPE, cwd=path_from_root('src'))
  post, last_forwarded_data = out.split('//FORWARDED_DATA:')
  last_forwarded_json = json.loads(last_forwarded_data)

  if settings.get('ASM_JS'):
    # asm.js mode: wrap the generated functions in an asm.js-style module.
    class Counter:
      i = 0 # running index for the abort stubs generated by make_table
    def make_table(sig, raw):
      # Emit one function table, replacing empty (0) slots with an abort
      # stub that matches the table's signature.
      i = Counter.i
      Counter.i += 1
      bad = 'b' + str(i)
      params = ','.join(['p%d' % p for p in range(len(sig)-1)])
      coercions = ';'.join(['p%d = %sp%d%s' % (p, '+' if sig[p+1] == 'd' else '', p, '' if sig[p+1] == 'd' else '|0') for p in range(len(sig)-1)]) + ';'
      ret = '' if sig[0] == 'v' else ('return %s0' % ('+' if sig[0] == 'd' else ''))
      return 'function %s(%s) { %s abort(%d); %s };\n' % (bad, params, coercions, i, ret) + raw.replace('0,', bad + ',').replace('0]', bad + ']')
    function_tables_defs = '\n'.join([make_table(sig, raw) for sig, raw in last_forwarded_json['Functions']['tables'].iteritems()])

    # Aliases so dotted names can be referenced from inside the module.
    asm_setup = '\n'.join(['var %s = %s;' % (f.replace('.', '_'), f) for f in ['Runtime.bitshift64', 'Math.floor', 'Math.min']])
    fundamentals = ['buffer', 'Int8Array', 'Int16Array', 'Int32Array', 'Uint8Array', 'Uint16Array', 'Uint32Array', 'Float32Array', 'Float64Array']
    basics = ['abort', 'assert', 'STACKTOP', 'STACK_MAX', 'tempDoublePtr', 'ABORT', 'Runtime_bitshift64', 'Math_floor', 'Math_min']
    if not settings['NAMED_GLOBALS']: basics += ['GLOBAL_BASE']
    if forwarded_json['Types']['preciseI64MathUsed']:
      basics += ['i64Math_' + op for op in ['add', 'subtract', 'multiply', 'divide', 'modulo']]
      asm_setup += '''
var i64Math_add = function(a, b, c, d) { i64Math.add(a, b, c, d) };
var i64Math_subtract = function(a, b, c, d) { i64Math.subtract(a, b, c, d) };
var i64Math_multiply = function(a, b, c, d) { i64Math.multiply(a, b, c, d) };
var i64Math_divide = function(a, b, c, d, e) { i64Math.divide(a, b, c, d, e) };
var i64Math_modulo = function(a, b, c, d, e) { i64Math.modulo(a, b, c, d, e) };
'''
    asm_runtime_funcs = ['stackAlloc', 'stackSave', 'stackRestore', 'setThrew'] + ['setTempRet%d' % i for i in range(10)]
    # function tables
    function_tables = ['dynCall_' + table for table in last_forwarded_json['Functions']['tables']]
    function_tables_impls = []
    for sig in last_forwarded_json['Functions']['tables'].iterkeys():
      args = ','.join(['a' + str(i) for i in range(1, len(sig))])
      arg_coercions = ' '.join(['a' + str(i) + '=' + ('+' if sig[i] == 'd' else '') + 'a' + str(i) + ('|0' if sig[i] == 'i' else '') + ';' for i in range(1, len(sig))])
      function_tables_impls.append('''
  function dynCall_%s(index%s%s) {
    %s
    %sFUNCTION_TABLE_%s[index](%s);
  }
''' % (sig, ',' if len(sig) > 1 else '', args, arg_coercions, 'return ' if sig[0] != 'v' else '', sig, args))

    # calculate exports
    exported_implemented_functions = list(exported_implemented_functions)
    exports = []
    for export in exported_implemented_functions + asm_runtime_funcs + function_tables:
      exports.append("'%s': %s" % (export, export))
    exports = '{ ' + ', '.join(exports) + ' }'
    # calculate globals
    try:
      del forwarded_json['Variables']['globals']['_llvm_global_ctors'] # not a true variable
    except:
      pass
    global_vars = forwarded_json['Variables']['globals'].keys() if settings['NAMED_GLOBALS'] else []
    global_funcs = ['_' + x for x in forwarded_json['Functions']['libraryFunctions'].keys()]
    asm_globals = ''.join([' var ' + g + '=env.' + g + ';\n' for g in basics + global_funcs + global_vars])
    # sent data
    sending = '{ ' + ', '.join([s + ': ' + s for s in fundamentals + basics + global_funcs + global_vars]) + ' }'
    # received
    receiving = ';\n'.join(['var ' + s + ' = Module["' + s + '"] = asm.' + s for s in exported_implemented_functions + function_tables])
    # finalize
    funcs_js = '''
%s
var asmPre = (function(env, buffer) {
  'use asm';
  var HEAP8 = new env.Int8Array(buffer);
  var HEAP16 = new env.Int16Array(buffer);
  var HEAP32 = new env.Int32Array(buffer);
  var HEAPU8 = new env.Uint8Array(buffer);
  var HEAPU16 = new env.Uint16Array(buffer);
  var HEAPU32 = new env.Uint32Array(buffer);
  var HEAPF32 = new env.Float32Array(buffer);
  var HEAPF64 = new env.Float64Array(buffer);
''' % (asm_setup,) + asm_globals + '''
  var __THREW__ = 0;
  var undef = 0;

  function stackAlloc(size) {
    var ret = STACKTOP;
    STACKTOP = (STACKTOP + size)|0;
    STACKTOP = ((STACKTOP + 3)>>2)<<2;
    return ret|0;
  }
  function stackSave() {
    return STACKTOP|0;
  }
  function stackRestore(top) {
    top = top|0;
    STACKTOP = top;
  }
  function setThrew(threw) {
    threw = threw|0;
    __THREW__ = threw;
  }
''' + ''.join(['''
  var tempRet%d = 0;
  function setTempRet%d(value) {
    value = value|0;
    tempRet%d = value;
  }
''' % (i, i, i) for i in range(10)]) + funcs_js.replace('\n', '\n ') + '''

  %s

  return %s;
});
if (asmPre.toSource) { // works in sm but not v8, so we get full coverage between those two
  asmPre = asmPre.toSource();
  asmPre = asmPre.substr(25, asmPre.length-28);
  asmPre = new Function('env', 'buffer', asmPre);
}
var asm = asmPre(%s, buffer); // pass through Function to prevent seeing outside scope
%s;
Runtime.stackAlloc = function(size) { return asm.stackAlloc(size) };
Runtime.stackSave = function() { return asm.stackSave() };
Runtime.stackRestore = function(top) { asm.stackRestore(top) };
''' % (function_tables_defs.replace('\n', '\n ') + '\n' + '\n'.join(function_tables_impls), exports, sending, receiving)

    # Set function table masks
    def function_table_maskize(js):
      masks = {}
      for sig, table in last_forwarded_json['Functions']['tables'].iteritems():
        masks[sig] = str(table.count(','))
      # Replace {{{ FTM_sig }}} with the table's index mask.
      return re.sub(r'{{{ FTM_([vdi]+) }}}', lambda m: masks[m.groups(0)[0]], js)
    funcs_js = function_table_maskize(funcs_js)
  else:
    function_tables_defs = '\n'.join([table for table in last_forwarded_json['Functions']['tables'].itervalues()])
    outfile.write(function_tables_defs)
  outfile.write(blockaddrsize(indexize(funcs_js)))
  funcs_js = None

  outfile.write(indexize(post))
  if DEBUG: print >> sys.stderr, ' emscript: phase 3 took %s seconds' % (time.time() - t)

  outfile.close()
|
|
|
|
|
|
|
|
|
|
|
|
def main(args):
  """Drive one .ll -> .js compilation from parsed command-line options.

  Args:
    args: An options object with .settings, .headers, .libraries, .infile
      and .outfile attributes (see the option parser in __main__).
  """
  # Prepare settings for serialization to JSON.
  settings = {}
  for setting in args.settings:
    name, value = setting.strip().split('=', 1)
    settings[name] = json.loads(value)

  # Add header defines to settings
  defines = {}
  include_root = path_from_root('system', 'include')
  headers = args.headers[0].split(',') if len(args.headers) > 0 else []
  seen_headers = set()
  while headers:
    header = headers.pop(0)
    if not os.path.isabs(header):
      header = os.path.join(include_root, header)
    seen_headers.add(header)
    with open(header, 'r') as header_file: # close each header when done
      for line in header_file:
        line = line.replace('\t', ' ')
        m = re.match(r'^ *# *define +(?P<name>[-\w_.]+) +\(?(?P<value>[-\w_.|]+)\)?.*', line)
        if not m:
          # Catch enum defines of a very limited sort
          m = re.match(r'^ +(?P<name>[A-Z_\d]+) += +(?P<value>\d+).*', line)
        if m:
          if m.group('name') != m.group('value'):
            defines[m.group('name')] = m.group('value')
          #else:
          #  print 'Warning: %s #defined to itself' % m.group('name') # XXX this can happen if we are set to be equal to an enum (with the same name)
        m = re.match(r'^ *# *include *["<](?P<name>[\w_.-/]+)[">].*', line)
        if m:
          # Find the included file under include_root and queue it too.
          found = False
          for w in os.walk(include_root):
            for f in w[2]:
              curr = os.path.join(w[0], f)
              if curr.endswith(m.group('name')) and curr not in seen_headers:
                headers.append(curr)
                found = True
                break
            if found: break
          #assert found, 'Could not find header: ' + m.group('name')
  if defines:
    def lookup(value):
      # Best-effort resolution of a define's value to a number; each
      # strategy below is tried in turn and failures fall through.
      try:
        # Follow chains of defines until we reach a numeric literal.
        while not unicode(value).isnumeric():
          value = defines[value]
        return value
      except Exception:
        pass
      try: # 0x300 etc.
        # SECURITY NOTE: eval on header text - acceptable only because the
        # headers come from emscripten's own system/include tree, not from
        # untrusted input.
        value = eval(value)
        return value
      except Exception:
        pass
      try: # CONST1|CONST2
        parts = map(lookup, value.split('|'))
        value = reduce(lambda a, b: a|b, map(eval, parts))
        return value
      except Exception:
        pass
      return None # unresolvable; the caller drops this define

    for key, value in list(defines.items()): # copy: entries are deleted below
      value = lookup(value)
      if value is not None:
        defines[key] = str(value)
      else:
        del defines[key]
    #print >> sys.stderr, 'new defs:', str(defines).replace(',', ',\n '), '\n\n'
    settings.setdefault('C_DEFINES', {}).update(defines)

  # libraries
  libraries = args.libraries[0].split(',') if len(args.libraries) > 0 else []

  # Compile the assembly to Javascript.
  if settings.get('RELOOP'): shared.Building.ensure_relooper()

  emscript(args.infile, settings, args.outfile, libraries)
|
2011-07-07 11:38:35 +04:00
|
|
|
|
|
|
|
if __name__ == '__main__':
  # Command-line entry point: parse options, then run main() under the
  # temp-file manager so scratch files are cleaned up on exit.
  parser = optparse.OptionParser(
      usage='usage: %prog [-h] [-H HEADERS] [-o OUTFILE] [-c COMPILER_ENGINE] [-s FOO=BAR]* infile',
      description=('You should normally never use this! Use emcc instead. '
                   'This is a wrapper around the JS compiler, converting .ll to .js.'),
      epilog='')
  parser.add_option('-H', '--headers',
                    default=[],
                    action='append',
                    help='System headers (comma separated) whose #defines should be exposed to the compiled code.')
  parser.add_option('-L', '--libraries',
                    default=[],
                    action='append',
                    help='Library files (comma separated) to use in addition to those in emscripten src/library_*.')
  parser.add_option('-o', '--outfile',
                    default=sys.stdout,
                    help='Where to write the output; defaults to stdout.')
  parser.add_option('-c', '--compiler',
                    default=shared.COMPILER_ENGINE,
                    help='Which JS engine to use to run the compiler; defaults to the one in ~/.emscripten.')
  parser.add_option('-s', '--setting',
                    dest='settings',
                    default=[],
                    action='append',
                    metavar='FOO=BAR',
                    help=('Overrides for settings defined in settings.js. '
                          'May occur multiple times.'))
  parser.add_option('-j', '--jcache',
                    action='store_true',
                    default=False,
                    help=('Enable jcache (ccache-like caching of compilation results, for faster incremental builds).'))

  # Convert to the same format that argparse would have produced.
  keywords, positional = parser.parse_args()
  if len(positional) != 1:
    raise RuntimeError('Must provide exactly one positional argument.')
  keywords.infile = os.path.abspath(positional[0])
  if isinstance(keywords.outfile, basestring):
    # outfile was given as a path (rather than the default sys.stdout)
    keywords.outfile = open(keywords.outfile, 'w')
  # Publish option values into the module globals read by emscript() and
  # the phase-2 workers.
  compiler_engine = keywords.compiler
  jcache = keywords.jcache

  temp_files.run_and_clean(lambda: main(keywords))
|
|
|
|
|