add a hash to .a objects, to allow duplicates to work #2142

This commit is contained in:
Alon Zakai 2015-05-22 12:57:29 -07:00
Parent d74a251e45
Commit a9b93304e8
2 changed files with 37 additions and 4 deletions

emar
View file

@@ -12,7 +12,7 @@ from tools import shared
 DEBUG = os.environ.get('EMCC_DEBUG')
 if DEBUG == "0":
-  DEBUG = None
+  DEBUG = None

 newargs = [shared.LLVM_AR] + sys.argv[1:]
@@ -20,5 +20,37 @@ if DEBUG:
   print >> sys.stderr, 'emar:', sys.argv, ' ==> ', newargs

 if len(newargs) > 2:
+  to_delete = []
+  if 'r' in newargs[1]:
+    # we are adding files to the archive.
+    # find the .a; everything after it is an input file.
+    # we add a hash to each input, to make them as unique as
+    # possible, as llvm-ar cannot extract duplicate names
+    # (and only the basename is used!)
+    i = 1
+    while i < len(newargs):
+      if newargs[i].endswith('.a'):
+        import hashlib, shutil
+        for j in range(i+1, len(newargs)):
+          orig_name = newargs[j]
+          full_name = os.path.abspath(orig_name)
+          dir_name = os.path.dirname(full_name)
+          base_name = os.path.basename(full_name)
+          h = hashlib.md5(full_name).hexdigest()[:8]
+          parts = base_name.split('.')
+          parts[0] += '_' + h
+          newname = '.'.join(parts)
+          full_newname = os.path.relpath(os.path.join(dir_name, newname))
+          if not os.path.exists(full_newname):
+            try: # it is ok to fail here, we just don't get hashing
+              shutil.copyfile(orig_name, full_newname)
+              newargs[j] = full_newname
+              to_delete.append(full_newname)
+            except:
+              pass
+        break
+      i += 1
   subprocess.call(newargs)
+  for d in to_delete:
+    shared.try_delete(d)
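
In effect, each input object is copied to a sibling file whose basename embeds a short MD5 hash of its absolute path, the copies are handed to llvm-ar, and they are deleted afterwards. A minimal, self-contained sketch of just the naming step (illustrative paths and a hypothetical helper name, not part of the commit):

import hashlib
import os

def hashed_member_name(path):
    # llvm-ar stores archive members by basename only, so dir1/common.o and
    # dir2/common.o would collide; hashing the absolute path keeps them apart.
    full = os.path.abspath(path)
    h = hashlib.md5(full.encode('utf-8')).hexdigest()[:8]
    parts = os.path.basename(full).split('.')
    parts[0] += '_' + h  # common.o -> common_<hash>.o
    return os.path.join(os.path.dirname(full), '.'.join(parts))

print(hashed_member_name('dir1/common.o'))  # e.g. /abs/dir1/common_1f0e2d3c.o
print(hashed_member_name('dir2/common.o'))  # same basename, different hash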

View file

@@ -1227,9 +1227,10 @@ int f() {
       }
     ''')
     out, err = Popen([PYTHON, EMCC, 'main.c', '-L.', '-la'], stderr=PIPE).communicate()
-    assert 'loading from archive' in err, err
-    assert 'which has duplicate entries' in err, err
-    assert 'duplicate: common.o' in err, err
+    assert 'loading from archive' not in err, err
+    assert 'which has duplicate entries' not in err, err
+    assert 'duplicate: common.o' not in err, err
     self.assertContained('a\nb...\n', run_js('a.out.js'))

   def test_export_in_a(self):
     export_name = 'this_is_an_entry_point'
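
The updated test expects the duplicate-entry warnings to be gone when linking against an archive whose members share a basename. A hypothetical command-level reproduction of that scenario (file names assumed from the test, not part of the commit):

import subprocess

# Two objects that share a basename but live in different directories.
subprocess.check_call(['emcc', '-c', 'dir1/common.c', '-o', 'dir1/common.o'])
subprocess.check_call(['emcc', '-c', 'dir2/common.c', '-o', 'dir2/common.o'])

# 'r' in the mode string triggers the hashing path added to emar above,
# so the two members no longer collide inside liba.a.
subprocess.check_call(['emar', 'rc', 'liba.a', 'dir1/common.o', 'dir2/common.o'])

# Before this commit, linking warned 'loading from archive ... which has
# duplicate entries' and one of the duplicate members was lost.
subprocess.check_call(['emcc', 'main.c', '-L.', '-la'])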