'''
A tool that generates FS API calls to generate a filesystem, and packages the files
to work with that.

This is called by emcc. You can also call it yourself.

You can split your files into "asset bundles", and create each bundle separately
with this tool. Then just include the generated js for each and they will load
the data and prepare it accordingly. This allows you to share assets and reduce
data downloads.

Usage:

  file_packager.py TARGET [--preload A [B..]] [--embed C [D..]] [--compress COMPRESSION_DATA] [--pre-run] [--crunch[=X]]

  --pre-run Will generate wrapper code that does preloading in Module.preRun. This is necessary if you add this
            code before the main file has been loaded, which includes necessary components like addRunDependency.

  --crunch=X Will compress dxt files to crn with quality level X. The crunch commandline tool must be present
             and CRUNCH should be defined in ~/.emscripten that points to it. JS crunch decompressing code will
             be added to convert the crn to dds in the browser.
             crunch-worker.js will be generated in the current directory. You should include that file when
             packaging your site.
             DDS files will not be crunched if the .crn is more recent than the .dds. This prevents a lot of
             unneeded computation.

Notes:

  * The file packager generates unix-style file paths. So if you are on windows and a file is accessed at
    subdir\file, in JS it will be subdir/file. For simplicity we treat the web platform as a *NIX.

TODO: You can also provide .crn files yourself, pre-crunched. With this option, they will be decompressed
      to dds files in the browser, exactly the same as if this tool compressed them.
'''
2012-09-22 03:41:28 +04:00
import os , sys , shutil , random
2012-06-11 23:50:53 +04:00
2012-06-20 04:16:13 +04:00
from shared import Compression , execute , suffix , unsuffixed
2012-06-18 04:57:47 +04:00
import shared
from subprocess import Popen , PIPE , STDOUT
2012-06-11 23:50:53 +04:00
2012-06-18 04:57:47 +04:00
data_target = sys.argv[1] # TARGET: path of the output data bundle

IMAGE_SUFFIXES = ('.jpg', '.png', '.bmp')
AUDIO_SUFFIXES = ('.ogg', '.wav', '.mp3')
AUDIO_MIMETYPES = { 'ogg': 'audio/ogg', 'wav': 'audio/wav', 'mp3': 'audio/mpeg' }

CRUNCH_INPUT_SUFFIX = '.dds'
CRUNCH_OUTPUT_SUFFIX = '.crn'
DDS_HEADER_SIZE = 128 # bytes of the DDS header we keep in front of the crunched payload

AV_WORKAROUND = 0 # Set to 1 to randomize file order and add some padding, to work around silly av false positives

# Command-line parser state, filled in by the argument loop below.
data_files = []       # entries: {'name': path, 'mode': 'preload'|'embed'}
in_preload = False    # subsequent bare args are files to preload
in_embed = False      # subsequent bare args are files to embed
has_preloaded = False # at least one --preload was seen
in_compress = 0       # 0 = off; 1/2/3 = next arg is encoder/decoder/js_name
pre_run = False       # wrap generated JS in Module.preRun
crunch = 0            # 0 = off; otherwise crunch quality level (as a string)
plugins = []          # callables applied to each data-file entry before packaging
2012-06-11 23:50:53 +04:00
# Parse command-line flags. Bare arguments between flags belong to the most
# recently seen mode (--preload, --embed or --compress). Note that sys.argv[1]
# (the TARGET) also flows through this loop but matches no branch, so it is
# harmlessly ignored.
for arg in sys.argv[1:]:
  if arg == '--preload':
    in_preload = True
    in_embed = False
    has_preloaded = True
    in_compress = 0
  elif arg == '--embed':
    in_embed = True
    in_preload = False
    in_compress = 0
  elif arg == '--compress':
    Compression.on = True
    in_compress = 1 # next three args: encoder command, decoder command, decoder js
    in_preload = False
    in_embed = False
  elif arg == '--pre-run':
    pre_run = True
    in_preload = False
    in_embed = False
    in_compress = 0
  elif arg.startswith('--crunch'):
    from shared import CRUNCH # imported lazily: only needed when crunching
    crunch = arg.split('=')[1] if '=' in arg else '128' # default quality level
    in_preload = False
    in_embed = False
    in_compress = 0
  elif arg.startswith('--plugin'):
    # SECURITY NOTE: this executes arbitrary code from the plugin file;
    # only use plugin files you trust.
    plugin = open(arg.split('=')[1], 'r').read()
    eval(plugin) # should append itself to plugins
    in_preload = False
    in_embed = False
    in_compress = 0
  elif in_preload:
    data_files.append({ 'name': arg, 'mode': 'preload' })
  elif in_embed:
    data_files.append({ 'name': arg, 'mode': 'embed' })
  elif in_compress:
    if in_compress == 1:
      Compression.encoder = arg
      in_compress = 2
    elif in_compress == 2:
      Compression.decoder = arg
      in_compress = 3
    elif in_compress == 3:
      Compression.js_name = arg
      in_compress = 0
2012-09-22 02:29:57 +04:00
print '''
( function ( ) {
'''
2012-06-12 04:30:46 +04:00
code = '''
function assert ( check , msg ) {
2012-07-19 03:43:57 +04:00
if ( ! check ) throw msg + new Error ( ) . stack ;
2012-06-12 04:30:46 +04:00
}
'''
2012-06-11 23:50:53 +04:00
# Expand directories into individual files.
# Callback for os.path.walk: appends every non-directory entry under
# 'dirname' to the global data_files list with the given mode.
def add(mode, dirname, names):
  for name in names:
    fullname = os.path.join(dirname, name)
    if not os.path.isdir(fullname):
      data_files.append({ 'name': fullname, 'mode': mode })
# Walk any directory entries, replacing them with their contained files,
# then drop the directory entries themselves.
for file_ in data_files:
  if os.path.isdir(file_['name']):
    os.path.walk(file_['name'], add, file_['mode'])
data_files = filter(lambda file_: not os.path.isdir(file_['name']), data_files)
for file_ in data_files:
  file_['name'] = file_['name'].replace(os.path.sep, '/') # name in the filesystem, native and emulated
  file_['localname'] = file_['name'] # name to actually load from local filesystem, after transformations
2012-06-18 04:57:47 +04:00
# Remove duplicates (can occur naively, for example preload dir/, preload dir/subdir/)
seen = {}
def was_seen(name):
  # True if 'name' was passed in before; records it as seen otherwise.
  if seen.get(name): return True
  seen[name] = 1
  return False
data_files = filter(lambda file_: not was_seen(file_['name']), data_files)

if AV_WORKAROUND:
  # Randomized order (plus padding, added at bundling time) works around
  # antivirus false positives on the generated data bundle.
  random.shuffle(data_files)

# Apply plugins: each plugin may rewrite a file entry in place
# (e.g. change its localname to point at a transformed file).
for file_ in data_files:
  for plugin in plugins:
    plugin(file_)
2012-06-18 04:57:47 +04:00
# Crunch files
if crunch :
2012-06-18 21:52:13 +04:00
shutil . copyfile ( shared . path_from_root ( ' tools ' , ' crunch-worker.js ' ) , ' crunch-worker.js ' )
print '''
var decrunchWorker = new Worker ( ' crunch-worker.js ' ) ;
var decrunchCallbacks = [ ] ;
decrunchWorker . onmessage = function ( msg ) {
decrunchCallbacks [ msg . data . callbackID ] ( msg . data . data ) ;
2012-06-20 03:39:32 +04:00
console . log ( ' decrunched ' + msg . data . filename + ' in ' + msg . data . time + ' ms, ' + msg . data . data . length + ' bytes ' ) ;
2012-06-18 21:52:13 +04:00
decrunchCallbacks [ msg . data . callbackID ] = null ;
} ;
2012-06-19 04:42:36 +04:00
function requestDecrunch ( filename , data , callback ) {
2012-06-18 21:52:13 +04:00
decrunchWorker . postMessage ( {
2012-06-19 04:42:36 +04:00
filename : filename ,
2012-06-18 21:52:13 +04:00
data : data ,
callbackID : decrunchCallbacks . length
} ) ;
decrunchCallbacks . push ( callback ) ;
}
'''
2012-06-18 04:57:47 +04:00
for file_ in data_files :
if file_ [ ' name ' ] . endswith ( CRUNCH_INPUT_SUFFIX ) :
2012-06-19 03:27:29 +04:00
# Do not crunch if crunched version exists and is more recent than dds source
2012-06-18 04:57:47 +04:00
crunch_name = unsuffixed ( file_ [ ' name ' ] ) + CRUNCH_OUTPUT_SUFFIX
2012-06-20 05:47:17 +04:00
file_ [ ' localname ' ] = crunch_name
2012-06-19 03:47:33 +04:00
try :
crunch_time = os . stat ( crunch_name ) . st_mtime
dds_time = os . stat ( file_ [ ' name ' ] ) . st_mtime
if dds_time < crunch_time : continue
except :
pass # if one of them does not exist, continue on
2012-06-19 03:27:29 +04:00
2012-06-19 04:50:41 +04:00
# guess at format. this lets us tell crunch to not try to be clever and use odd formats like DXT5_AGBR
try :
format = Popen ( [ ' file ' , file_ [ ' name ' ] ] , stdout = PIPE ) . communicate ( ) [ 0 ]
if ' DXT5 ' in format :
format = [ ' -dxt5 ' ]
elif ' DXT1 ' in format :
format = [ ' -dxt1 ' ]
else :
raise Exception ( ' unknown format ' )
except :
format = [ ]
Popen ( [ CRUNCH , ' -file ' , file_ [ ' name ' ] , ' -quality ' , crunch ] + format , stdout = sys . stderr ) . communicate ( )
2012-07-17 02:45:25 +04:00
#if not os.path.exists(os.path.basename(crunch_name)):
# print >> sys.stderr, 'Failed to crunch, perhaps a weird dxt format? Looking for a source PNG for the DDS'
# Popen([CRUNCH, '-file', unsuffixed(file_['name']) + '.png', '-quality', crunch] + format, stdout=sys.stderr).communicate()
assert os . path . exists ( os . path . basename ( crunch_name ) ) , ' crunch failed to generate output '
2012-06-18 08:46:02 +04:00
shutil . move ( os . path . basename ( crunch_name ) , crunch_name ) # crunch places files in the current dir
2012-06-18 04:57:47 +04:00
# prepend the dds header
crunched = open ( crunch_name , ' rb ' ) . read ( )
c = open ( crunch_name , ' wb ' )
c . write ( open ( file_ [ ' name ' ] , ' rb ' ) . read ( ) [ : DDS_HEADER_SIZE ] )
c . write ( crunched )
c . close ( )
2012-06-11 23:50:53 +04:00
# Set up folders: emit an FS_createPath call for every directory that will
# contain at least one packaged file, creating each partial path only once.
partial_dirs = []
for file_ in data_files:
  dirname = os.path.dirname(file_['name'])
  dirname = dirname.lstrip('/') # absolute paths start with '/', remove that
  if dirname != '':
    parts = dirname.split('/')
    for i in range(len(parts)):
      partial = '/'.join(parts[:i+1])
      if partial not in partial_dirs:
        code += '''Module['FS_createPath']('/%s', '%s', true, true);\n''' % ('/'.join(parts[:i]), parts[i])
        partial_dirs.append(partial)
if has_preloaded:
  # Bundle all datafiles into one archive. Avoids doing lots of simultaneous XHRs which has overhead.
  # Each file records its [data_start, data_end) byte range within the bundle.
  data = open(data_target, 'wb')
  start = 0
  for file_ in data_files:
    file_['data_start'] = start
    curr = open(file_['localname'], 'rb').read()
    file_['data_end'] = start + len(curr)
    if AV_WORKAROUND: curr += '\x00' # padding byte; data_end intentionally excludes it
    print >> sys.stderr, 'bundling', file_['name'], file_['localname'], file_['data_start'], file_['data_end']
    start += len(curr)
    data.write(curr)
  data.close()
  if Compression.on:
    Compression.compress(data_target)

  # Data requests - for getting a block of data out of the big archive - have a similar API to XHRs
  code += '''
    function DataRequest() {}
    DataRequest.prototype = {
      requests: {},
      open: function(mode, name) {
        this.requests[name] = this;
      },
      send: function() {}
    };
'''
# Emit per-file JS: embedded files become inline byte arrays, preloaded files
# become DataRequests that are satisfied when the big archive arrives.
counter = 0
for file_ in data_files:
  filename = file_['name']
  if file_['mode'] == 'embed':
    # Embed: write the file contents directly into the JS as an array of bytes.
    code += '''Module['FS_createDataFile']('/%s', '%s', %s, true, true);\n''' % (os.path.dirname(filename), os.path.basename(filename), str(map(ord, open(file_['localname'], 'rb').read())))
  elif file_['mode'] == 'preload':
    # Preload
    varname = 'filePreload%d' % counter
    counter += 1

    dds = crunch and filename.endswith(CRUNCH_INPUT_SUFFIX)

    prepare = ''
    finish = "Module['removeRunDependency']('fp %s');\n" % filename

    if dds:
      # decompress crunch format into dds: keep the original dds header we
      # prepended at pack time, decrunch the rest, then stitch them together
      prepare = '''
        var ddsHeader = byteArray.subarray(0, %(dds_header_size)d);
        requestDecrunch('%(filename)s', byteArray.subarray(%(dds_header_size)d), function(ddsData) {
          byteArray = new Uint8Array(ddsHeader.length + ddsData.length);
          byteArray.set(ddsHeader, 0);
          byteArray.set(ddsData, %(dds_header_size)d);
''' % { 'filename': filename, 'dds_header_size': DDS_HEADER_SIZE }

      # close the requestDecrunch callback opened in 'prepare'
      finish += '''
        });
'''

    code += '''
    var %(varname)s = new %(request)s();
    %(varname)s.open('GET', '%(filename)s', true);
    %(varname)s.responseType = 'arraybuffer';
    %(varname)s.onload = function() {
      var arrayBuffer = %(varname)s.response;
      assert(arrayBuffer, 'Loading file %(filename)s failed.');
      var byteArray = !arrayBuffer.subarray ? new Uint8Array(arrayBuffer) : arrayBuffer;
      %(prepare)s
      Module['FS_createPreloadedFile']('/%(dirname)s', '%(basename)s', byteArray, true, true, function() {
        %(finish)s
      }%(fail)s);
    };
    Module['addRunDependency']('fp %(filename)s');
    %(varname)s.send(null);
''' % {
      'request': 'DataRequest', # In the past we also supported XHRs here
      'varname': varname,
      'filename': filename,
      'dirname': os.path.dirname(filename),
      'basename': os.path.basename(filename),
      'prepare': prepare,
      'finish': finish,
      'fail': '' if filename[-4:] not in AUDIO_SUFFIXES else ''', function() { Module['removeRunDependency']('fp %s') }''' % filename # workaround for chromium bug 124926 (still no audio with this, but at least we don't hang)
    }
  else:
    assert 0 # modes other than 'embed'/'preload' cannot occur
if has_preloaded:
  # Get the big archive and split it up among the individual DataRequests.
  use_data = ''
  for file_ in data_files:
    if file_['mode'] == 'preload':
      use_data += '''
        curr = DataRequest.prototype.requests['%s'];
        curr.response = byteArray.subarray(%d, %d);
        curr.onload();
''' % (file_['name'], file_['data_start'], file_['data_end'])
  use_data += "        Module['removeRunDependency']('datafile_%s');\n" % data_target

  if Compression.on:
    # Decompress the archive first, then run the splitting code on the result.
    use_data = '''
      Module["decompress"](byteArray, function(decompressed) {
        byteArray = new Uint8Array(decompressed);
        %s
      });
''' % use_data

  # Download the archive with a real XHR, reporting aggregate progress across
  # all expected data files through Module['setStatus'].
  # NOTE: was `''' % u se_data` in the corrupted source — fixed to use_data.
  code += r'''
    if (!Module.expectedDataFileDownloads) {
      Module.expectedDataFileDownloads = 0;
      Module.finishedDataFileDownloads = 0;
    }
    Module.expectedDataFileDownloads++;
    var dataFile = new XMLHttpRequest();
    dataFile.onprogress = function(event) {
      var url = '%s';
      if (event.loaded && event.total) {
        if (!dataFile.addedTotal) {
          dataFile.addedTotal = true;
          if (!Module.dataFileDownloads) Module.dataFileDownloads = {};
          Module.dataFileDownloads[url] = {
            loaded: event.loaded,
            total: event.total
          };
        } else {
          Module.dataFileDownloads[url].loaded = event.loaded;
        }
        var total = 0;
        var loaded = 0;
        var num = 0;
        for (var download in Module.dataFileDownloads) {
          var data = Module.dataFileDownloads[download];
          total += data.total;
          loaded += data.loaded;
          num++;
        }
        total = Math.ceil(total * Module.expectedDataFileDownloads / num);
        Module['setStatus']('Downloading data... (' + loaded + '/' + total + ')');
      } else if (!Module.dataFileDownloads) {
        Module['setStatus']('Downloading data...');
      }
    }
    dataFile.open('GET', '%s', true);
    dataFile.responseType = 'arraybuffer';
    dataFile.onload = function() {
      Module.finishedDataFileDownloads++;
      var arrayBuffer = dataFile.response;
      assert(arrayBuffer, 'Loading data file failed.');
      var byteArray = new Uint8Array(arrayBuffer);
      var curr;
      %s
    };
    Module['addRunDependency']('datafile_%s');
    dataFile.send(null);
    if (Module['setStatus']) Module['setStatus']('Downloading...');
''' % (data_target, os.path.basename(Compression.compressed_name(data_target) if Compression.on else data_target), use_data, data_target) # use basename because from the browser's point of view, we need to find the datafile in the same dir as the html file
2012-06-11 23:50:53 +04:00
2012-06-12 03:02:06 +04:00
if pre_run :
2012-06-18 04:57:47 +04:00
print '''
if ( typeof Module == ' undefined ' ) Module = { } ;
if ( ! Module [ ' preRun ' ] ) Module [ ' preRun ' ] = [ ] ;
Module [ " preRun " ] . push ( function ( ) {
'''
2012-06-12 03:02:06 +04:00
2012-06-11 23:50:53 +04:00
print code
2012-06-12 03:02:06 +04:00
if pre_run :
2012-06-18 04:57:47 +04:00
print ' }); \n '
2012-06-12 03:02:06 +04:00
2012-06-18 21:52:13 +04:00
if crunch :
print '''
if ( ! Module [ ' postRun ' ] ) Module [ ' postRun ' ] = [ ] ;
Module [ " postRun " ] . push ( function ( ) {
decrunchWorker . terminate ( ) ;
} ) ;
'''
2012-09-22 02:29:57 +04:00
print '''
} ) ( ) ;
'''