# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# This script is used to capture the content of config.status-generated
# files and subsequently restore their timestamp if they haven't changed.
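
# The script is invoked in two phases (see main() at the bottom):
#   - `subconfigure.py --prepare <topsrcdir> <srcdir[:objdir]> <shell> <args...>`
#     records the sub-configure invocation (arguments, environment and cache
#     file location) into a configure.pkl file in the objdir.
#   - `subconfigure.py [--list <file>] [--skip <file>] [objdir...]` then runs
#     the recorded sub-configures, skipping configure/config.status when
#     nothing relevant changed, and restoring the timestamps of generated
#     files whose content is unchanged.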

import argparse
import errno
import itertools
import os
import pickle
import re
import subprocess
import sys

import mozpack.path as mozpath
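
# Pool(size) returns a multiprocessing.Pool when multiprocessing is usable;
# otherwise an instance of this class acts as a serial fallback exposing the
# same imap_unordered/close/join interface used by subconfigure() below.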
class Pool(object):
    def __new__(cls, size):
        try:
            import multiprocessing
            size = min(size, multiprocessing.cpu_count())
            return multiprocessing.Pool(size)
        except:
            return super(Pool, cls).__new__(cls)

    def imap_unordered(self, fn, iterable):
        return itertools.imap(fn, iterable)

    def close(self):
        pass

    def join(self):
        pass
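
# A File instance snapshots a file's content and its (atime, mtime) at
# construction time, so that the original times can be restored later when
# the content turns out not to have changed.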
class File(object):
    def __init__(self, path):
        self._path = path
        self._content = open(path, 'rb').read()
        stat = os.stat(path)
        self._times = (stat.st_atime, stat.st_mtime)

    @property
    def path(self):
        return self._path

    @property
    def mtime(self):
        return self._times[1]

    @property
    def modified(self):
        '''Returns whether the file was modified since the instance was
        created. Result is memoized.'''
        if hasattr(self, '_modified'):
            return self._modified

        modified = True
        if os.path.exists(self._path):
            if open(self._path, 'rb').read() == self._content:
                modified = False
        self._modified = modified
        return modified

    def update_time(self):
        '''If the file hasn't changed since the instance was created,
        restore its old modification time.'''
        if not self.modified:
            os.utime(self._path, self._times)

# As defined in the various sub-configures in the tree
PRECIOUS_VARS = set([
    'build_alias',
    'host_alias',
    'target_alias',
    'CC',
    'CFLAGS',
    'LDFLAGS',
    'LIBS',
    'CPPFLAGS',
    'CPP',
    'CCC',
    'CXXFLAGS',
    'CXX',
    'CCASFLAGS',
    'CCAS',
])

CONFIGURE_DATA = 'configure.pkl'

# Autoconf, in some of the sub-configures used in the tree, likes to error
# out when "precious" variables change in value. The solution it gives to
# straighten things out is to either run make distclean or remove
# config.cache. There's no reason not to do the latter automatically instead
# of failing, doing the cleanup (which, on buildbots, means a full clobber),
# and restarting from scratch.
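
# A config.cache entry for a precious variable typically looks like the
# following (illustrative example of autoconf's cache format):
#   ac_cv_env_CC_set=set
#   ac_cv_env_CC_value='ccache gcc'
# which is what the parsing below relies on.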
def maybe_clear_cache(data):
    env = dict(data['env'])
    for kind in ('target', 'host', 'build'):
        arg = data[kind]
        if arg is not None:
            env['%s_alias' % kind] = arg
    # configure can take variable assignments in its arguments, and that
    # overrides whatever is in the environment.
    for arg in data['args']:
        if arg[:1] != '-' and '=' in arg:
            key, value = arg.split('=', 1)
            env[key] = value

    comment = re.compile(r'^\s+#')
    cache = {}
    with open(data['cache-file']) as f:
        for line in f:
            if not comment.match(line) and '=' in line:
                key, value = line.rstrip(os.linesep).split('=', 1)
                # If the value is quoted, unquote it
                if value[:1] == "'":
                    value = value[1:-1].replace("'\\''", "'")
                cache[key] = value
    for precious in PRECIOUS_VARS:
        # If there is no entry at all for that precious variable, then
        # its value is not precious for that particular configure.
        if 'ac_cv_env_%s_set' % precious not in cache:
            continue
        is_set = cache.get('ac_cv_env_%s_set' % precious) == 'set'
        value = cache.get('ac_cv_env_%s_value' % precious) if is_set else None
        if value != env.get(precious):
            print 'Removing %s because of %s value change from:' \
                % (data['cache-file'], precious)
            print ' %s' % (value if value is not None else 'undefined')
            print 'to:'
            print ' %s' % env.get(precious, 'undefined')
            os.remove(data['cache-file'])
            return True
    return False


def split_template(s):
    """Given a "file:template" string, returns "file", "template". If the string
    is of the form "file" (without a template), returns "file", "file.in"."""
    if ':' in s:
        return s.split(':', 1)
    return s, '%s.in' % s
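
# `config.status --help` lists the files and commands it knows about in
# "Configuration ...:" sections, roughly like this (illustrative):
#   Configuration files:
#    Makefile config.h:config.h.in
#
#   Configuration commands:
#    default
# get_config_files() parses those sections to find out which files
# config.status would generate, and from which templates.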
def get_config_files(data):
    config_status = mozpath.join(data['objdir'], 'config.status')
    if not os.path.exists(config_status):
        return [], []

    configure = mozpath.join(data['srcdir'], 'configure')
    config_files = []
    command_files = []

    # Scan the config.status output for information about configuration files
    # it generates.
    config_status_output = subprocess.check_output(
        [data['shell'], '-c', '%s --help' % config_status],
        stderr=subprocess.STDOUT).splitlines()
    state = None
    for line in config_status_output:
        if line.startswith('Configuration') and line.endswith(':'):
            if line.endswith('commands:'):
                state = 'commands'
            else:
                state = 'config'
        elif not line.strip():
            state = None
        elif state:
            for f, t in (split_template(couple) for couple in line.split()):
                f = mozpath.join(data['objdir'], f)
                t = mozpath.join(data['srcdir'], t)
                if state == 'commands':
                    command_files.append(f)
                else:
                    config_files.append((f, t))

    return config_files, command_files
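
# prepare() implements the `--prepare` phase: it parses the few arguments this
# script cares about (--target/--host/--build/--cache-file), captures the
# environment and the remaining configure arguments, and pickles everything
# into configure.pkl in the objdir for a later run().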
def prepare(srcdir, objdir, shell, args):
    parser = argparse.ArgumentParser()
    parser.add_argument('--target', type=str)
    parser.add_argument('--host', type=str)
    parser.add_argument('--build', type=str)
    parser.add_argument('--cache-file', type=str)
    # The --srcdir argument is simply ignored. It's a useless autoconf feature
    # that we don't support well anyway. This makes it stripped from `others`
    # and allows us to skip setting it when calling the subconfigure (configure
    # will take it from the configure path anyway).
    parser.add_argument('--srcdir', type=str)

    data_file = os.path.join(objdir, CONFIGURE_DATA)
    previous_args = None
    if os.path.exists(data_file):
        with open(data_file, 'rb') as f:
            data = pickle.load(f)
            previous_args = data['args']

    # Msys likes to break environment variables and command line arguments,
    # so read those from stdin, as they are passed from the configure script
    # when necessary (on Windows).
    input = sys.stdin.read()
    if input:
        data = {a: b for [a, b] in eval(input)}
        environ = {a: b for a, b in data['env']}
        # These environment variables as passed from old-configure may contain
        # posix-style paths, which will not be meaningful to the js
        # subconfigure, which runs as a native python process, so use their
        # values from the environment. In the case of autoconf-implemented
        # subconfigures, Msys will re-convert them properly.
        for var in ('HOME', 'TERM', 'PATH', 'TMPDIR', 'TMP',
                    'TEMP', 'INCLUDE'):
            if var in environ and var in os.environ:
                environ[var] = os.environ[var]
        args = data['args']
    else:
        environ = os.environ

    args, others = parser.parse_known_args(args)

    data = {
        'target': args.target,
        'host': args.host,
        'build': args.build,
        'args': others,
        'shell': shell,
        'srcdir': srcdir,
        'env': environ,
    }

    if args.cache_file:
        data['cache-file'] = mozpath.normpath(mozpath.join(os.getcwd(),
                                                           args.cache_file))
    else:
        data['cache-file'] = mozpath.join(objdir, 'config.cache')

    if previous_args is not None:
        data['previous-args'] = previous_args

    try:
        os.makedirs(objdir)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

    with open(data_file, 'wb') as f:
        pickle.dump(data, f)
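
# For example, prefix_lines('configuring\n', 'js/src') returns
# 'js/src> configuring\n'; it is used to tag the interleaved output of
# parallel sub-configures with the objdir it comes from.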
def prefix_lines(text, prefix):
    return ''.join('%s> %s' % (prefix, line) for line in text.splitlines(True))
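
# run() executes one recorded sub-configure in the given objdir:
#   - load configure.pkl and clear config.cache if a precious variable
#     changed,
#   - snapshot the config.status-generated files,
#   - run configure only if config.status is missing or stale, the arguments
#     changed, or the cache was cleared,
#   - run config.status only if it changed (or was created) or a template is
#     newer than its output, then restore the timestamps of generated files
#     whose content did not change.
# Returns (relative objdir, exit code, captured output).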
def run(objdir):
    ret = 0
    output = ''

    with open(os.path.join(objdir, CONFIGURE_DATA), 'rb') as f:
        data = pickle.load(f)

    data['objdir'] = objdir

    cache_file = data['cache-file']
    cleared_cache = True
    if os.path.exists(cache_file):
        cleared_cache = maybe_clear_cache(data)

    config_files, command_files = get_config_files(data)
    contents = []
    for f, t in config_files:
        contents.append(File(f))

    # AC_CONFIG_COMMANDS actually only registers tags, not file names,
    # but most commands are tagged with the file name they create.
    # However, a few don't, or are tagged with a directory name (and their
    # command is just to create that directory).
    for f in command_files:
        if os.path.isfile(f):
            contents.append(File(f))

    # Only run configure if one of the following is true:
    # - config.status doesn't exist
    # - config.status is older than configure
    # - the configure arguments changed
    # - the environment changed in a way that requires a cache clear.
    configure = mozpath.join(data['srcdir'], 'configure')
    config_status_path = mozpath.join(objdir, 'config.status')
    skip_configure = True
    if not os.path.exists(config_status_path):
        skip_configure = False
        config_status = None
    else:
        config_status = File(config_status_path)
        if config_status.mtime < os.path.getmtime(configure) or \
                data.get('previous-args', data['args']) != data['args'] or \
                cleared_cache:
            skip_configure = False

    relobjdir = os.path.relpath(objdir, os.getcwd())

    if not skip_configure:
        if mozpath.normsep(relobjdir) == 'js/src':
            # Because configure is a shell script calling a python script
            # calling a shell script, on Windows, with msys screwing the
            # environment, we lose the benefits from our own efforts in this
            # script to get past the msys problems. So manually call the
            # python script instead, so that we don't do a native->msys
            # transition here. Then the python configure will still have the
            # right environment when calling the shell configure.
            command = [
                sys.executable,
                os.path.join(os.path.dirname(__file__), '..', 'configure.py'),
                '--enable-project=js',
            ]
            data['env']['OLD_CONFIGURE'] = os.path.join(
                os.path.dirname(configure), 'old-configure')
        else:
            command = [data['shell'], configure]
        for kind in ('target', 'build', 'host'):
            if data.get(kind) is not None:
                command += ['--%s=%s' % (kind, data[kind])]
        command += data['args']
        command += ['--cache-file=%s' % cache_file]

        # Pass --no-create to configure so that it doesn't run config.status.
        # We're going to run it ourselves.
        command += ['--no-create']

        print prefix_lines('configuring', relobjdir)
        print prefix_lines('running %s' % ' '.join(command[:-1]), relobjdir)
        sys.stdout.flush()
        try:
            output += subprocess.check_output(command,
                stderr=subprocess.STDOUT, cwd=objdir, env=data['env'])
        except subprocess.CalledProcessError as e:
            return relobjdir, e.returncode, e.output

        # Leave config.status with a new timestamp if configure is newer than
        # its original mtime.
        if config_status and os.path.getmtime(configure) <= config_status.mtime:
            config_status.update_time()

    # Only run config.status if one of the following is true:
    # - config.status changed or did not exist
    # - one of the templates for config files is newer than the corresponding
    #   config file.
    skip_config_status = True
    if not config_status or config_status.modified:
        # If config.status doesn't exist after configure (because it's not
        # an autoconf configure), skip it.
        if os.path.exists(config_status_path):
            skip_config_status = False
    else:
        # config.status changed or was created, so we need to update the
        # list of config and command files.
        config_files, command_files = get_config_files(data)

        for f, t in config_files:
            if not os.path.exists(t) or \
                    os.path.getmtime(f) < os.path.getmtime(t):
                skip_config_status = False

    if not skip_config_status:
        if skip_configure:
            print prefix_lines('running config.status', relobjdir)
            sys.stdout.flush()
        try:
            output += subprocess.check_output([data['shell'], '-c',
                './config.status'], stderr=subprocess.STDOUT, cwd=objdir,
                env=data['env'])
        except subprocess.CalledProcessError as e:
            ret = e.returncode
            output += e.output

        for f in contents:
            f.update_time()

    return relobjdir, ret, output
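
# subconfigure() runs the recorded sub-configures, either listed on the
# command line or read from the --list file (minus those in the --skip file),
# in parallel when multiprocessing is available, and returns a non-zero exit
# code if any of them failed.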
def subconfigure(args):
    parser = argparse.ArgumentParser()
    parser.add_argument('--list', type=str,
        help='File containing a list of subconfigures to run')
    parser.add_argument('--skip', type=str,
        help='File containing a list of subconfigures to skip')
    parser.add_argument('subconfigures', type=str, nargs='*',
        help='Subconfigures to run if no list file is given')
    args, others = parser.parse_known_args(args)
    subconfigures = args.subconfigures
    if args.list:
        subconfigures.extend(open(args.list, 'rb').read().splitlines())
    if args.skip:
        skips = set(open(args.skip, 'rb').read().splitlines())
        subconfigures = [s for s in subconfigures if s not in skips]

    if not subconfigures:
        return 0

    ret = 0
    # One would think using a ThreadPool would be faster, considering
    # everything happens in subprocesses anyway, but no, it's actually
    # slower on Windows. (20s difference overall!)
    pool = Pool(len(subconfigures))
    for relobjdir, returncode, output in \
            pool.imap_unordered(run, subconfigures):
        print prefix_lines(output, relobjdir)
        sys.stdout.flush()
        ret = max(returncode, ret)
        if ret:
            break
    pool.close()
    pool.join()
    return ret
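
# main() dispatches between the two phases: `--prepare <topsrcdir>
# <srcdir[:objdir]> <shell> <args...>` records a sub-configure via prepare();
# any other invocation is treated as a list of sub-configure objdirs to run
# via subconfigure().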
def main(args):
    if args[0] != '--prepare':
        return subconfigure(args)

    topsrcdir = os.path.abspath(args[1])
    subdir = args[2]
    # subdir can be of the form srcdir:objdir
    if ':' in subdir:
        srcdir, subdir = subdir.split(':', 1)
    else:
        srcdir = subdir
    srcdir = os.path.join(topsrcdir, srcdir)
    objdir = os.path.abspath(subdir)

    return prepare(srcdir, objdir, args[3], args[4:])


if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))