@@ -47,6 +47,7 @@ WAFLIB_STRIP_EXTRAS="
boo
boost
c_dumbpreproc
c_emscripten
cabal
cfg_altoptions
cfg_cross_gnu
@@ -32,7 +32,7 @@ POSSIBILITY OF SUCH DAMAGE.
import os, sys, inspect
VERSION="1.8.9"
VERSION="1.8.14"
REVISION="x"
GIT="x"
INSTALL="x"
@@ -1015,6 +1015,7 @@ class InstallContext(BuildContext):
if tsk.runnable_status() == Task.ASK_LATER:
raise self.WafError('cannot post the task %r' % tsk)
tsk.run()
tsk.hasrun = True
def install_files(self, dest, files, env=None, chmod=Utils.O644, relative_trick=False, cwd=None, add=True, postpone=True, task=None):
"""
@@ -167,6 +167,7 @@ class ConfigSet(object):
for x in keys:
tbl[x] = copy.deepcopy(tbl[x])
self.table = tbl
return self
def get_flat(self, key):
"""
@@ -192,11 +192,11 @@ class ConfigurationContext(Context.Context):
env['files'] = self.files
env['environ'] = dict(self.environ)
if not self.env.NO_LOCK_IN_RUN:
if not self.env.NO_LOCK_IN_RUN and not getattr(Options.options, 'no_lock_in_run'):
env.store(os.path.join(Context.run_dir, Options.lockfile))
if not self.env.NO_LOCK_IN_TOP:
if not self.env.NO_LOCK_IN_TOP and not getattr(Options.options, 'no_lock_in_top'):
env.store(os.path.join(Context.top_dir, Options.lockfile))
if not self.env.NO_LOCK_IN_OUT:
if not self.env.NO_LOCK_IN_OUT and not getattr(Options.options, 'no_lock_in_out'):
env.store(os.path.join(Context.out_dir, Options.lockfile))
def prepare_env(self, env):
@@ -234,7 +234,7 @@ class ConfigurationContext(Context.Context):
tmpenv = self.all_envs[key]
tmpenv.store(os.path.join(self.cachedir.abspath(), key + Build.CACHE_SUFFIX))
def load(self, input, tooldir=None, funs=None):
def load(self, input, tooldir=None, funs=None, with_sys_path=True):
"""
Load Waf tools, which will be imported whenever a build is started.
@@ -252,7 +252,7 @@ class ConfigurationContext(Context.Context):
# avoid loading the same tool more than once with the same functions
# used by composite projects
mag = (tool, id(self.env), funs)
mag = (tool, id(self.env), tooldir, funs)
if mag in self.tool_cache:
self.to_log('(tool %s is already loaded, skipping)' % tool)
continue
@@ -260,7 +260,7 @@ class ConfigurationContext(Context.Context):
module = None
try:
module = Context.load_tool(tool, tooldir, ctx=self)
module = Context.load_tool(tool, tooldir, ctx=self, with_sys_path=with_sys_path)
except ImportError as e:
self.fatal('Could not load the Waf tool %r from %r\n%s' % (tool, sys.path, e))
except Exception as e:
@@ -352,7 +352,7 @@ def conf(f):
return f
@conf
def add_os_flags(self, var, dest=None):
def add_os_flags(self, var, dest=None, dup=True):
"""
Import operating system environment values into ``conf.env`` dict::
@@ -363,10 +363,16 @@ def add_os_flags(self, var, dest=None):
:type var: string
:param dest: destination variable, by default the same as var
:type dest: string
:param dup: add the same set of flags again
:type dup: bool
"""
# do not use 'get' to make certain the variable is not defined
try: self.env.append_value(dest or var, shlex.split(self.environ[var]))
except KeyError: pass
try:
flags = shlex.split(self.environ[var])
except KeyError:
return
# TODO: in waf 1.9, make dup=False the default
if dup or ''.join(flags) not in ''.join(Utils.to_list(self.env[dest or var])):
self.env.append_value(dest or var, flags)
@conf
def cmd_to_list(self, cmd):
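A minimal configure sketch for the new dup parameter of add_os_flags (the flag variables below are illustrative, not taken from this patch):
def configure(conf):
    conf.load('compiler_c')
    conf.add_os_flags('CFLAGS', dup=False)                 # CFLAGS from os.environ, appended only once across re-runs
    conf.add_os_flags('LDFLAGS', 'LINKFLAGS', dup=False)   # hypothetical mapping: read LDFLAGS, store into LINKFLAGS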
@@ -11,13 +11,13 @@ from waflib import Utils, Errors, Logs
import waflib.Node
# the following 3 constants are updated on each new release (do not touch)
HEXVERSION=0x1080900
HEXVERSION=0x1080e00
"""Constant updated on new releases"""
WAFVERSION="1.8.9"
WAFVERSION="1.8.14"
"""Constant updated on new releases"""
WAFREVISION="06e49b2a82166aeb14dde8357c58387f252fc722"
WAFREVISION="ce8234c396bb246a20ea9f51594ee051d5b378e7"
"""Git revision when the waf version is updated"""
ABI = 98
@@ -55,7 +55,7 @@ waf_dir = ''
local_repo = ''
"""Local repository containing additional Waf tools (plugins)"""
remote_repo = 'http://waf.googlecode.com/git/'
remote_repo = 'https://raw.githubusercontent.com/waf-project/waf/master/'
"""
Remote directory containing downloadable waf tools. The missing tools can be downloaded by using::
@@ -210,9 +210,10 @@ class Context(ctx):
"""
tools = Utils.to_list(tool_list)
path = Utils.to_list(kw.get('tooldir', ''))
with_sys_path = kw.get('with_sys_path', True)
for t in tools:
module = load_tool(t, path)
module = load_tool(t, path, with_sys_path=with_sys_path)
fun = getattr(module, kw.get('name', self.fun), None)
if fun:
fun(self)
@@ -321,11 +322,11 @@ class Context(ctx):
unlike :py:meth:`waflib.Context.Context.cmd_and_log`
:param cmd: command argument for subprocess.Popen
:param kw: keyword arguments for subprocess.Popen
:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
"""
subprocess = Utils.subprocess
kw['shell'] = isinstance(cmd, str)
Logs.debug('runner: %r' % cmd)
Logs.debug('runner: %r' % (cmd,))
Logs.debug('runner_env: kw=%s' % kw)
if self.logger:
@@ -339,14 +340,25 @@ class Context(ctx):
if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
raise Errors.WafError("Program %s not found!" % cmd[0])
wargs = {}
if 'timeout' in kw:
if kw['timeout'] is not None:
wargs['timeout'] = kw['timeout']
del kw['timeout']
if 'input' in kw:
if kw['input']:
wargs['input'] = kw['input']
kw['stdin'] = Utils.subprocess.PIPE
del kw['input']
try:
if kw['stdout'] or kw['stderr']:
p = subprocess.Popen(cmd, **kw)
(out, err) = p.communicate()
(out, err) = p.communicate(**wargs)
ret = p.returncode
else:
out, err = (None, None)
ret = subprocess.Popen(cmd, **kw).wait()
ret = subprocess.Popen(cmd, **kw).wait(**wargs)
except Exception as e:
raise Errors.WafError('Execution failure: %s' % str(e), ex=e)
@@ -369,24 +381,25 @@ class Context(ctx):
def cmd_and_log(self, cmd, **kw):
"""
Execute a command and return stdout if the execution is successful.
Execute a command and return stdout/stderr if the execution is successful.
An exception is thrown when the exit status is non-0. In that case, both stderr and stdout
will be bound to the WafError object::
def configure(conf):
out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH)
(out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH)
(out, err) = conf.cmd_and_log(cmd, input='\\n'.encode(), output=waflib.Context.STDOUT)
try:
conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH)
except Exception as e:
print(e.stdout, e.stderr)
:param cmd: args for subprocess.Popen
:param kw: keyword arguments for subprocess.Popen
:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
"""
subprocess = Utils.subprocess
kw['shell'] = isinstance(cmd, str)
Logs.debug('runner: %r' % cmd)
Logs.debug('runner: %r' % (cmd,))
if 'quiet' in kw:
quiet = kw['quiet']
@@ -406,9 +419,21 @@ class Context(ctx):
kw['stdout'] = kw['stderr'] = subprocess.PIPE
if quiet is None:
self.to_log(cmd)
wargs = {}
if 'timeout' in kw:
if kw['timeout'] is not None:
wargs['timeout'] = kw['timeout']
del kw['timeout']
if 'input' in kw:
if kw['input']:
wargs['input'] = kw['input']
kw['stdin'] = Utils.subprocess.PIPE
del kw['input']
try:
p = subprocess.Popen(cmd, **kw)
(out, err) = p.communicate()
(out, err) = p.communicate(**wargs)
except Exception as e:
raise Errors.WafError('Execution failure: %s' % str(e), ex=e)
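A short sketch of how the new input/timeout keywords can be used with cmd_and_log, assuming a POSIX environment; the timeout keyword needs the Python 3.3+ subprocess API:
def configure(conf):
    out = conf.cmd_and_log(['cat'], input='hello\n'.encode())   # data is forwarded to communicate() via stdin
    conf.cmd_and_log(['sleep', '1'], timeout=10)                # raises a WafError if the command exceeds 10 seconds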
@@ -620,14 +645,14 @@ def load_module(path, encoding=None):
module_dir = os.path.dirname(path)
sys.path.insert(0, module_dir)
exec(compile(code, path, 'exec'), module.__dict__)
sys.path.remove(module_dir)
try : exec(compile(code, path, 'exec'), module.__dict__)
finally: sys.path.remove(module_dir)
cache_modules[path] = module
return module
def load_tool(tool, tooldir=None, ctx=None):
def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
"""
Import a Waf tool (python module), and store it in the dict :py:const:`waflib.Context.Context.tools`
@@ -635,33 +660,44 @@ def load_tool(tool, tooldir=None, ctx=None):
:param tool: Name of the tool
:type tooldir: list
:param tooldir: List of directories to search for the tool module
:type with_sys_path: boolean
:param with_sys_path: whether or not to search the regular sys.path, besides waf_dir and potentially given tooldirs
"""
if tool == 'java':
tool = 'javaw' # jython
else:
tool = tool.replace('++', 'xx')
if tooldir:
assert isinstance(tooldir, list)
sys.path = tooldir + sys.path
try:
__import__(tool)
origSysPath = sys.path
if not with_sys_path: sys.path = []
try:
if tooldir:
assert isinstance(tooldir, list)
sys.path = tooldir + sys.path
try:
__import__(tool)
finally:
for d in tooldir:
sys.path.remove(d)
ret = sys.modules[tool]
Context.tools[tool] = ret
return ret
finally:
for d in tooldir:
sys.path.remove(d)
else:
for x in ('waflib.Tools.%s', 'waflib.extras.%s', 'waflib.%s', '%s'):
else:
if not with_sys_path: sys.path.insert(0, waf_dir)
try:
__import__(x % tool)
break
except ImportError:
x = None
if x is None: # raise an exception
__import__(tool)
ret = sys.modules[x % tool]
Context.tools[tool] = ret
return ret
for x in ('waflib.Tools.%s', 'waflib.extras.%s', 'waflib.%s', '%s'):
try:
__import__(x % tool)
break
except ImportError:
x = None
if x is None: # raise an exception
__import__(tool)
finally:
if not with_sys_path: sys.path.remove(waf_dir)
ret = sys.modules[x % tool]
Context.tools[tool] = ret
return ret
finally:
if not with_sys_path: sys.path += origSysPath
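A hedged example of loading a project-local tool with the new with_sys_path flag; 'my_tool' and 'buildtools' are placeholder names:
from waflib import Context
module = Context.load_tool('my_tool', tooldir=['buildtools'], with_sys_path=False)   # only the given tooldir is searched, not the regular sys.path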
@@ -20,8 +20,8 @@ if not os.environ.get('NOSYNC', False):
# in case someone uses the root logger
import logging
LOG_FORMAT = "%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
HOUR_FORMAT = "%H:%M:%S"
LOG_FORMAT = os.environ.get('WAF_LOG_FORMAT', '%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s')
HOUR_FORMAT = os.environ.get('WAF_HOUR_FORMAT', '%H:%M:%S')
zones = ''
verbose = 0
@@ -35,6 +35,7 @@ colors_lst = {
'PINK' :'\x1b[35m',
'BLUE' :'\x1b[01;34m',
'CYAN' :'\x1b[36m',
'GREY' :'\x1b[37m',
'NORMAL':'\x1b[0m',
'cursor_on' :'\x1b[?25h',
'cursor_off' :'\x1b[?25l',
@@ -126,6 +126,10 @@ class OptionsContext(Context.Context):
gr.add_option('-o', '--out', action='store', default='', help='build dir for the project', dest='out')
gr.add_option('-t', '--top', action='store', default='', help='src dir for the project', dest='top')
gr.add_option('--no-lock-in-run', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_run')
gr.add_option('--no-lock-in-out', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_out')
gr.add_option('--no-lock-in-top', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_top')
default_prefix = getattr(Context.g_module, 'default_prefix', os.environ.get('PREFIX'))
if not default_prefix:
if platform == 'win32':
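A possible wscript-side counterpart to the new options; setting the env flag has the same effect as passing --no-lock-in-run on the command line:
def configure(conf):
    conf.env.NO_LOCK_IN_RUN = True   # illustrative; skips writing the lock file in the run directory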
@@ -61,10 +61,17 @@ def waf_entry_point(current_directory, version, wafdir):
no_climb = True
break
# if --top is provided assume the build started in the top directory
for x in sys.argv:
if x.startswith('--top='):
Context.run_dir = Context.top_dir = x[6:]
if x.startswith('--out='):
Context.out_dir = x[6:]
# try to find a lock file (if the project was configured)
# at the same time, store the first wscript file seen
cur = current_directory
while cur:
while cur and not Context.top_dir:
lst = os.listdir(cur)
if Options.lockfile in lst:
env = ConfigSet.ConfigSet()
@@ -557,16 +564,26 @@ def distcheck(ctx):
pass
def update(ctx):
'''updates the plugins from the *waflib/extras* directory'''
lst = Options.options.files.split(',')
if not lst:
lst = [x for x in Utils.listdir(Context.waf_dir + '/waflib/extras') if x.endswith('.py')]
lst = Options.options.files
if lst:
lst = lst.split(',')
else:
path = os.path.join(Context.waf_dir, 'waflib', 'extras')
lst = [x for x in Utils.listdir(path) if x.endswith('.py')]
for x in lst:
tool = x.replace('.py', '')
if not tool:
continue
try:
dl = Configure.download_tool
except AttributeError:
ctx.fatal('The command "update" is dangerous; include the tool "use_config" in your project!')
try:
Configure.download_tool(tool, force=True, ctx=ctx)
dl(tool, force=True, ctx=ctx)
except Errors.WafError:
Logs.error('Could not find the tool %s in the remote repository' % x)
Logs.error('Could not find the tool %r in the remote repository' % x)
else:
Logs.warn('Updated %r' % tool)
def autoconfigure(execute_method):
"""
@@ -64,7 +64,7 @@ def f(tsk):
'''
classes = {}
"class tasks created by user scripts or Waf tools are kept in this dict name -> class object"
"Class tasks created by user scripts or Waf tools (maps names to class objects). Task classes defined in Waf tools are registered here through the metaclass :py:class:`waflib.Task.store_task_type`."
class store_task_type(type):
"""
@@ -118,6 +118,8 @@ class TaskBase(evil):
#. runnable_status: ask the task if it should be run, skipped, or if we have to ask later
#. run: let threads execute the task
#. post_run: let threads update the data regarding the task (cache)
.. warning:: For backward compatibility reasons, the suffix "_task" is truncated in derived class names. This limitation will be removed in Waf 1.9.
"""
color = 'GREEN'
@@ -402,6 +404,8 @@ class Task(TaskBase):
uses a hash value (from :py:class:`waflib.Task.Task.signature`) which is persistent from build to build. When the value changes,
the task has to be executed. The method :py:class:`waflib.Task.Task.post_run` will assign the task signature to the output
nodes (if present).
.. warning:: For backward compatibility reasons, the suffix "_task" is truncated in derived class names. This limitation will be removed in Waf 1.9.
"""
vars = []
"""Variables to depend on (class attribute used for :py:meth:`waflib.Task.Task.sig_vars`)"""
@@ -593,6 +593,12 @@ def process_rule(self):
if getattr(self, 'cache_rule', 'True'):
cache[(name, self.rule)] = cls
if getattr(self, 'cls_str', None):
setattr(cls, '__str__', self.cls_str)
if getattr(self, 'cls_keyword', None):
setattr(cls, 'keyword', self.cls_keyword)
# now create one instance
tsk = self.create_task(name)
@@ -1,11 +1,11 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
# Thomas Nagy, 2005-2015 (ita)
"base for all c/c++ programs and libraries"
import os, sys, re
from waflib import Utils, Build
from waflib import Utils, Build, Errors
from waflib.Configure import conf
def get_extensions(lst):
@@ -44,26 +44,34 @@ def sniff_features(**kw):
feats = []
# watch the order, cxx will have the precedence
if 'cxx' in exts or 'cpp' in exts or 'c++' in exts or 'cc' in exts or 'C' in exts:
feats.append('cxx')
for x in 'cxx cpp c++ cc C'.split():
if x in exts:
feats.append('cxx')
break
if 'c' in exts or 'vala' in exts:
feats.append('c')
for x in 'f f90 F F90 for FOR'.split():
if x in exts:
feats.append('fc')
break
if 'd' in exts:
feats.append('d')
if 'java' in exts:
feats.append('java')
if 'java' in exts:
return 'java'
if type in ('program', 'shlib', 'stlib'):
will_link = False
for x in feats:
if x in ('cxx', 'd', 'c'):
if x in ('cxx', 'd', 'fc', 'c'):
feats.append(x + type)
will_link = True
if not will_link and not kw.get('features', []):
raise Errors.WafError('Cannot link from %r, try passing eg: features="cprogram"?' % kw)
return feats
def set_features(kw, _type):
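With the added Fortran branch, a mixed-source sketch like the following would now pick up the 'fc' and 'fcprogram' features automatically (file names are made up, and a Fortran compiler must have been configured, e.g. via conf.load('compiler_c compiler_fc')):
def build(bld):
    bld.program(source='main.f90 util.c', target='solver')   # sniff_features adds c/fc and cprogram/fcprogram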
@@ -25,10 +25,10 @@ cfg_ver = {
SNIP_FUNCTION = '''
int main(int argc, char **argv) {
void *p;
void (*p)();
(void)argc; (void)argv;
p=(void*)(%s);
return (int)p;
p=(void(*)())(%s);
return !p;
}
'''
"""Code template for checking for functions"""
@@ -383,7 +383,9 @@ def check_cfg(self, *k, **kw):
conf.check_cfg(path='sdl-config', args='--cflags --libs', package='', uselib_store='SDL')
conf.check_cfg(path='mpicc', args='--showme:compile --showme:link',
package='', uselib_store='OPEN_MPI', mandatory=False)
# variables
conf.check_cfg(package='gtk+-2.0', variables=['includedir', 'prefix'], uselib_store='FOO')
print(conf.env.FOO_includedir)
"""
if k:
lst = k[0].split()
@@ -587,6 +589,7 @@ def validate_c(self, kw):
kw['execute'] = False
if kw['execute']:
kw['features'].append('test_exec')
kw['chmod'] = 493
if not 'errmsg' in kw:
kw['errmsg'] = 'not found'
@@ -661,6 +664,16 @@ def check(self, *k, **kw):
Perform a configuration test by calling :py:func:`waflib.Configure.run_build`.
For the complete list of parameters, see :py:func:`waflib.Tools.c_config.validate_c`.
To force a specific compiler, pass "compiler='c'" or "compiler='cxx'" in the arguments
Besides build targets, complete builds can be given through a build function. All files will
be written to a temporary directory::
def build(bld):
lib_node = bld.srcnode.make_node('libdir/liblc1.c')
lib_node.parent.mkdir()
lib_node.write('#include <stdio.h>\\nint lib_func(void) { FILE *f = fopen("foo", "r");}\\n', 'w')
bld(features='c cshlib', source=[lib_node], linkflags=conf.env.EXTRA_LDFLAGS, target='liblc')
conf.check(build_fun=build, msg=msg)
"""
self.validate_c(kw)
self.start_msg(kw['msg'], **kw)
@@ -855,7 +868,10 @@ def write_config_header(self, configfile='', guard='', top=False, defines=True,
cnf.define('A', 1)
cnf.write_config_header('config.h')
:param configfile: relative path to the file to create
This function only adds include guards (if necessary), consult
:py:func:`waflib.Tools.c_config.get_config_header` for details on the body.
:param configfile: path to the file to create (relative or absolute)
:type configfile: string
:param guard: include guard name to add, by default it is computed from the file name
:type guard: string
@@ -884,7 +900,7 @@ def write_config_header(self, configfile='', guard='', top=False, defines=True,
node.write('\n'.join(lst))
# config files are not removed on "waf clean"
# config files must not be removed on "waf clean"
self.env.append_unique(Build.CFG_FILES, [node.abspath()])
if remove:
@@ -898,9 +914,16 @@ def get_config_header(self, defines=True, headers=False, define_prefix=''):
Create the contents of a ``config.h`` file from the defines and includes
set in conf.env.define_key / conf.env.include_key. No include guards are added.
A prelude will be added from the variable env.WAF_CONFIG_H_PRELUDE if provided. This
can be used to insert complex macros or include guards::
def configure(conf):
conf.env.WAF_CONFIG_H_PRELUDE = '#include <unistd.h>\\n'
conf.write_config_header('config.h')
:param defines: write the defines values
:type defines: bool
:param headers: write the headers
:param headers: write include entries for each element in self.env.INCKEYS
:type headers: bool
:type define_prefix: string
:param define_prefix: prefix all the defines with a particular prefix
@@ -908,6 +931,10 @@ def get_config_header(self, defines=True, headers=False, define_prefix=''):
:rtype: string
"""
lst = []
if self.env.WAF_CONFIG_H_PRELUDE:
lst.append(self.env.WAF_CONFIG_H_PRELUDE)
if headers:
for x in self.env[INCKEYS]:
lst.append('#include <%s>' % x)
@@ -931,24 +958,24 @@ def cc_add_flags(conf):
"""
Add CFLAGS / CPPFLAGS from os.environ to conf.env
"""
conf.add_os_flags('CPPFLAGS', 'CFLAGS')
conf.add_os_flags('CFLAGS')
conf.add_os_flags('CPPFLAGS', dup=False)
conf.add_os_flags('CFLAGS', dup=False)
@conf
def cxx_add_flags(conf):
"""
Add CXXFLAGS / CPPFLAGS from os.environ to conf.env
"""
conf.add_os_flags('CPPFLAGS', 'CXXFLAGS')
conf.add_os_flags('CXXFLAGS')
conf.add_os_flags('CPPFLAGS', dup=False)
conf.add_os_flags('CXXFLAGS', dup=False)
@conf
def link_add_flags(conf):
"""
Add LINKFLAGS / LDFLAGS from os.environ to conf.env
"""
conf.add_os_flags('LINKFLAGS')
conf.add_os_flags('LDFLAGS')
conf.add_os_flags('LINKFLAGS', dup=False)
conf.add_os_flags('LDFLAGS', dup=False)
@conf
def cc_load_tools(conf):
@@ -978,15 +1005,10 @@ def get_cc_version(conf, cc, gcc=False, icc=False, clang=False):
cmd = cc + ['-dM', '-E', '-']
env = conf.env.env or None
try:
p = Utils.subprocess.Popen(cmd, stdin=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=env)
p.stdin.write('\n'.encode())
out = p.communicate()[0]
out, err = conf.cmd_and_log(cmd, output=0, input='\n'.encode(), env=env)
except Exception:
conf.fatal('Could not determine the compiler version %r' % cmd)
if not isinstance(out, str):
out = out.decode(sys.stdout.encoding or 'iso8859-1')
if gcc:
if out.find('__INTEL_COMPILER') >= 0:
conf.fatal('The intel compiler pretends to be gcc')
@@ -999,7 +1021,7 @@ def get_cc_version(conf, cc, gcc=False, icc=False, clang=False):
if clang and out.find('__clang__') < 0:
conf.fatal('Not clang/clang++')
if not clang and out.find('__clang__') >= 0:
conf.fatal('Could not find g++, if renamed try eg: CXX=g++48 waf configure')
conf.fatal('Could not find gcc/g++ (only Clang), if renamed try eg: CC=gcc48 CXX=g++48 waf configure')
k = {}
if icc or gcc or clang:
@@ -1014,9 +1036,6 @@ def get_cc_version(conf, cc, gcc=False, icc=False, clang=False):
def isD(var):
return var in k
def isT(var):
return var in k and k[var] != '0'
# Some documentation is available at http://predef.sourceforge.net
# The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns.
if not conf.env.DEST_OS:
@@ -1053,17 +1072,11 @@ def get_cc_version(conf, cc, gcc=False, icc=False, clang=False):
ver = k['__INTEL_COMPILER']
conf.env['CC_VERSION'] = (ver[:-2], ver[-2], ver[-1])
else:
if isD('__clang__'):
try:
conf.env['CC_VERSION'] = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__'])
except KeyError:
# Some versions of OSX have a faux-gcc "clang" without clang version defines
conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__'])
if isD('__clang__') and isD('__clang_major__'):
conf.env['CC_VERSION'] = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__'])
else:
try:
conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__'])
except KeyError:
conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], 0)
# older clang versions and gcc
conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k.get('__GNUC_PATCHLEVEL__', '0'))
return k
@conf
@@ -1165,6 +1178,7 @@ def multicheck(self, *k, **kw):
self.keep = False
self.returned_tasks = []
self.task_sigs = {}
self.progress_bar = 0
def total(self):
return len(tasks)
def to_log(self, *k, **kw):
@@ -1195,10 +1209,17 @@ def multicheck(self, *k, **kw):
for x in tasks:
x.logger.memhandler.flush()
if p.error:
for x in p.error:
if getattr(x, 'err_msg', None):
self.to_log(x.err_msg)
self.end_msg('fail', color='RED')
raise Errors.WafError('There is an error in the library, read config.log for more information')
for x in tasks:
if x.hasrun != Task.SUCCESS:
self.end_msg(kw.get('errmsg', 'no'), color='YELLOW', **kw)
self.fatal(kw.get('fatalmsg', None) or 'One of the tests has failed, see the config.log for more information')
self.fatal(kw.get('fatalmsg', None) or 'One of the tests has failed, read config.log for more information')
self.end_msg('ok', **kw)
@@ -24,7 +24,7 @@ app_info = '''
<key>NOTE</key>
<string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
<key>CFBundleExecutable</key>
<string>%s</string>
<string>{app_name}</string>
</dict>
</plist>
'''
@@ -71,7 +71,7 @@ def create_task_macapp(self):
To compile an executable into a Mac application (a .app), set its *mac_app* attribute::
def build(bld):
bld.shlib(source='a.c', target='foo', mac_app = True)
bld.shlib(source='a.c', target='foo', mac_app=True)
To force *all* executables to be transformed into Mac applications::
@@ -91,7 +91,22 @@ def create_task_macapp(self):
inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/MacOS/' % name
self.bld.install_files(inst_to, n1, chmod=Utils.O755)
if getattr(self, 'mac_files', None):
# this only accepts files; they will be installed as seen from mac_files_root
mac_files_root = getattr(self, 'mac_files_root', None)
if isinstance(mac_files_root, str):
mac_files_root = self.path.find_node(mac_files_root)
if not mac_files_root:
self.bld.fatal('Invalid mac_files_root %r' % self.mac_files_root)
res_dir = n1.parent.parent.make_node('Resources')
inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Resources' % name
for node in self.to_nodes(self.mac_files):
relpath = node.path_from(mac_files_root or node.parent)
tsk = self.create_task('macapp', node, res_dir.make_node(relpath))
self.bld.install_as(os.path.join(inst_to, relpath), node)
if getattr(self, 'mac_resources', None):
# TODO remove in waf 1.9
res_dir = n1.parent.parent.make_node('Resources')
inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Resources' % name
for x in self.to_list(self.mac_resources):
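A possible use of the new mac_files/mac_files_root parameters; the resource folder name below is hypothetical:
def build(bld):
    bld.program(source='a.c', target='foo', mac_app=True,
        mac_files=bld.path.ant_glob('res/**'),   # files only, installed relative to mac_files_root
        mac_files_root='res')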
@@ -127,6 +142,14 @@ def create_task_macplist(self):
dir = self.create_bundle_dirs(name, out)
n1 = dir.find_or_declare(['Contents', 'Info.plist'])
self.plisttask = plisttask = self.create_task('macplist', [], n1)
plisttask.context = {
'app_name': self.link_task.outputs[0].name,
'env': self.env
}
plist_ctx = getattr(self, 'plist_context', None)
if (plist_ctx):
plisttask.context.update(plist_ctx)
if getattr(self, 'mac_plist', False):
node = self.path.find_resource(self.mac_plist)
@@ -135,7 +158,7 @@ def create_task_macplist(self):
else:
plisttask.code = self.mac_plist
else:
plisttask.code = app_info % self.link_task.outputs[0].name
plisttask.code = app_info
inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/' % name
self.bld.install_files(inst_to, n1)
@@ -184,5 +207,6 @@ class macplist(Task.Task):
txt = self.code
else:
txt = self.inputs[0].read()
context = getattr(self, 'context', {})
txt = txt.format(**context)
self.outputs[0].write(txt)
@@ -489,10 +489,19 @@ def apply_vnum(self):
def build(bld):
bld.shlib(source='a.c', target='foo', vnum='14.15.16')
In this example, ``libfoo.so`` is installed as ``libfoo.so.1.2.3``, and the following symbolic links are created:
In this example on Linux platform, ``libfoo.so`` is installed as ``libfoo.so.14.15.16``, and the following symbolic links are created:
* ``libfoo.so → libfoo.so.1.2.3``
* ``libfoo.so.1 → libfoo.so.1.2.3``
* ``libfoo.so → libfoo.so.14.15.16``
* ``libfoo.so.14 → libfoo.so.14.15.16``
By default, the library will be assigned SONAME ``libfoo.so.14``, effectively declaring ABI compatibility between all minor and patch releases for the major version of the library. When necessary, the compatibility can be explicitly defined using the `cnum` parameter:
def build(bld):
bld.shlib(source='a.c', target='foo', vnum='14.15.16', cnum='14.15')
In this case, the assigned SONAME will be ``libfoo.so.14.15``, with ABI compatibility only between patch releases for a specific major and minor version of the library.
On the OS X platform, the install-name parameter follows the above logic for SONAME, with the exception that it also specifies an absolute path (based on install_path) of the library.
""" | |||
if not getattr(self, 'vnum', '') or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'): | |||
return | |||
@@ -503,13 +512,18 @@ def apply_vnum(self): | |||
nums = self.vnum.split('.') | |||
node = link.outputs[0] | |||
cnum = getattr(self, 'cnum', str(nums[0])) | |||
cnums = cnum.split('.') | |||
if len(cnums)>len(nums) or nums[0:len(cnums)] != cnums: | |||
raise Errors.WafError('invalid compatibility version %s' % cnum) | |||
libname = node.name | |||
if libname.endswith('.dylib'): | |||
name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum) | |||
name2 = libname.replace('.dylib', '.%s.dylib' % nums[0]) | |||
name2 = libname.replace('.dylib', '.%s.dylib' % cnum) | |||
else: | |||
name3 = libname + '.' + self.vnum | |||
name2 = libname + '.' + nums[0] | |||
name2 = libname + '.' + cnum | |||
# add the so name for the ld linker - to disable, just unset env.SONAME_ST | |||
if self.env.SONAME_ST: | |||
@@ -548,8 +562,10 @@ def apply_vnum(self): | |||
inst_to = self.link_task.__class__.inst_to | |||
if inst_to: | |||
p = Utils.subst_vars(inst_to, self.env) | |||
path = os.path.join(p, self.link_task.outputs[0].name) | |||
path = os.path.join(p, name2) | |||
self.env.append_value('LINKFLAGS', ['-install_name', path]) | |||
self.env.append_value('LINKFLAGS', '-Wl,-compatibility_version,%s' % cnum) | |||
self.env.append_value('LINKFLAGS', '-Wl,-current_version,%s' % self.vnum) | |||
class vnum(Task.Task): | |||
""" | |||
@@ -21,6 +21,7 @@ def find_clang(conf): | |||
def configure(conf): | |||
conf.find_clang() | |||
conf.find_program(['llvm-ar', 'ar'], var='AR') | |||
conf.find_ar() | |||
conf.gcc_common_flags() | |||
conf.gcc_modifier_platform() | |||
@@ -21,6 +21,7 @@ def find_clangxx(conf): | |||
def configure(conf): | |||
conf.find_clangxx() | |||
conf.find_program(['llvm-ar', 'ar'], var='AR') | |||
conf.find_ar() | |||
conf.gxx_common_flags() | |||
conf.gxx_modifier_platform() | |||
@@ -64,7 +64,7 @@ def check_same_targets(self): | |||
if not dupe: | |||
for (k, v) in uids.items(): | |||
if len(v) > 1: | |||
Logs.error('* Several tasks use the same identifier. Please check the information on\n http://docs.waf.googlecode.com/git/apidocs_16/Task.html#waflib.Task.Task.uid') | |||
Logs.error('* Several tasks use the same identifier. Please check the information on\n https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid') | |||
for tsk in v: | |||
Logs.error(' - object %r (%r) defined in %r' % (tsk.__class__.__name__, tsk, tsk.generator)) | |||
@@ -97,7 +97,7 @@ def gcc_modifier_darwin(conf): | |||
"""Configuration flags for executing gcc on MacOS""" | |||
v = conf.env | |||
v['CFLAGS_cshlib'] = ['-fPIC'] | |||
v['LINKFLAGS_cshlib'] = ['-dynamiclib', '-Wl,-compatibility_version,1', '-Wl,-current_version,1'] | |||
v['LINKFLAGS_cshlib'] = ['-dynamiclib'] | |||
v['cshlib_PATTERN'] = 'lib%s.dylib' | |||
v['FRAMEWORKPATH_ST'] = '-F%s' | |||
v['FRAMEWORK_ST'] = ['-framework'] | |||
@@ -97,7 +97,7 @@ def gxx_modifier_darwin(conf): | |||
"""Configuration flags for executing g++ on MacOS""" | |||
v = conf.env | |||
v['CXXFLAGS_cxxshlib'] = ['-fPIC'] | |||
v['LINKFLAGS_cxxshlib'] = ['-dynamiclib', '-Wl,-compatibility_version,1', '-Wl,-current_version,1'] | |||
v['LINKFLAGS_cxxshlib'] = ['-dynamiclib'] | |||
v['cxxshlib_PATTERN'] = 'lib%s.dylib' | |||
v['FRAMEWORKPATH_ST'] = '-F%s' | |||
v['FRAMEWORK_ST'] = ['-framework'] | |||
@@ -30,6 +30,10 @@ or:: | |||
Platforms and targets will be tested in the order they appear; | |||
the first good configuration will be used. | |||
To skip testing all the configurations that are not used, use the ``--msvc_lazy_autodetect`` option | |||
or set ``conf.env['MSVC_LAZY_AUTODETECT']=True``. | |||
Supported platforms: ia64, x64, x86, x86_amd64, x86_ia64, x86_arm, amd64_x86, amd64_arm | |||
Compilers supported: | |||
@@ -90,8 +94,18 @@ all_icl_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), | |||
def options(opt): | |||
opt.add_option('--msvc_version', type='string', help = 'msvc version, eg: "msvc 10.0,msvc 9.0"', default='') | |||
opt.add_option('--msvc_targets', type='string', help = 'msvc targets, eg: "x64,arm"', default='') | |||
opt.add_option('--msvc_lazy_autodetect', action='store_true', help = 'lazily check msvc target environments') | |||
def setup_msvc(conf, versions, arch = False): | |||
""" | |||
Checks installed compilers and targets and returns the first combination from the user's | |||
options, env, or the global supported lists that checks. | |||
:param versions: A list of tuples of all installed compilers and available targets. | |||
:param arch: Whether to return the target architecture. | |||
:return: the compiler, revision, path, include dirs, library paths, and (optionally) target architecture | |||
:rtype: tuple of strings | |||
""" | |||
platforms = getattr(Options.options, 'msvc_targets', '').split(',') | |||
if platforms == ['']: | |||
platforms=Utils.to_list(conf.env['MSVC_TARGETS']) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms] | |||
@@ -102,15 +116,20 @@ def setup_msvc(conf, versions, arch = False): | |||
for version in desired_versions: | |||
try: | |||
targets = dict(versiondict [version]) | |||
targets = dict(versiondict[version]) | |||
for target in platforms: | |||
try: | |||
arch,(p1,p2,p3) = targets[target] | |||
compiler,revision = version.rsplit(' ', 1) | |||
if arch: | |||
return compiler,revision,p1,p2,p3,arch | |||
try: | |||
realtarget,(p1,p2,p3) = targets[target] | |||
except conf.errors.ConfigurationError: | |||
# lazytup target evaluation errors | |||
del(targets[target]) | |||
else: | |||
return compiler,revision,p1,p2,p3 | |||
compiler,revision = version.rsplit(' ', 1) | |||
if arch: | |||
return compiler,revision,p1,p2,p3,realtarget | |||
else: | |||
return compiler,revision,p1,p2,p3 | |||
except KeyError: continue | |||
except KeyError: continue | |||
conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)') | |||
@@ -118,13 +137,14 @@ def setup_msvc(conf, versions, arch = False): | |||
@conf | |||
def get_msvc_version(conf, compiler, version, target, vcvars): | |||
""" | |||
Create a bat file to obtain the location of the libraries | |||
:param compiler: ? | |||
:param version: ? | |||
:target: ? | |||
:vcvars: ? | |||
:return: the location of msvc, the location of include dirs, and the library paths | |||
Checks that an installed compiler actually runs and uses vcvars to obtain the | |||
environment needed by the compiler. | |||
:param compiler: compiler type, for looking up the executable name | |||
:param version: compiler version, for debugging only | |||
:param target: target architecture | |||
:param vcvars: batch file to run to check the environment | |||
:return: the location of the compiler executable, the location of include dirs, and the library paths | |||
:rtype: tuple of strings | |||
""" | |||
debug('msvc: get_msvc_version: %r %r %r', compiler, version, target) | |||
@@ -219,7 +239,7 @@ def gather_wsdk_versions(conf, versions): | |||
targets = [] | |||
for target,arch in all_msvc_platforms: | |||
try: | |||
targets.append((target, (arch, conf.get_msvc_version('wsdk', version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd'))))) | |||
targets.append((target, (arch, get_compiler_env(conf, 'wsdk', version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd'))))) | |||
except conf.errors.ConfigurationError: | |||
pass | |||
versions.append(('wsdk ' + version[1:], targets)) | |||
@@ -262,12 +282,12 @@ def gather_wince_supported_platforms(): | |||
path,device = os.path.split(path) | |||
if not device: | |||
path,device = os.path.split(path) | |||
platforms = [] | |||
for arch,compiler in all_wince_platforms: | |||
platforms = [] | |||
if os.path.isdir(os.path.join(path, device, 'Lib', arch)): | |||
platforms.append((arch, compiler, os.path.join(path, device, 'Include', arch), os.path.join(path, device, 'Lib', arch))) | |||
if platforms: | |||
supported_wince_platforms.append((device, platforms)) | |||
if platforms: | |||
supported_wince_platforms.append((device, platforms)) | |||
return supported_wince_platforms | |||
def gather_msvc_detected_versions(): | |||
@@ -304,6 +324,65 @@ def gather_msvc_detected_versions(): | |||
detected_versions.sort(key = fun) | |||
return detected_versions | |||
def get_compiler_env(conf, compiler, version, bat_target, bat, select=None): | |||
""" | |||
Gets the compiler environment variables as a tuple. Evaluation is eager by default. | |||
If set to lazy with ``--msvc_lazy_autodetect`` or ``env.MSVC_LAZY_AUTODETECT`` | |||
the environment is evaluated when the tuple is destructured or iterated. This means | |||
destructuring can throw :py:class:`conf.errors.ConfigurationError`. | |||
:param conf: configuration context to use to eventually get the version environment | |||
:param compiler: compiler name | |||
:param version: compiler version number | |||
:param bat: path to the batch file to run | |||
:param select: optional function to take the realized environment variables tup and map it (e.g. to combine other constant paths) | |||
""" | |||
lazy = getattr(Options.options, 'msvc_lazy_autodetect', False) or conf.env['MSVC_LAZY_AUTODETECT'] | |||
def msvc_thunk(): | |||
vs = conf.get_msvc_version(compiler, version, bat_target, bat) | |||
if select: | |||
return select(vs) | |||
else: | |||
return vs | |||
return lazytup(msvc_thunk, lazy, ([], [], [])) | |||
class lazytup(object): | |||
""" | |||
A tuple that evaluates its elements from a function when iterated or destructured. | |||
:param fn: thunk to evaluate the tuple on demand | |||
:param lazy: whether to delay evaluation or evaluate in the constructor | |||
:param default: optional default for :py:func:`repr` if it should not evaluate | |||
""" | |||
def __init__(self, fn, lazy=True, default=None): | |||
self.fn = fn | |||
self.default = default | |||
if not lazy: | |||
self.evaluate() | |||
def __len__(self): | |||
self.evaluate() | |||
return len(self.value) | |||
def __iter__(self): | |||
self.evaluate() | |||
for i, v in enumerate(self.value): | |||
yield v | |||
def __getitem__(self, i): | |||
self.evaluate() | |||
return self.value[i] | |||
def __repr__(self): | |||
if hasattr(self, 'value'): | |||
return repr(self.value) | |||
elif self.default: | |||
return repr(self.default) | |||
else: | |||
self.evaluate() | |||
return repr(self.value) | |||
def evaluate(self): | |||
if hasattr(self, 'value'): | |||
return | |||
self.value = self.fn() | |||
@conf | |||
def gather_msvc_targets(conf, versions, version, vc_path): | |||
#Looking for normal MSVC compilers! | |||
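An illustrative, msvc-independent use of lazytup showing when the thunk actually runs (the probe function below is made up for the example):
def probe():
    return (['bin'], ['include'], ['lib'])
t = lazytup(probe, lazy=True, default=([], [], []))
repr(t)                          # shows the default, probe() has not been called yet
bindirs, incdirs, libdirs = t    # destructuring iterates, which triggers evaluate() and runs probe()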
@@ -311,17 +390,17 @@ def gather_msvc_targets(conf, versions, version, vc_path):
if os.path.isfile(os.path.join(vc_path, 'vcvarsall.bat')):
for target,realtarget in all_msvc_platforms[::-1]:
try:
targets.append((target, (realtarget, conf.get_msvc_version('msvc', version, target, os.path.join(vc_path, 'vcvarsall.bat')))))
targets.append((target, (realtarget, get_compiler_env(conf, 'msvc', version, target, os.path.join(vc_path, 'vcvarsall.bat')))))
except conf.errors.ConfigurationError:
pass
elif os.path.isfile(os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat')):
try:
targets.append(('x86', ('x86', conf.get_msvc_version('msvc', version, 'x86', os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat')))))
targets.append(('x86', ('x86', get_compiler_env(conf, 'msvc', version, 'x86', os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat')))))
except conf.errors.ConfigurationError:
pass
elif os.path.isfile(os.path.join(vc_path, 'Bin', 'vcvars32.bat')):
try:
targets.append(('x86', ('x86', conf.get_msvc_version('msvc', version, '', os.path.join(vc_path, 'Bin', 'vcvars32.bat')))))
targets.append(('x86', ('x86', get_compiler_env(conf, 'msvc', version, '', os.path.join(vc_path, 'Bin', 'vcvars32.bat')))))
except conf.errors.ConfigurationError:
pass
if targets:
@@ -336,15 +415,18 @@ def gather_wince_targets(conf, versions, version, vc_path, vsvars, supported_pla
winCEpath = os.path.join(vc_path, 'ce')
if not os.path.isdir(winCEpath):
continue
try:
common_bindirs,_1,_2 = conf.get_msvc_version('msvc', version, 'x86', vsvars)
except conf.errors.ConfigurationError:
continue
if os.path.isdir(os.path.join(winCEpath, 'lib', platform)):
bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 'x86_'+compiler)] + common_bindirs
bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 'x86_'+compiler)]
incdirs = [os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include'), include]
libdirs = [os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform), lib]
cetargets.append((platform, (platform, (bindirs,incdirs,libdirs))))
def combine_common(compiler_env):
(common_bindirs,_1,_2) = compiler_env
return (bindirs + common_bindirs, incdirs, libdirs)
try:
cetargets.append((platform, (platform, get_compiler_env(conf, 'msvc', version, 'x86', vsvars, combine_common))))
except conf.errors.ConfigurationError:
continue
if cetargets:
versions.append((device + ' ' + version, cetargets))
@@ -354,8 +436,8 @@ def gather_winphone_targets(conf, versions, version, vc_path, vsvars):
targets = []
for target,realtarget in all_msvc_platforms[::-1]:
try:
targets.append((target, (realtarget, conf.get_msvc_version('winphone', version, target, vsvars))))
except conf.errors.ConfigurationError as e:
targets.append((target, (realtarget, get_compiler_env(conf, 'winphone', version, target, vsvars))))
except conf.errors.ConfigurationError:
pass
if targets:
versions.append(('winphone '+ version, targets))
@@ -382,9 +464,14 @@ def gather_msvc_versions(conf, versions):
if wince_supported_platforms and os.path.isfile(vsvars):
conf.gather_wince_targets(versions, version, vc_path, vsvars, wince_supported_platforms)
# WP80 works with 11.0Exp and 11.0, both of which resolve to the same vc_path.
# Stop after one is found.
for version,vc_path in vc_paths:
vs_path = os.path.dirname(vc_path)
vsvars = os.path.join(vs_path, 'VC', 'WPSDK', 'WP80', 'vcvarsphoneall.bat')
if os.path.isfile(vsvars):
conf.gather_winphone_targets(versions, '8.0', vc_path, vsvars)
break
for version,vc_path in vc_paths:
vs_path = os.path.dirname(vc_path)
@@ -426,7 +513,7 @@ def gather_icl_versions(conf, versions):
batch_file=os.path.join(path,'bin','iclvars.bat')
if os.path.isfile(batch_file):
try:
targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file))))
targets.append((target,(arch,get_compiler_env(conf,'intel',version,target,batch_file))))
except conf.errors.ConfigurationError:
pass
except WindowsError:
@@ -438,7 +525,7 @@ def gather_icl_versions(conf, versions):
batch_file=os.path.join(path,'bin','iclvars.bat')
if os.path.isfile(batch_file):
try:
targets.append((target, (arch, conf.get_msvc_version('intel', version, target, batch_file))))
targets.append((target, (arch, get_compiler_env(conf, 'intel', version, target, batch_file))))
except conf.errors.ConfigurationError:
pass
except WindowsError:
@@ -490,8 +577,8 @@ def gather_intel_composer_versions(conf, versions):
batch_file=os.path.join(path,'bin','iclvars.bat')
if os.path.isfile(batch_file):
try:
targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file))))
except conf.errors.ConfigurationError as e:
targets.append((target,(arch,get_compiler_env(conf,'intel',version,target,batch_file))))
except conf.errors.ConfigurationError:
pass
# The intel compilervar_arch.bat is broken when used with Visual Studio Express 2012
# http://software.intel.com/en-us/forums/topic/328487
@@ -516,19 +603,36 @@ def gather_intel_composer_versions(conf, versions):
versions.append(('intel ' + major, targets))
@conf
def get_msvc_versions(conf):
def get_msvc_versions(conf, eval_and_save=True):
"""
:return: list of compilers installed
:rtype: list of string
"""
if not conf.env['MSVC_INSTALLED_VERSIONS']:
lst = []
conf.gather_icl_versions(lst)
conf.gather_intel_composer_versions(lst)
conf.gather_wsdk_versions(lst)
conf.gather_msvc_versions(lst)
if conf.env['MSVC_INSTALLED_VERSIONS']:
return conf.env['MSVC_INSTALLED_VERSIONS']
# Gather all the compiler versions and targets. This phase can be lazy
# per lazy detection settings.
lst = []
conf.gather_icl_versions(lst)
conf.gather_intel_composer_versions(lst)
conf.gather_wsdk_versions(lst)
conf.gather_msvc_versions(lst)
# Override lazy detection by evaluating after the fact.
if eval_and_save:
def checked_target(t):
target,(arch,paths) = t
try:
paths.evaluate()
except conf.errors.ConfigurationError:
return None
else:
return t
lst = [(version, list(filter(checked_target, targets))) for version, targets in lst]
conf.env['MSVC_INSTALLED_VERSIONS'] = lst
return conf.env['MSVC_INSTALLED_VERSIONS']
return lst
@conf
def print_all_msvc_detected(conf):
@@ -542,7 +646,9 @@ def print_all_msvc_detected(conf):
@conf
def detect_msvc(conf, arch = False):
versions = get_msvc_versions(conf)
# Save installed versions only if lazy detection is disabled.
lazy_detect = getattr(Options.options, 'msvc_lazy_autodetect', False) or conf.env['MSVC_LAZY_AUTODETECT']
versions = get_msvc_versions(conf, not lazy_detect)
return setup_msvc(conf, versions, arch)
@conf
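A configure sketch for enabling the lazy detection path from a wscript (equivalent to passing --msvc_lazy_autodetect on the command line):
def configure(conf):
    conf.env.MSVC_LAZY_AUTODETECT = True   # illustrative; target environments are probed only when actually selected
    conf.load('msvc')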
@@ -3,7 +3,6 @@
# Thomas Nagy, 2006-2010 (ita)
# Ralf Habacker, 2006 (rh)
from waflib import Utils
from waflib.Tools import ccroot, ar
from waflib.Configure import conf
@@ -53,7 +52,7 @@ def scc_common_flags(conf):
v['cprogram_PATTERN'] = '%s'
# shared library
v['CFLAGS_cshlib'] = ['-Kpic', '-DPIC']
v['CFLAGS_cshlib'] = ['-xcode=pic32', '-DPIC']
v['LINKFLAGS_cshlib'] = ['-G']
v['cshlib_PATTERN'] = 'lib%s.so'
@@ -68,4 +67,3 @@ def configure(conf):
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
@@ -3,7 +3,6 @@
# Thomas Nagy, 2006-2010 (ita)
# Ralf Habacker, 2006 (rh)
from waflib import Utils
from waflib.Tools import ccroot, ar
from waflib.Configure import conf
@@ -51,7 +50,7 @@ def sxx_common_flags(conf):
v['cxxprogram_PATTERN'] = '%s'
# shared library
v['CXXFLAGS_cxxshlib'] = ['-Kpic', '-DPIC']
v['CXXFLAGS_cxxshlib'] = ['-xcode=pic32', '-DPIC']
v['LINKFLAGS_cxxshlib'] = ['-G']
v['cxxshlib_PATTERN'] = 'lib%s.so'
@@ -284,7 +284,7 @@ else:
wlock.release()
def writeconsole(self, txt):
chars_written = c_int()
chars_written = c_ulong()
writeconsole = windll.kernel32.WriteConsoleA
if isinstance(txt, _type):
writeconsole = windll.kernel32.WriteConsoleW
@@ -1,98 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy 2011
import os, shutil, re
from waflib import Options, Build, Logs
"""
Apply a least recently used policy to the Waf cache.
For performance reasons, it is called after the build is complete.
We assume that the folders are written atomically
Do export WAFCACHE=/tmp/foo_xyz where xyz represents the cache size in bytes
If missing, the default cache size will be set to 10GB
"""
re_num = re.compile('[a-zA-Z_-]+(\d+)')
CACHESIZE = 10*1024*1024*1024 # in bytes
CLEANRATIO = 0.8
DIRSIZE = 4096
def compile(self):
if Options.cache_global and not Options.options.nocache:
try:
os.makedirs(Options.cache_global)
except:
pass
try:
self.raw_compile()
finally:
if Options.cache_global and not Options.options.nocache:
self.sweep()
def sweep(self):
global CACHESIZE
CACHEDIR = Options.cache_global
# get the cache max size from the WAFCACHE filename
re_num = re.compile('[a-zA-Z_]+(\d+)')
val = re_num.sub('\\1', os.path.basename(Options.cache_global))
try:
CACHESIZE = int(val)
except:
pass
# map folder names to timestamps
flist = {}
for x in os.listdir(CACHEDIR):
j = os.path.join(CACHEDIR, x)
if os.path.isdir(j) and len(x) == 64: # dir names are md5 hexdigests
flist[x] = [os.stat(j).st_mtime, 0]
for (x, v) in flist.items():
cnt = DIRSIZE # each entry takes 4kB
d = os.path.join(CACHEDIR, x)
for k in os.listdir(d):
cnt += os.stat(os.path.join(d, k)).st_size
flist[x][1] = cnt
total = sum([x[1] for x in flist.values()])
Logs.debug('lru: Cache size is %r' % total)
if total >= CACHESIZE:
Logs.debug('lru: Trimming the cache since %r > %r' % (total, CACHESIZE))
# make a list to sort the folders by timestamp
lst = [(p, v[0], v[1]) for (p, v) in flist.items()]
lst.sort(key=lambda x: x[1]) # sort by timestamp
lst.reverse()
while total >= CACHESIZE * CLEANRATIO:
(k, t, s) = lst.pop()
p = os.path.join(CACHEDIR, k)
v = p + '.del'
try:
os.rename(p, v)
except:
# someone already did it
pass
else:
try:
shutil.rmtree(v)
except:
# this should not happen, but who knows?
Logs.warn('If you ever see this message, report it (%r)' % v)
total -= s
del flist[k]
Logs.debug('lru: Total at the end %r' % total)
Build.BuildContext.raw_compile = Build.BuildContext.compile
Build.BuildContext.compile = compile
Build.BuildContext.sweep = sweep