Browse Source

Update to waf 2.0.11

- Migrate pkg-config checks from atleast_version.
- Check ppoll with a fragment, since the function_name argument has been
  removed.
tags/v1.9.13
Karl Linden 6 years ago
parent
commit
630c6145b8
No known key found for this signature in database GPG Key ID: C0F669D8CE2576AB
53 changed files with 5487 additions and 4479 deletions
  1. +12
    -16
      .wafupdaterc
  2. +1
    -1
      dbus/wscript
  3. +4
    -4
      waf
  4. +635
    -492
      waflib/Build.py
  5. +46
    -34
      waflib/ConfigSet.py
  6. +78
    -105
      waflib/Configure.py
  7. +169
    -139
      waflib/Context.py
  8. +9
    -11
      waflib/Errors.py
  9. +66
    -32
      waflib/Logs.py
  10. +376
    -254
      waflib/Node.py
  11. +116
    -47
      waflib/Options.py
  12. +407
    -169
      waflib/Runner.py
  13. +190
    -212
      waflib/Scripting.py
  14. +653
    -488
      waflib/Task.py
  15. +205
    -148
      waflib/TaskGen.py
  16. +1
    -1
      waflib/Tools/__init__.py
  17. +2
    -2
      waflib/Tools/ar.py
  18. +7
    -7
      waflib/Tools/c.py
  19. +21
    -12
      waflib/Tools/c_aliases.py
  20. +366
    -281
      waflib/Tools/c_config.py
  21. +18
    -36
      waflib/Tools/c_osx.py
  22. +260
    -210
      waflib/Tools/c_preproc.py
  23. +13
    -12
      waflib/Tools/c_tests.py
  24. +97
    -43
      waflib/Tools/ccroot.py
  25. +1
    -1
      waflib/Tools/clang.py
  26. +2
    -2
      waflib/Tools/clangxx.py
  27. +15
    -9
      waflib/Tools/compiler_c.py
  28. +14
    -8
      waflib/Tools/compiler_cxx.py
  29. +7
    -7
      waflib/Tools/cxx.py
  30. +51
    -37
      waflib/Tools/errcheck.py
  31. +55
    -58
      waflib/Tools/gcc.py
  32. +56
    -58
      waflib/Tools/gxx.py
  33. +3
    -6
      waflib/Tools/icc.py
  34. +3
    -6
      waflib/Tools/icpc.py
  35. +30
    -24
      waflib/Tools/irixcc.py
  36. +357
    -512
      waflib/Tools/msvc.py
  37. +27
    -29
      waflib/Tools/suncc.py
  38. +26
    -27
      waflib/Tools/suncxx.py
  39. +152
    -63
      waflib/Tools/waf_unit_test.py
  40. +26
    -28
      waflib/Tools/xlc.py
  41. +26
    -28
      waflib/Tools/xlcxx.py
  42. +448
    -204
      waflib/Utils.py
  43. +1
    -1
      waflib/__init__.py
  44. +3
    -3
      waflib/ansiterm.py
  45. +18
    -14
      waflib/extras/batched_cc.py
  46. +7
    -11
      waflib/extras/build_file_tracker.py
  47. +3
    -4
      waflib/extras/build_logs.py
  48. +36
    -33
      waflib/extras/c_nec.py
  49. +0
    -312
      waflib/extras/xcode.py
  50. +268
    -197
      waflib/extras/xcode6.py
  51. +13
    -21
      waflib/fixpy2.py
  52. +64
    -0
      waflib/processor.py
  53. +23
    -20
      wscript

+ 12
- 16
.wafupdaterc View File

@@ -25,13 +25,13 @@ WAFLIB_STRIP_TOOLS="
ifort ifort
intltool intltool
javaw javaw
kde4
ldc2 ldc2
lua lua
md5_tstamp
nasm nasm
nobuild
perl perl
python python
qt4
qt5 qt5
ruby ruby
tex tex
@@ -40,17 +40,16 @@ WAFLIB_STRIP_TOOLS="
" "


WAFLIB_STRIP_EXTRAS=" WAFLIB_STRIP_EXTRAS="
add_objects
biber biber
bjam bjam
blender blender
boo boo
boost boost
buildcopy
c_dumbpreproc c_dumbpreproc
c_emscripten c_emscripten
cabal cabal
cfg_altoptions cfg_altoptions
cfg_cross_gnu
clang_compilation_database clang_compilation_database
codelite codelite
compat15 compat15
@@ -58,6 +57,7 @@ WAFLIB_STRIP_EXTRAS="
color_rvct color_rvct
cppcheck cppcheck
cpplint cpplint
cross_gnu
cython cython
dcc dcc
distnet distnet
@@ -65,6 +65,7 @@ WAFLIB_STRIP_EXTRAS="
dpapi dpapi
eclipse eclipse
erlang erlang
fast_partial
fc_bgxlf fc_bgxlf
fc_cray fc_cray
fc_nag fc_nag
@@ -79,19 +80,17 @@ WAFLIB_STRIP_EXTRAS="
fsb fsb
fsc fsc
gccdeps gccdeps
go
gdbus
gob2 gob2
halide halide
javatest
kde4
local_rpath local_rpath
make make
md5_tstamp
mem_reducer
midl midl
misc
msvcdeps msvcdeps
msvs msvs
netcache_client netcache_client
nobuild
objcopy objcopy
ocaml ocaml
package package
@@ -100,13 +99,12 @@ WAFLIB_STRIP_EXTRAS="
pep8 pep8
pgicc pgicc
pgicxx pgicxx
prefork
preforkjava
preforkunix
print_commands
proc proc
protoc protoc
pyqt5
pytest
qnxnto qnxnto
qt4
relocation relocation
remote remote
resx resx
@@ -120,18 +118,16 @@ WAFLIB_STRIP_EXTRAS="
satellite_assembly satellite_assembly
scala scala
slow_qt4 slow_qt4
smart_continue
softlink_libs softlink_libs
stale stale
stracedeps stracedeps
swig swig
syms syms
sync_exec
ticgt ticgt
unc
unity unity
use_config use_config
valadoc valadoc
waf_xattr
why why
win32_opts win32_opts
wix wix


+ 1
- 1
dbus/wscript View File

@@ -12,7 +12,7 @@ def options(opt):
def configure(conf): def configure(conf):
conf.env['BUILD_JACKDBUS'] = False conf.env['BUILD_JACKDBUS'] = False


if not conf.check_cfg(package='dbus-1', atleast_version='1.0.0', args='--cflags --libs', mandatory=False):
if not conf.check_cfg(package='dbus-1 >= 1.0.0', args='--cflags --libs', mandatory=False):
print(Logs.colors.RED + 'ERROR !! jackdbus will not be built because libdbus-dev is missing' + Logs.colors.NORMAL) print(Logs.colors.RED + 'ERROR !! jackdbus will not be built because libdbus-dev is missing' + Logs.colors.NORMAL)
return return




+ 4
- 4
waf View File

@@ -1,7 +1,7 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: ISO8859-1
# Thomas Nagy, 2005-2016
# encoding: latin-1
# Thomas Nagy, 2005-2018
#
""" """
Redistribution and use in source and binary forms, with or without Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions modification, are permitted provided that the following conditions
@@ -32,7 +32,7 @@ POSSIBILITY OF SUCH DAMAGE.


import os, sys, inspect import os, sys, inspect


VERSION="1.8.22"
VERSION="2.0.11"
REVISION="x" REVISION="x"
GIT="x" GIT="x"
INSTALL="x" INSTALL="x"


+ 635
- 492
waflib/Build.py
File diff suppressed because it is too large
View File


+ 46
- 34
waflib/ConfigSet.py View File

@@ -1,12 +1,12 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
# Thomas Nagy, 2005-2018 (ita)


""" """


ConfigSet: a special dict ConfigSet: a special dict


The values put in :py:class:`ConfigSet` must be lists
The values put in :py:class:`ConfigSet` must be serializable (dicts, lists, strings)
""" """


import copy, re, os import copy, re, os
@@ -15,7 +15,7 @@ re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)


class ConfigSet(object): class ConfigSet(object):
""" """
A dict that honor serialization and parent relationships. The serialization format
A copy-on-write dict with human-readable serialized format. The serialization format
is human-readable (python-like) and performed by using eval() and repr(). is human-readable (python-like) and performed by using eval() and repr().
For high performance prefer pickle. Do not store functions as they are not serializable. For high performance prefer pickle. Do not store functions as they are not serializable.


@@ -39,17 +39,20 @@ class ConfigSet(object):


def __contains__(self, key): def __contains__(self, key):
""" """
Enable the *in* syntax::
Enables the *in* syntax::


if 'foo' in env: if 'foo' in env:
print(env['foo']) print(env['foo'])
""" """
if key in self.table: return True
try: return self.parent.__contains__(key)
except AttributeError: return False # parent may not exist
if key in self.table:
return True
try:
return self.parent.__contains__(key)
except AttributeError:
return False # parent may not exist


def keys(self): def keys(self):
"""Dict interface (unknown purpose)"""
"""Dict interface"""
keys = set() keys = set()
cur = self cur = self
while cur: while cur:
@@ -59,6 +62,9 @@ class ConfigSet(object):
keys.sort() keys.sort()
return keys return keys


def __iter__(self):
return iter(self.keys())

def __str__(self): def __str__(self):
"""Text representation of the ConfigSet (for debugging purposes)""" """Text representation of the ConfigSet (for debugging purposes)"""
return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in self.keys()]) return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in self.keys()])
@@ -73,7 +79,7 @@ class ConfigSet(object):
""" """
try: try:
while 1: while 1:
x = self.table.get(key, None)
x = self.table.get(key)
if not x is None: if not x is None:
return x return x
self = self.parent self = self.parent
@@ -82,13 +88,13 @@ class ConfigSet(object):


def __setitem__(self, key, value): def __setitem__(self, key, value):
""" """
Dictionary interface: get value from key
Dictionary interface: set value from key
""" """
self.table[key] = value self.table[key] = value


def __delitem__(self, key): def __delitem__(self, key):
""" """
Dictionary interface: get value from key
Dictionary interface: mark the value as missing
""" """
self[key] = [] self[key] = []


@@ -101,7 +107,7 @@ class ConfigSet(object):
conf.env['value'] conf.env['value']
""" """
if name in self.__slots__: if name in self.__slots__:
return object.__getattr__(self, name)
return object.__getattribute__(self, name)
else: else:
return self[name] return self[name]


@@ -152,7 +158,7 @@ class ConfigSet(object):


def detach(self): def detach(self):
""" """
Detach self from its parent (if existing)
Detaches this instance from its parent (if present)


Modifying the parent :py:class:`ConfigSet` will not change the current object Modifying the parent :py:class:`ConfigSet` will not change the current object
Modifying this :py:class:`ConfigSet` will not modify the parent one. Modifying this :py:class:`ConfigSet` will not modify the parent one.
@@ -171,18 +177,19 @@ class ConfigSet(object):


def get_flat(self, key): def get_flat(self, key):
""" """
Return a value as a string. If the input is a list, the value returned is space-separated.
Returns a value as a string. If the input is a list, the value returned is space-separated.


:param key: key to use :param key: key to use
:type key: string :type key: string
""" """
s = self[key] s = self[key]
if isinstance(s, str): return s
if isinstance(s, str):
return s
return ' '.join(s) return ' '.join(s)


def _get_list_value_for_modification(self, key): def _get_list_value_for_modification(self, key):
""" """
Return a list value for further modification.
Returns a list value for further modification.


The list may be modified inplace and there is no need to do this afterwards:: The list may be modified inplace and there is no need to do this afterwards::


@@ -191,16 +198,20 @@ class ConfigSet(object):
try: try:
value = self.table[key] value = self.table[key]
except KeyError: except KeyError:
try: value = self.parent[key]
except AttributeError: value = []
if isinstance(value, list):
value = value[:]
try:
value = self.parent[key]
except AttributeError:
value = []
else: else:
value = [value]
if isinstance(value, list):
# force a copy
value = value[:]
else:
value = [value]
self.table[key] = value
else: else:
if not isinstance(value, list): if not isinstance(value, list):
value = [value]
self.table[key] = value
self.table[key] = value = [value]
return value return value


def append_value(self, var, val): def append_value(self, var, val):
@@ -232,7 +243,7 @@ class ConfigSet(object):


def append_unique(self, var, val): def append_unique(self, var, val):
""" """
Append a value to the specified item only if it's not already present::
Appends a value to the specified item only if it's not already present::


def build(bld): def build(bld):
bld.env.append_unique('CFLAGS', ['-O2', '-g']) bld.env.append_unique('CFLAGS', ['-O2', '-g'])
@@ -249,7 +260,7 @@ class ConfigSet(object):


def get_merged_dict(self): def get_merged_dict(self):
""" """
Compute the merged dictionary from the fusion of self and all its parent
Computes the merged dictionary from the fusion of self and all its parent


:rtype: a ConfigSet object :rtype: a ConfigSet object
""" """
@@ -257,8 +268,10 @@ class ConfigSet(object):
env = self env = self
while 1: while 1:
table_list.insert(0, env.table) table_list.insert(0, env.table)
try: env = env.parent
except AttributeError: break
try:
env = env.parent
except AttributeError:
break
merged_table = {} merged_table = {}
for table in table_list: for table in table_list:
merged_table.update(table) merged_table.update(table)
@@ -266,7 +279,7 @@ class ConfigSet(object):


def store(self, filename): def store(self, filename):
""" """
Write the :py:class:`ConfigSet` data into a file. See :py:meth:`ConfigSet.load` for reading such files.
Serializes the :py:class:`ConfigSet` data to a file. See :py:meth:`ConfigSet.load` for reading such files.


:param filename: file to use :param filename: file to use
:type filename: string :type filename: string
@@ -293,7 +306,7 @@ class ConfigSet(object):


def load(self, filename): def load(self, filename):
""" """
Retrieve the :py:class:`ConfigSet` data from a file. See :py:meth:`ConfigSet.store` for writing such files
Restores contents from a file (current values are not cleared). Files are written using :py:meth:`ConfigSet.store`.


:param filename: file to use :param filename: file to use
:type filename: string :type filename: string
@@ -303,21 +316,20 @@ class ConfigSet(object):
for m in re_imp.finditer(code): for m in re_imp.finditer(code):
g = m.group g = m.group
tbl[g(2)] = eval(g(3)) tbl[g(2)] = eval(g(3))
Logs.debug('env: %s' % str(self.table))
Logs.debug('env: %s', self.table)


def update(self, d): def update(self, d):
""" """
Dictionary interface: replace values from another dict
Dictionary interface: replace values with the ones from another dict


:param d: object to use the value from :param d: object to use the value from
:type d: dict-like object :type d: dict-like object
""" """
for k, v in d.items():
self[k] = v
self.table.update(d)


def stash(self): def stash(self):
""" """
Store the object state, to provide a kind of transaction support::
Stores the object state to provide transactionality semantics::


env = ConfigSet() env = ConfigSet()
env.stash() env.stash()


+ 78
- 105
waflib/Configure.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
# Thomas Nagy, 2005-2018 (ita)


""" """
Configuration system Configuration system
@@ -12,15 +12,9 @@ A :py:class:`waflib.Configure.ConfigurationContext` instance is created when ``w
* hold configuration routines such as ``find_program``, etc * hold configuration routines such as ``find_program``, etc
""" """


import os, shlex, sys, time, re, shutil
import os, re, shlex, shutil, sys, time, traceback
from waflib import ConfigSet, Utils, Options, Logs, Context, Build, Errors from waflib import ConfigSet, Utils, Options, Logs, Context, Build, Errors


BREAK = 'break'
"""In case of a configuration error, break"""

CONTINUE = 'continue'
"""In case of a configuration error, continue"""

WAF_CONFIG_LOG = 'config.log' WAF_CONFIG_LOG = 'config.log'
"""Name of the configuration log file""" """Name of the configuration log file"""


@@ -157,7 +151,7 @@ class ConfigurationContext(Context.Context):
self.msg('Setting out to', self.bldnode.abspath()) self.msg('Setting out to', self.bldnode.abspath())


if id(self.srcnode) == id(self.bldnode): if id(self.srcnode) == id(self.bldnode):
Logs.warn('Setting top == out (remember to use "update_outputs")')
Logs.warn('Setting top == out')
elif id(self.path) != id(self.srcnode): elif id(self.path) != id(self.srcnode):
if self.srcnode.is_child_of(self.path): if self.srcnode.is_child_of(self.path):
Logs.warn('Are you certain that you do not want to set top="." ?') Logs.warn('Are you certain that you do not want to set top="." ?')
@@ -173,8 +167,9 @@ class ConfigurationContext(Context.Context):
# consider the current path as the root directory (see prepare_impl). # consider the current path as the root directory (see prepare_impl).
# to remove: use 'waf distclean' # to remove: use 'waf distclean'
env = ConfigSet.ConfigSet() env = ConfigSet.ConfigSet()
env['argv'] = sys.argv
env['options'] = Options.options.__dict__
env.argv = sys.argv
env.options = Options.options.__dict__
env.config_cmd = self.cmd


env.run_dir = Context.run_dir env.run_dir = Context.run_dir
env.top_dir = Context.top_dir env.top_dir = Context.top_dir
@@ -182,15 +177,15 @@ class ConfigurationContext(Context.Context):


# conf.hash & conf.files hold wscript files paths and hash # conf.hash & conf.files hold wscript files paths and hash
# (used only by Configure.autoconfig) # (used only by Configure.autoconfig)
env['hash'] = self.hash
env['files'] = self.files
env['environ'] = dict(self.environ)
env.hash = self.hash
env.files = self.files
env.environ = dict(self.environ)


if not self.env.NO_LOCK_IN_RUN and not getattr(Options.options, 'no_lock_in_run'):
if not (self.env.NO_LOCK_IN_RUN or env.environ.get('NO_LOCK_IN_RUN') or getattr(Options.options, 'no_lock_in_run')):
env.store(os.path.join(Context.run_dir, Options.lockfile)) env.store(os.path.join(Context.run_dir, Options.lockfile))
if not self.env.NO_LOCK_IN_TOP and not getattr(Options.options, 'no_lock_in_top'):
if not (self.env.NO_LOCK_IN_TOP or env.environ.get('NO_LOCK_IN_TOP') or getattr(Options.options, 'no_lock_in_top')):
env.store(os.path.join(Context.top_dir, Options.lockfile)) env.store(os.path.join(Context.top_dir, Options.lockfile))
if not self.env.NO_LOCK_IN_OUT and not getattr(Options.options, 'no_lock_in_out'):
if not (self.env.NO_LOCK_IN_OUT or env.environ.get('NO_LOCK_IN_OUT') or getattr(Options.options, 'no_lock_in_out')):
env.store(os.path.join(Context.out_dir, Options.lockfile)) env.store(os.path.join(Context.out_dir, Options.lockfile))


def prepare_env(self, env): def prepare_env(self, env):
@@ -202,17 +197,17 @@ class ConfigurationContext(Context.Context):
""" """
if not env.PREFIX: if not env.PREFIX:
if Options.options.prefix or Utils.is_win32: if Options.options.prefix or Utils.is_win32:
env.PREFIX = Utils.sane_path(Options.options.prefix)
env.PREFIX = Options.options.prefix
else: else:
env.PREFIX = ''
env.PREFIX = '/'
if not env.BINDIR: if not env.BINDIR:
if Options.options.bindir: if Options.options.bindir:
env.BINDIR = Utils.sane_path(Options.options.bindir)
env.BINDIR = Options.options.bindir
else: else:
env.BINDIR = Utils.subst_vars('${PREFIX}/bin', env) env.BINDIR = Utils.subst_vars('${PREFIX}/bin', env)
if not env.LIBDIR: if not env.LIBDIR:
if Options.options.libdir: if Options.options.libdir:
env.LIBDIR = Utils.sane_path(Options.options.libdir)
env.LIBDIR = Options.options.libdir
else: else:
env.LIBDIR = Utils.subst_vars('${PREFIX}/lib%s' % Utils.lib64(), env) env.LIBDIR = Utils.subst_vars('${PREFIX}/lib%s' % Utils.lib64(), env)


@@ -228,38 +223,42 @@ class ConfigurationContext(Context.Context):
tmpenv = self.all_envs[key] tmpenv = self.all_envs[key]
tmpenv.store(os.path.join(self.cachedir.abspath(), key + Build.CACHE_SUFFIX)) tmpenv.store(os.path.join(self.cachedir.abspath(), key + Build.CACHE_SUFFIX))


def load(self, input, tooldir=None, funs=None, with_sys_path=True):
def load(self, tool_list, tooldir=None, funs=None, with_sys_path=True, cache=False):
""" """
Load Waf tools, which will be imported whenever a build is started. Load Waf tools, which will be imported whenever a build is started.


:param input: waf tools to import
:type input: list of string
:param tool_list: waf tools to import
:type tool_list: list of string
:param tooldir: paths for the imports :param tooldir: paths for the imports
:type tooldir: list of string :type tooldir: list of string
:param funs: functions to execute from the waf tools :param funs: functions to execute from the waf tools
:type funs: list of string :type funs: list of string
:param cache: whether to prevent the tool from running twice
:type cache: bool
""" """


tools = Utils.to_list(input)
if tooldir: tooldir = Utils.to_list(tooldir)
tools = Utils.to_list(tool_list)
if tooldir:
tooldir = Utils.to_list(tooldir)
for tool in tools: for tool in tools:
# avoid loading the same tool more than once with the same functions # avoid loading the same tool more than once with the same functions
# used by composite projects # used by composite projects


mag = (tool, id(self.env), tooldir, funs)
if mag in self.tool_cache:
self.to_log('(tool %s is already loaded, skipping)' % tool)
continue
self.tool_cache.append(mag)
if cache:
mag = (tool, id(self.env), tooldir, funs)
if mag in self.tool_cache:
self.to_log('(tool %s is already loaded, skipping)' % tool)
continue
self.tool_cache.append(mag)


module = None module = None
try: try:
module = Context.load_tool(tool, tooldir, ctx=self, with_sys_path=with_sys_path) module = Context.load_tool(tool, tooldir, ctx=self, with_sys_path=with_sys_path)
except ImportError as e: except ImportError as e:
self.fatal('Could not load the Waf tool %r from %r\n%s' % (tool, sys.path, e))
self.fatal('Could not load the Waf tool %r from %r\n%s' % (tool, getattr(e, 'waf_sys_path', sys.path), e))
except Exception as e: except Exception as e:
self.to_log('imp %r (%r & %r)' % (tool, tooldir, funs)) self.to_log('imp %r (%r & %r)' % (tool, tooldir, funs))
self.to_log(Utils.ex_stack())
self.to_log(traceback.format_exc())
raise raise


if funs is not None: if funs is not None:
@@ -267,8 +266,10 @@ class ConfigurationContext(Context.Context):
else: else:
func = getattr(module, 'configure', None) func = getattr(module, 'configure', None)
if func: if func:
if type(func) is type(Utils.readf): func(self)
else: self.eval_rules(func)
if type(func) is type(Utils.readf):
func(self)
else:
self.eval_rules(func)


self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs}) self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})


@@ -285,8 +286,7 @@ class ConfigurationContext(Context.Context):


def eval_rules(self, rules): def eval_rules(self, rules):
""" """
Execute the configuration tests. The method :py:meth:`waflib.Configure.ConfigurationContext.err_handler`
is used to process the eventual exceptions
Execute configuration tests provided as list of functions to run


:param rules: list of configuration method names :param rules: list of configuration method names
:type rules: list of string :type rules: list of string
@@ -294,28 +294,9 @@ class ConfigurationContext(Context.Context):
self.rules = Utils.to_list(rules) self.rules = Utils.to_list(rules)
for x in self.rules: for x in self.rules:
f = getattr(self, x) f = getattr(self, x)
if not f: self.fatal("No such method '%s'." % x)
try:
f()
except Exception as e:
ret = self.err_handler(x, e)
if ret == BREAK:
break
elif ret == CONTINUE:
continue
else:
raise

def err_handler(self, fun, error):
"""
Error handler for the configuration tests, the default is to let the exception raise

:param fun: configuration test
:type fun: method
:param error: exception
:type error: exception
"""
pass
if not f:
self.fatal('No such configuration function %r' % x)
f()


def conf(f): def conf(f):
""" """
@@ -330,11 +311,7 @@ def conf(f):
:type f: function :type f: function
""" """
def fun(*k, **kw): def fun(*k, **kw):
mandatory = True
if 'mandatory' in kw:
mandatory = kw['mandatory']
del kw['mandatory']

mandatory = kw.pop('mandatory', True)
try: try:
return f(*k, **kw) return f(*k, **kw)
except Errors.ConfigurationError: except Errors.ConfigurationError:
@@ -347,7 +324,7 @@ def conf(f):
return f return f


@conf @conf
def add_os_flags(self, var, dest=None, dup=True):
def add_os_flags(self, var, dest=None, dup=False):
""" """
Import operating system environment values into ``conf.env`` dict:: Import operating system environment values into ``conf.env`` dict::


@@ -365,7 +342,6 @@ def add_os_flags(self, var, dest=None, dup=True):
flags = shlex.split(self.environ[var]) flags = shlex.split(self.environ[var])
except KeyError: except KeyError:
return return
# TODO: in waf 1.9, make dup=False the default
if dup or ''.join(flags) not in ''.join(Utils.to_list(self.env[dest or var])): if dup or ''.join(flags) not in ''.join(Utils.to_list(self.env[dest or var])):
self.env.append_value(dest or var, flags) self.env.append_value(dest or var, flags)


@@ -377,21 +353,26 @@ def cmd_to_list(self, cmd):
:param cmd: command :param cmd: command
:type cmd: a string or a list of string :type cmd: a string or a list of string
""" """
if isinstance(cmd, str) and cmd.find(' '):
try:
os.stat(cmd)
except OSError:
if isinstance(cmd, str):
if os.path.isfile(cmd):
# do not take any risk
return [cmd]
if os.sep == '/':
return shlex.split(cmd) return shlex.split(cmd)
else: else:
return [cmd]
try:
return shlex.split(cmd, posix=False)
except TypeError:
# Python 2.5 on windows?
return shlex.split(cmd)
return cmd return cmd


@conf @conf
def check_waf_version(self, mini='1.7.99', maxi='1.9.0', **kw):
def check_waf_version(self, mini='1.9.99', maxi='2.1.0', **kw):
""" """
Raise a Configuration error if the Waf version does not strictly match the given bounds:: Raise a Configuration error if the Waf version does not strictly match the given bounds::


conf.check_waf_version(mini='1.8.0', maxi='1.9.0')
conf.check_waf_version(mini='1.9.99', maxi='2.1.0')


:type mini: number, tuple or string :type mini: number, tuple or string
:param mini: Minimum required version :param mini: Minimum required version
@@ -413,7 +394,7 @@ def find_file(self, filename, path_list=[]):


:param filename: name of the file to search for :param filename: name of the file to search for
:param path_list: list of directories to search :param path_list: list of directories to search
:return: the first occurrence filename or '' if filename could not be found
:return: the first matching filename; else a configuration exception is raised
""" """
for n in Utils.to_list(filename): for n in Utils.to_list(filename):
for d in Utils.to_list(path_list): for d in Utils.to_list(path_list):
@@ -433,14 +414,17 @@ def find_program(self, filename, **kw):


:param path_list: paths to use for searching :param path_list: paths to use for searching
:type param_list: list of string :type param_list: list of string
:param var: store the result to conf.env[var], by default use filename.upper()
:param var: store the result to conf.env[var] where var defaults to filename.upper() if not provided; the result is stored as a list of strings
:type var: string :type var: string
:param ext: list of extensions for the binary (do not add an extension for portability)
:type ext: list of string
:param value: obtain the program from the value passed exclusively
:type value: list or string (list is preferred)
:param exts: list of extensions for the binary (do not add an extension for portability)
:type exts: list of string
:param msg: name to display in the log, by default filename is used :param msg: name to display in the log, by default filename is used
:type msg: string :type msg: string
:param interpreter: interpreter for the program :param interpreter: interpreter for the program
:type interpreter: ConfigSet variable key :type interpreter: ConfigSet variable key
:raises: :py:class:`waflib.Errors.ConfigurationError`
""" """


exts = kw.get('exts', Utils.is_win32 and '.exe,.com,.bat,.cmd' or ',.sh,.pl,.py') exts = kw.get('exts', Utils.is_win32 and '.exe,.com,.bat,.cmd' or ',.sh,.pl,.py')
@@ -462,18 +446,15 @@ def find_program(self, filename, **kw):
else: else:
path_list = environ.get('PATH', '').split(os.pathsep) path_list = environ.get('PATH', '').split(os.pathsep)


if var in environ:
filename = environ[var]
if os.path.isfile(filename):
# typical CC=/usr/bin/gcc waf configure build
ret = [filename]
else:
# case CC='ccache gcc' waf configure build
ret = self.cmd_to_list(filename)
if kw.get('value'):
# user-provided in command-line options and passed to find_program
ret = self.cmd_to_list(kw['value'])
elif environ.get(var):
# user-provided in the os environment
ret = self.cmd_to_list(environ[var])
elif self.env[var]: elif self.env[var]:
# set by the user in the wscript file
ret = self.env[var]
ret = self.cmd_to_list(ret)
# a default option in the wscript file
ret = self.cmd_to_list(self.env[var])
else: else:
if not ret: if not ret:
ret = self.find_binary(filename, exts.split(','), path_list) ret = self.find_binary(filename, exts.split(','), path_list)
@@ -483,7 +464,6 @@ def find_program(self, filename, **kw):
ret = Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE, filename) ret = Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE, filename)
ret = self.cmd_to_list(ret) ret = self.cmd_to_list(ret)



if ret: if ret:
if len(ret) == 1: if len(ret) == 1:
retmsg = ret[0] retmsg = ret[0]
@@ -492,14 +472,14 @@ def find_program(self, filename, **kw):
else: else:
retmsg = False retmsg = False


self.msg("Checking for program '%s'" % msg, retmsg, **kw)
if not kw.get('quiet', None):
self.msg('Checking for program %r' % msg, retmsg, **kw)
if not kw.get('quiet'):
self.to_log('find program=%r paths=%r var=%r -> %r' % (filename, path_list, var, ret)) self.to_log('find program=%r paths=%r var=%r -> %r' % (filename, path_list, var, ret))


if not ret: if not ret:
self.fatal(kw.get('errmsg', '') or 'Could not find the program %r' % filename) self.fatal(kw.get('errmsg', '') or 'Could not find the program %r' % filename)


interpreter = kw.get('interpreter', None)
interpreter = kw.get('interpreter')
if interpreter is None: if interpreter is None:
if not Utils.check_exe(ret[0], env=environ): if not Utils.check_exe(ret[0], env=environ):
self.fatal('Program %r is not executable' % ret) self.fatal('Program %r is not executable' % ret)
@@ -554,7 +534,6 @@ def run_build(self, *k, **kw):
$ waf configure --confcache $ waf configure --confcache


""" """

lst = [str(v) for (p, v) in kw.items() if p != 'env'] lst = [str(v) for (p, v) in kw.items() if p != 'env']
h = Utils.h_list(lst) h = Utils.h_list(lst)
dir = self.bldnode.abspath() + os.sep + (not Utils.is_win32 and '.' or '') + 'conf_check_' + Utils.to_hex(h) dir = self.bldnode.abspath() + os.sep + (not Utils.is_win32 and '.' or '') + 'conf_check_' + Utils.to_hex(h)
@@ -573,9 +552,7 @@ def run_build(self, *k, **kw):
if cachemode == 1: if cachemode == 1:
try: try:
proj = ConfigSet.ConfigSet(os.path.join(dir, 'cache_run_build')) proj = ConfigSet.ConfigSet(os.path.join(dir, 'cache_run_build'))
except OSError:
pass
except IOError:
except EnvironmentError:
pass pass
else: else:
ret = proj['cache_run_build'] ret = proj['cache_run_build']
@@ -588,7 +565,8 @@ def run_build(self, *k, **kw):
if not os.path.exists(bdir): if not os.path.exists(bdir):
os.makedirs(bdir) os.makedirs(bdir)


self.test_bld = bld = Build.BuildContext(top_dir=dir, out_dir=bdir)
cls_name = kw.get('run_build_cls') or getattr(self, 'run_build_cls', 'build')
self.test_bld = bld = Context.create_context(cls_name, top_dir=dir, out_dir=bdir)
bld.init_dirs() bld.init_dirs()
bld.progress_bar = 0 bld.progress_bar = 0
bld.targets = '*' bld.targets = '*'
@@ -597,17 +575,15 @@ def run_build(self, *k, **kw):
bld.all_envs.update(self.all_envs) # not really necessary bld.all_envs.update(self.all_envs) # not really necessary
bld.env = kw['env'] bld.env = kw['env']


# OMG huge hack
bld.kw = kw bld.kw = kw
bld.conf = self bld.conf = self
kw['build_fun'](bld) kw['build_fun'](bld)

ret = -1 ret = -1
try: try:
try: try:
bld.compile() bld.compile()
except Errors.WafError: except Errors.WafError:
ret = 'Test does not build: %s' % Utils.ex_stack()
ret = 'Test does not build: %s' % traceback.format_exc()
self.fatal(ret) self.fatal(ret)
else: else:
ret = getattr(bld, 'retval', 0) ret = getattr(bld, 'retval', 0)
@@ -619,7 +595,6 @@ def run_build(self, *k, **kw):
proj.store(os.path.join(dir, 'cache_run_build')) proj.store(os.path.join(dir, 'cache_run_build'))
else: else:
shutil.rmtree(dir) shutil.rmtree(dir)

return ret return ret


@conf @conf
@@ -635,7 +610,7 @@ def test(self, *k, **kw):
kw['env'] = self.env.derive() kw['env'] = self.env.derive()


# validate_c for example # validate_c for example
if kw.get('validate', None):
if kw.get('validate'):
kw['validate'](kw) kw['validate'](kw)


self.start_msg(kw['msg'], **kw) self.start_msg(kw['msg'], **kw)
@@ -651,7 +626,7 @@ def test(self, *k, **kw):
else: else:
kw['success'] = ret kw['success'] = ret


if kw.get('post_check', None):
if kw.get('post_check'):
ret = kw['post_check'](kw) ret = kw['post_check'](kw)


if ret: if ret:
@@ -661,5 +636,3 @@ def test(self, *k, **kw):
self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw) self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw)
return ret return ret





+ 169
- 139
waflib/Context.py View File

@@ -1,9 +1,9 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2010-2016 (ita)
# Thomas Nagy, 2010-2018 (ita)


""" """
Classes and functions required for waf commands
Classes and functions enabling the command system
""" """


import os, re, imp, sys import os, re, imp, sys
@@ -11,16 +11,16 @@ from waflib import Utils, Errors, Logs
import waflib.Node import waflib.Node


# the following 3 constants are updated on each new release (do not touch) # the following 3 constants are updated on each new release (do not touch)
HEXVERSION=0x1081600
HEXVERSION=0x2000b00
"""Constant updated on new releases""" """Constant updated on new releases"""


WAFVERSION="1.8.22"
WAFVERSION="2.0.11"
"""Constant updated on new releases""" """Constant updated on new releases"""


WAFREVISION="17d4d4faa52c454eb3580e482df69b2a80e19fa7"
WAFREVISION="a97f6fb0941091b4966b625f15ec32fa783a8bec"
"""Git revision when the waf version is updated""" """Git revision when the waf version is updated"""


ABI = 98
ABI = 20
"""Version of the build data cache file format (used in :py:const:`waflib.Context.DBFILE`)""" """Version of the build data cache file format (used in :py:const:`waflib.Context.DBFILE`)"""


DBFILE = '.wafpickle-%s-%d-%d' % (sys.platform, sys.hexversion, ABI) DBFILE = '.wafpickle-%s-%d-%d' % (sys.platform, sys.hexversion, ABI)
@@ -41,7 +41,6 @@ OUT = 'out'
WSCRIPT_FILE = 'wscript' WSCRIPT_FILE = 'wscript'
"""Name of the waf script files""" """Name of the waf script files"""



launch_dir = '' launch_dir = ''
"""Directory from which waf has been called""" """Directory from which waf has been called"""
run_dir = '' run_dir = ''
@@ -53,23 +52,12 @@ out_dir = ''
waf_dir = '' waf_dir = ''
"""Directory containing the waf modules""" """Directory containing the waf modules"""


local_repo = ''
"""Local repository containing additional Waf tools (plugins)"""
remote_repo = 'https://raw.githubusercontent.com/waf-project/waf/master/'
"""
Remote directory containing downloadable waf tools. The missing tools can be downloaded by using::

$ waf configure --download
"""

remote_locs = ['waflib/extras', 'waflib/Tools']
"""
Remote directories for use with :py:const:`waflib.Context.remote_repo`
"""
default_encoding = Utils.console_encoding()
"""Encoding to use when reading outputs from other processes"""


g_module = None g_module = None
""" """
Module representing the main wscript file (see :py:const:`waflib.Context.run_dir`)
Module representing the top-level wscript file (see :py:const:`waflib.Context.run_dir`)
""" """


STDOUT = 1 STDOUT = 1
@@ -82,20 +70,20 @@ List of :py:class:`waflib.Context.Context` subclasses that can be used as waf co
are added automatically by a metaclass. are added automatically by a metaclass.
""" """



def create_context(cmd_name, *k, **kw): def create_context(cmd_name, *k, **kw):
""" """
Create a new :py:class:`waflib.Context.Context` instance corresponding to the given command.
Returns a new :py:class:`waflib.Context.Context` instance corresponding to the given command.
Used in particular by :py:func:`waflib.Scripting.run_command` Used in particular by :py:func:`waflib.Scripting.run_command`


:param cmd_name: command
:param cmd_name: command name
:type cmd_name: string :type cmd_name: string
:param k: arguments to give to the context class initializer :param k: arguments to give to the context class initializer
:type k: list :type k: list
:param k: keyword arguments to give to the context class initializer :param k: keyword arguments to give to the context class initializer
:type k: dict :type k: dict
:return: Context object
:rtype: :py:class:`waflib.Context.Context`
""" """
global classes
for x in classes: for x in classes:
if x.cmd == cmd_name: if x.cmd == cmd_name:
return x(*k, **kw) return x(*k, **kw)
@@ -105,14 +93,15 @@ def create_context(cmd_name, *k, **kw):


class store_context(type): class store_context(type):
""" """
Metaclass for storing the command classes into the list :py:const:`waflib.Context.classes`
Context classes must provide an attribute 'cmd' representing the command to execute
Metaclass that registers command classes into the list :py:const:`waflib.Context.classes`
Context classes must provide an attribute 'cmd' representing the command name, and a function
attribute 'fun' representing the function name that the command uses.
""" """
def __init__(cls, name, bases, dict):
super(store_context, cls).__init__(name, bases, dict)
def __init__(cls, name, bases, dct):
super(store_context, cls).__init__(name, bases, dct)
name = cls.__name__ name = cls.__name__


if name == 'ctx' or name == 'Context':
if name in ('ctx', 'Context'):
return return


try: try:
@@ -123,11 +112,10 @@ class store_context(type):
if not getattr(cls, 'fun', None): if not getattr(cls, 'fun', None):
cls.fun = cls.cmd cls.fun = cls.cmd


global classes
classes.insert(0, cls) classes.insert(0, cls)


ctx = store_context('ctx', (object,), {}) ctx = store_context('ctx', (object,), {})
"""Base class for the :py:class:`waflib.Context.Context` classes"""
"""Base class for all :py:class:`waflib.Context.Context` classes"""


class Context(ctx): class Context(ctx):
""" """
@@ -138,7 +126,7 @@ class Context(ctx):
def foo(ctx): def foo(ctx):
print(ctx.__class__.__name__) # waflib.Context.Context print(ctx.__class__.__name__) # waflib.Context.Context


Subclasses must define the attribute 'cmd':
Subclasses must define the class attributes 'cmd' and 'fun':


:param cmd: command to execute as in ``waf cmd`` :param cmd: command to execute as in ``waf cmd``
:type cmd: string :type cmd: string
@@ -156,19 +144,18 @@ class Context(ctx):


tools = {} tools = {}
""" """
A cache for modules (wscript files) read by :py:meth:`Context.Context.load`
A module cache for wscript files; see :py:meth:`Context.Context.load`
""" """


def __init__(self, **kw): def __init__(self, **kw):
try: try:
rd = kw['run_dir'] rd = kw['run_dir']
except KeyError: except KeyError:
global run_dir
rd = run_dir rd = run_dir


# binds the context to the nodes in use to avoid a context singleton # binds the context to the nodes in use to avoid a context singleton
self.node_class = type("Nod3", (waflib.Node.Node,), {})
self.node_class.__module__ = "waflib.Node"
self.node_class = type('Nod3', (waflib.Node.Node,), {})
self.node_class.__module__ = 'waflib.Node'
self.node_class.ctx = self self.node_class.ctx = self


self.root = self.node_class('', None) self.root = self.node_class('', None)
@@ -179,18 +166,9 @@ class Context(ctx):
self.exec_dict = {'ctx':self, 'conf':self, 'bld':self, 'opt':self} self.exec_dict = {'ctx':self, 'conf':self, 'bld':self, 'opt':self}
self.logger = None self.logger = None


def __hash__(self):
"""
Return a hash value for storing context objects in dicts or sets. The value is not persistent.

:return: hash value
:rtype: int
"""
return id(self)

def finalize(self): def finalize(self):
""" """
Use to free resources such as open files potentially held by the logger
Called to free resources such as logger files
""" """
try: try:
logger = self.logger logger = self.logger
@@ -202,11 +180,11 @@ class Context(ctx):


def load(self, tool_list, *k, **kw): def load(self, tool_list, *k, **kw):
""" """
Load a Waf tool as a module, and try calling the function named :py:const:`waflib.Context.Context.fun` from it.
A ``tooldir`` value may be provided as a list of module paths.
Loads a Waf tool as a module, and try calling the function named :py:const:`waflib.Context.Context.fun`
from it. A ``tooldir`` argument may be provided as a list of module paths.


:param tool_list: list of Waf tool names to load
:type tool_list: list of string or space-separated string :type tool_list: list of string or space-separated string
:param tool_list: list of Waf tools to use
""" """
tools = Utils.to_list(tool_list) tools = Utils.to_list(tool_list)
path = Utils.to_list(kw.get('tooldir', '')) path = Utils.to_list(kw.get('tooldir', ''))
@@ -220,15 +198,16 @@ class Context(ctx):


def execute(self): def execute(self):
""" """
Execute the command. Redefine this method in subclasses.
Here, it calls the function name in the top-level wscript file. Most subclasses
redefine this method to provide additional functionality.
""" """
global g_module
self.recurse([os.path.dirname(g_module.root_path)]) self.recurse([os.path.dirname(g_module.root_path)])


def pre_recurse(self, node): def pre_recurse(self, node):
""" """
Method executed immediately before a folder is read by :py:meth:`waflib.Context.Context.recurse`. The node given is set
as an attribute ``self.cur_script``, and as the current path ``self.path``
Method executed immediately before a folder is read by :py:meth:`waflib.Context.Context.recurse`.
The current script is bound as a Node object on ``self.cur_script``, and the current path
is bound to ``self.path``


:param node: script :param node: script
:type node: :py:class:`waflib.Node.Node` :type node: :py:class:`waflib.Node.Node`
@@ -240,7 +219,7 @@ class Context(ctx):


def post_recurse(self, node): def post_recurse(self, node):
""" """
Restore ``self.cur_script`` and ``self.path`` right after :py:meth:`waflib.Context.Context.recurse` terminates.
Restores ``self.cur_script`` and ``self.path`` right after :py:meth:`waflib.Context.Context.recurse` terminates.


:param node: script :param node: script
:type node: :py:class:`waflib.Node.Node` :type node: :py:class:`waflib.Node.Node`
@@ -251,10 +230,13 @@ class Context(ctx):


def recurse(self, dirs, name=None, mandatory=True, once=True, encoding=None): def recurse(self, dirs, name=None, mandatory=True, once=True, encoding=None):
""" """
Run user code from the supplied list of directories.
Runs user-provided functions from the supplied list of directories.
The directories can be either absolute, or relative to the directory The directories can be either absolute, or relative to the directory
of the wscript file. The methods :py:meth:`waflib.Context.Context.pre_recurse` and :py:meth:`waflib.Context.Context.post_recurse`
are called immediately before and after a script has been executed.
of the wscript file

The methods :py:meth:`waflib.Context.Context.pre_recurse` and
:py:meth:`waflib.Context.Context.post_recurse` are called immediately before
and after a script has been executed.


:param dirs: List of directories to visit :param dirs: List of directories to visit
:type dirs: list of string or space-separated string :type dirs: list of string or space-separated string
@@ -300,7 +282,7 @@ class Context(ctx):
if not user_function: if not user_function:
if not mandatory: if not mandatory:
continue continue
raise Errors.WafError('No function %s defined in %s' % (name or self.fun, node.abspath()))
raise Errors.WafError('No function %r defined in %s' % (name or self.fun, node.abspath()))
user_function(self) user_function(self)
finally: finally:
self.post_recurse(node) self.post_recurse(node)
@@ -313,25 +295,39 @@ class Context(ctx):
raise Errors.WafError('Cannot read the folder %r' % d) raise Errors.WafError('Cannot read the folder %r' % d)
raise Errors.WafError('No wscript file in directory %s' % d) raise Errors.WafError('No wscript file in directory %s' % d)


def log_command(self, cmd, kw):
if Logs.verbose:
fmt = os.environ.get('WAF_CMD_FORMAT')
if fmt == 'string':
if not isinstance(cmd, str):
cmd = Utils.shell_escape(cmd)
Logs.debug('runner: %r', cmd)
Logs.debug('runner_env: kw=%s', kw)

def exec_command(self, cmd, **kw): def exec_command(self, cmd, **kw):
""" """
Execute a command and return the exit status. If the context has the attribute 'log',
capture and log the process stderr/stdout for logging purposes::
Runs an external process and returns the exit status::


def run(tsk): def run(tsk):
ret = tsk.generator.bld.exec_command('touch foo.txt') ret = tsk.generator.bld.exec_command('touch foo.txt')
return ret return ret


This method captures the standard/error outputs (Issue 1101), but it does not return the values
unlike :py:meth:`waflib.Context.Context.cmd_and_log`
If the context has the attribute 'log', then captures and logs the process stderr/stdout.
Unlike :py:meth:`waflib.Context.Context.cmd_and_log`, this method does not return the
stdout/stderr values captured.


:param cmd: command argument for subprocess.Popen :param cmd: command argument for subprocess.Popen
:type cmd: string or list
:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate. :param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
:type kw: dict
:returns: process exit status
:rtype: integer
:raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
:raises: :py:class:`waflib.Errors.WafError` in case of execution failure
""" """
subprocess = Utils.subprocess subprocess = Utils.subprocess
kw['shell'] = isinstance(cmd, str) kw['shell'] = isinstance(cmd, str)
Logs.debug('runner: %r' % (cmd,))
Logs.debug('runner_env: kw=%s' % kw)
self.log_command(cmd, kw)


if self.logger: if self.logger:
self.logger.info(cmd) self.logger.info(cmd)
@@ -342,40 +338,42 @@ class Context(ctx):
kw['stderr'] = subprocess.PIPE kw['stderr'] = subprocess.PIPE


if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]): if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
raise Errors.WafError("Program %s not found!" % cmd[0])
raise Errors.WafError('Program %s not found!' % cmd[0])


wargs = {}
cargs = {}
if 'timeout' in kw: if 'timeout' in kw:
if kw['timeout'] is not None:
wargs['timeout'] = kw['timeout']
if sys.hexversion >= 0x3030000:
cargs['timeout'] = kw['timeout']
if not 'start_new_session' in kw:
kw['start_new_session'] = True
del kw['timeout'] del kw['timeout']
if 'input' in kw: if 'input' in kw:
if kw['input']: if kw['input']:
wargs['input'] = kw['input']
cargs['input'] = kw['input']
kw['stdin'] = subprocess.PIPE kw['stdin'] = subprocess.PIPE
del kw['input'] del kw['input']


if 'cwd' in kw:
if not isinstance(kw['cwd'], str):
kw['cwd'] = kw['cwd'].abspath()

encoding = kw.pop('decode_as', default_encoding)

try: try:
if kw['stdout'] or kw['stderr']:
p = subprocess.Popen(cmd, **kw)
(out, err) = p.communicate(**wargs)
ret = p.returncode
else:
out, err = (None, None)
ret = subprocess.Popen(cmd, **kw).wait(**wargs)
ret, out, err = Utils.run_process(cmd, kw, cargs)
except Exception as e: except Exception as e:
raise Errors.WafError('Execution failure: %s' % str(e), ex=e) raise Errors.WafError('Execution failure: %s' % str(e), ex=e)


if out: if out:
if not isinstance(out, str): if not isinstance(out, str):
out = out.decode(sys.stdout.encoding or 'iso8859-1')
out = out.decode(encoding, errors='replace')
if self.logger: if self.logger:
self.logger.debug('out: %s' % out)
self.logger.debug('out: %s', out)
else: else:
Logs.info(out, extra={'stream':sys.stdout, 'c1': ''}) Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
if err: if err:
if not isinstance(err, str): if not isinstance(err, str):
err = err.decode(sys.stdout.encoding or 'iso8859-1')
err = err.decode(encoding, errors='replace')
if self.logger: if self.logger:
self.logger.error('err: %s' % err) self.logger.error('err: %s' % err)
else: else:
@@ -385,9 +383,9 @@ class Context(ctx):


def cmd_and_log(self, cmd, **kw): def cmd_and_log(self, cmd, **kw):
""" """
Execute a command and return stdout/stderr if the execution is successful.
Executes a process and returns stdout/stderr if the execution is successful.
An exception is thrown when the exit status is non-0. In that case, both stderr and stdout An exception is thrown when the exit status is non-0. In that case, both stderr and stdout
will be bound to the WafError object::
will be bound to the WafError object (configuration tests)::


def configure(conf): def configure(conf):
out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH) out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH)
@@ -395,65 +393,69 @@ class Context(ctx):
(out, err) = conf.cmd_and_log(cmd, input='\\n'.encode(), output=waflib.Context.STDOUT) (out, err) = conf.cmd_and_log(cmd, input='\\n'.encode(), output=waflib.Context.STDOUT)
try: try:
conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH) conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH)
except Exception as e:
except Errors.WafError as e:
print(e.stdout, e.stderr) print(e.stdout, e.stderr)


:param cmd: args for subprocess.Popen :param cmd: args for subprocess.Popen
:type cmd: list or string
:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate. :param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
:type kw: dict
:returns: a tuple containing the contents of stdout and stderr
:rtype: string
:raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
:raises: :py:class:`waflib.Errors.WafError` in case of execution failure; stdout/stderr/returncode are bound to the exception object
""" """
subprocess = Utils.subprocess subprocess = Utils.subprocess
kw['shell'] = isinstance(cmd, str) kw['shell'] = isinstance(cmd, str)
Logs.debug('runner: %r' % (cmd,))

if 'quiet' in kw:
quiet = kw['quiet']
del kw['quiet']
else:
quiet = None
self.log_command(cmd, kw)


if 'output' in kw:
to_ret = kw['output']
del kw['output']
else:
to_ret = STDOUT
quiet = kw.pop('quiet', None)
to_ret = kw.pop('output', STDOUT)


if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]): if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
raise Errors.WafError("Program %s not found!" % cmd[0])
raise Errors.WafError('Program %r not found!' % cmd[0])


kw['stdout'] = kw['stderr'] = subprocess.PIPE kw['stdout'] = kw['stderr'] = subprocess.PIPE
if quiet is None: if quiet is None:
self.to_log(cmd) self.to_log(cmd)


wargs = {}
cargs = {}
if 'timeout' in kw: if 'timeout' in kw:
if kw['timeout'] is not None:
wargs['timeout'] = kw['timeout']
if sys.hexversion >= 0x3030000:
cargs['timeout'] = kw['timeout']
if not 'start_new_session' in kw:
kw['start_new_session'] = True
del kw['timeout'] del kw['timeout']
if 'input' in kw: if 'input' in kw:
if kw['input']: if kw['input']:
wargs['input'] = kw['input']
cargs['input'] = kw['input']
kw['stdin'] = subprocess.PIPE kw['stdin'] = subprocess.PIPE
del kw['input'] del kw['input']


if 'cwd' in kw:
if not isinstance(kw['cwd'], str):
kw['cwd'] = kw['cwd'].abspath()

encoding = kw.pop('decode_as', default_encoding)

try: try:
p = subprocess.Popen(cmd, **kw)
(out, err) = p.communicate(**wargs)
ret, out, err = Utils.run_process(cmd, kw, cargs)
except Exception as e: except Exception as e:
raise Errors.WafError('Execution failure: %s' % str(e), ex=e) raise Errors.WafError('Execution failure: %s' % str(e), ex=e)


if not isinstance(out, str): if not isinstance(out, str):
out = out.decode(sys.stdout.encoding or 'iso8859-1')
out = out.decode(encoding, errors='replace')
if not isinstance(err, str): if not isinstance(err, str):
err = err.decode(sys.stdout.encoding or 'iso8859-1')
err = err.decode(encoding, errors='replace')


if out and quiet != STDOUT and quiet != BOTH: if out and quiet != STDOUT and quiet != BOTH:
self.to_log('out: %s' % out) self.to_log('out: %s' % out)
if err and quiet != STDERR and quiet != BOTH: if err and quiet != STDERR and quiet != BOTH:
self.to_log('err: %s' % err) self.to_log('err: %s' % err)


if p.returncode:
e = Errors.WafError('Command %r returned %r' % (cmd, p.returncode))
e.returncode = p.returncode
if ret:
e = Errors.WafError('Command %r returned %r' % (cmd, ret))
e.returncode = ret
e.stderr = err e.stderr = err
e.stdout = out e.stdout = out
raise e raise e
@@ -466,7 +468,8 @@ class Context(ctx):


def fatal(self, msg, ex=None): def fatal(self, msg, ex=None):
""" """
Raise a configuration error to interrupt the execution immediately::
Prints an error message in red and stops command execution; this is
usually used in the configuration section::


def configure(conf): def configure(conf):
conf.fatal('a requirement is missing') conf.fatal('a requirement is missing')
@@ -475,24 +478,31 @@ class Context(ctx):
:type msg: string :type msg: string
:param ex: optional exception object :param ex: optional exception object
:type ex: exception :type ex: exception
:raises: :py:class:`waflib.Errors.ConfigurationError`
""" """
if self.logger: if self.logger:
self.logger.info('from %s: %s' % (self.path.abspath(), msg)) self.logger.info('from %s: %s' % (self.path.abspath(), msg))
try: try:
msg = '%s\n(complete log in %s)' % (msg, self.logger.handlers[0].baseFilename)
except Exception:
logfile = self.logger.handlers[0].baseFilename
except AttributeError:
pass pass
else:
if os.environ.get('WAF_PRINT_FAILURE_LOG'):
# see #1930
msg = 'Log from (%s):\n%s\n' % (logfile, Utils.readf(logfile))
else:
msg = '%s\n(complete log in %s)' % (msg, logfile)
raise self.errors.ConfigurationError(msg, ex=ex) raise self.errors.ConfigurationError(msg, ex=ex)


def to_log(self, msg): def to_log(self, msg):
""" """
Log some information to the logger (if present), or to stderr. If the message is empty,
it is not printed::
Logs information to the logger (if present), or to stderr.
Empty messages are not printed::


def build(bld): def build(bld):
bld.to_log('starting the build') bld.to_log('starting the build')


When in doubt, override this method, or provide a logger on the context class.
Provide a logger on the context class or override this method if necessary.


:param msg: message :param msg: message
:type msg: string :type msg: string
@@ -508,7 +518,7 @@ class Context(ctx):


def msg(self, *k, **kw): def msg(self, *k, **kw):
""" """
Print a configuration message of the form ``msg: result``.
Prints a configuration message of the form ``msg: result``.
The second part of the message will be in colors. The output The second part of the message will be in colors. The output
can be disabled easly by setting ``in_msg`` to a positive value:: can be disabled easly by setting ``in_msg`` to a positive value::


@@ -536,7 +546,7 @@ class Context(ctx):
except KeyError: except KeyError:
result = k[1] result = k[1]


color = kw.get('color', None)
color = kw.get('color')
if not isinstance(color, str): if not isinstance(color, str):
color = result and 'GREEN' or 'YELLOW' color = result and 'GREEN' or 'YELLOW'


@@ -544,12 +554,12 @@ class Context(ctx):


def start_msg(self, *k, **kw): def start_msg(self, *k, **kw):
""" """
Print the beginning of a 'Checking for xxx' message. See :py:meth:`waflib.Context.Context.msg`
Prints the beginning of a 'Checking for xxx' message. See :py:meth:`waflib.Context.Context.msg`
""" """
if kw.get('quiet', None):
if kw.get('quiet'):
return return


msg = kw.get('msg', None) or k[0]
msg = kw.get('msg') or k[0]
try: try:
if self.in_msg: if self.in_msg:
self.in_msg += 1 self.in_msg += 1
@@ -567,19 +577,19 @@ class Context(ctx):
Logs.pprint('NORMAL', "%s :" % msg.ljust(self.line_just), sep='') Logs.pprint('NORMAL', "%s :" % msg.ljust(self.line_just), sep='')


def end_msg(self, *k, **kw): def end_msg(self, *k, **kw):
"""Print the end of a 'Checking for' message. See :py:meth:`waflib.Context.Context.msg`"""
if kw.get('quiet', None):
"""Prints the end of a 'Checking for' message. See :py:meth:`waflib.Context.Context.msg`"""
if kw.get('quiet'):
return return
self.in_msg -= 1 self.in_msg -= 1
if self.in_msg: if self.in_msg:
return return


result = kw.get('result', None) or k[0]
result = kw.get('result') or k[0]


defcolor = 'GREEN' defcolor = 'GREEN'
if result == True:
if result is True:
msg = 'ok' msg = 'ok'
elif result == False:
elif not result:
msg = 'not found' msg = 'not found'
defcolor = 'YELLOW' defcolor = 'YELLOW'
else: else:
@@ -597,7 +607,17 @@ class Context(ctx):
Logs.pprint(color, msg) Logs.pprint(color, msg)


def load_special_tools(self, var, ban=[]): def load_special_tools(self, var, ban=[]):
global waf_dir
"""
Loads third-party extensions modules for certain programming languages
by trying to list certain files in the extras/ directory. This method
is typically called once for a programming language group, see for
example :py:mod:`waflib.Tools.compiler_c`

:param var: glob expression, for example 'cxx\_\*.py'
:type var: string
:param ban: list of exact file names to exclude
:type ban: list of string
"""
if os.path.isdir(waf_dir): if os.path.isdir(waf_dir):
lst = self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var) lst = self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var)
for x in lst: for x in lst:
@@ -608,12 +628,12 @@ class Context(ctx):
waflibs = PyZipFile(waf_dir) waflibs = PyZipFile(waf_dir)
lst = waflibs.namelist() lst = waflibs.namelist()
for x in lst: for x in lst:
if not re.match("waflib/extras/%s" % var.replace("*", ".*"), var):
if not re.match('waflib/extras/%s' % var.replace('*', '.*'), var):
continue continue
f = os.path.basename(x) f = os.path.basename(x)
doban = False doban = False
for b in ban: for b in ban:
r = b.replace("*", ".*")
r = b.replace('*', '.*')
if re.match(r, f): if re.match(r, f):
doban = True doban = True
if not doban: if not doban:
@@ -622,13 +642,13 @@ class Context(ctx):


cache_modules = {} cache_modules = {}
""" """
Dictionary holding already loaded modules, keyed by their absolute path.
Dictionary holding already loaded modules (wscript), indexed by their absolute path.
The modules are added automatically by :py:func:`waflib.Context.load_module` The modules are added automatically by :py:func:`waflib.Context.load_module`
""" """


def load_module(path, encoding=None): def load_module(path, encoding=None):
""" """
Load a source file as a python module.
Loads a wscript file as a python module. This method caches results in :py:attr:`waflib.Context.cache_modules`


:param path: file path :param path: file path
:type path: string :type path: string
@@ -648,17 +668,17 @@ def load_module(path, encoding=None):


module_dir = os.path.dirname(path) module_dir = os.path.dirname(path)
sys.path.insert(0, module_dir) sys.path.insert(0, module_dir)

try : exec(compile(code, path, 'exec'), module.__dict__)
finally: sys.path.remove(module_dir)
try:
exec(compile(code, path, 'exec'), module.__dict__)
finally:
sys.path.remove(module_dir)


cache_modules[path] = module cache_modules[path] = module

return module return module


def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True): def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
""" """
Import a Waf tool (python module), and store it in the dict :py:const:`waflib.Context.Context.tools`
Importx a Waf tool as a python module, and stores it in the dict :py:const:`waflib.Context.Context.tools`


:type tool: string :type tool: string
:param tool: Name of the tool :param tool: Name of the tool
@@ -672,14 +692,18 @@ def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
else: else:
tool = tool.replace('++', 'xx') tool = tool.replace('++', 'xx')


origSysPath = sys.path
if not with_sys_path: sys.path = []
if not with_sys_path:
back_path = sys.path
sys.path = []
try: try:
if tooldir: if tooldir:
assert isinstance(tooldir, list) assert isinstance(tooldir, list)
sys.path = tooldir + sys.path sys.path = tooldir + sys.path
try: try:
__import__(tool) __import__(tool)
except ImportError as e:
e.waf_sys_path = list(sys.path)
raise
finally: finally:
for d in tooldir: for d in tooldir:
sys.path.remove(d) sys.path.remove(d)
@@ -687,7 +711,8 @@ def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
Context.tools[tool] = ret Context.tools[tool] = ret
return ret return ret
else: else:
if not with_sys_path: sys.path.insert(0, waf_dir)
if not with_sys_path:
sys.path.insert(0, waf_dir)
try: try:
for x in ('waflib.Tools.%s', 'waflib.extras.%s', 'waflib.%s', '%s'): for x in ('waflib.Tools.%s', 'waflib.extras.%s', 'waflib.%s', '%s'):
try: try:
@@ -695,13 +720,18 @@ def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
break break
except ImportError: except ImportError:
x = None x = None
if x is None: # raise an exception
else: # raise an exception
__import__(tool) __import__(tool)
except ImportError as e:
e.waf_sys_path = list(sys.path)
raise
finally: finally:
if not with_sys_path: sys.path.remove(waf_dir)
if not with_sys_path:
sys.path.remove(waf_dir)
ret = sys.modules[x % tool] ret = sys.modules[x % tool]
Context.tools[tool] = ret Context.tools[tool] = ret
return ret return ret
finally: finally:
if not with_sys_path: sys.path += origSysPath
if not with_sys_path:
sys.path += back_path



+ 9
- 11
waflib/Errors.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2010 (ita)
# Thomas Nagy, 2010-2018 (ita)


""" """
Exceptions used in the Waf code Exceptions used in the Waf code
@@ -17,6 +17,7 @@ class WafError(Exception):
:param ex: exception causing this error (optional) :param ex: exception causing this error (optional)
:type ex: exception :type ex: exception
""" """
Exception.__init__(self)
self.msg = msg self.msg = msg
assert not isinstance(msg, Exception) assert not isinstance(msg, Exception)


@@ -35,9 +36,7 @@ class WafError(Exception):
return str(self.msg) return str(self.msg)


class BuildError(WafError): class BuildError(WafError):
"""
Errors raised during the build and install phases
"""
"""Error raised during the build and install phases"""
def __init__(self, error_tasks=[]): def __init__(self, error_tasks=[]):
""" """
:param error_tasks: tasks that could not complete normally :param error_tasks: tasks that could not complete normally
@@ -47,24 +46,23 @@ class BuildError(WafError):
WafError.__init__(self, self.format_error()) WafError.__init__(self, self.format_error())


def format_error(self): def format_error(self):
"""format the error messages from the tasks that failed"""
"""Formats the error messages from the tasks that failed"""
lst = ['Build failed'] lst = ['Build failed']
for tsk in self.tasks: for tsk in self.tasks:
txt = tsk.format_error() txt = tsk.format_error()
if txt: lst.append(txt)
if txt:
lst.append(txt)
return '\n'.join(lst) return '\n'.join(lst)


class ConfigurationError(WafError): class ConfigurationError(WafError):
"""
Configuration exception raised in particular by :py:meth:`waflib.Context.Context.fatal`
"""
"""Configuration exception raised in particular by :py:meth:`waflib.Context.Context.fatal`"""
pass pass


class TaskRescan(WafError): class TaskRescan(WafError):
"""task-specific exception type, trigger a signature recomputation"""
"""Task-specific exception type signalling required signature recalculations"""
pass pass


class TaskNotReady(WafError): class TaskNotReady(WafError):
"""task-specific exception type, raised when the task signature cannot be computed"""
"""Task-specific exception type signalling that task signatures cannot be computed"""
pass pass



+ 66
- 32
waflib/Logs.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
# Thomas Nagy, 2005-2018 (ita)


""" """
logging, colors, terminal width and pretty-print logging, colors, terminal width and pretty-print
@@ -23,8 +23,15 @@ import logging
LOG_FORMAT = os.environ.get('WAF_LOG_FORMAT', '%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s') LOG_FORMAT = os.environ.get('WAF_LOG_FORMAT', '%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s')
HOUR_FORMAT = os.environ.get('WAF_HOUR_FORMAT', '%H:%M:%S') HOUR_FORMAT = os.environ.get('WAF_HOUR_FORMAT', '%H:%M:%S')


zones = ''
zones = []
"""
See :py:class:`waflib.Logs.log_filter`
"""

verbose = 0 verbose = 0
"""
Global verbosity level, see :py:func:`waflib.Logs.debug` and :py:func:`waflib.Logs.error`
"""


colors_lst = { colors_lst = {
'USE' : True, 'USE' : True,
@@ -49,6 +56,15 @@ except NameError:
unicode = None unicode = None


def enable_colors(use): def enable_colors(use):
"""
If *1* is given, then the system will perform a few verifications
before enabling colors, such as checking whether the interpreter
is running in a terminal. A value of zero will disable colors,
and a value above *1* will force colors.

:param use: whether to enable colors or not
:type use: integer
"""
if use == 1: if use == 1:
if not (sys.stderr.isatty() or sys.stdout.isatty()): if not (sys.stderr.isatty() or sys.stdout.isatty()):
use = 0 use = 0
@@ -74,15 +90,23 @@ except AttributeError:
return 80 return 80


get_term_cols.__doc__ = """ get_term_cols.__doc__ = """
Get the console width in characters.
Returns the console width in characters.


:return: the number of characters per line :return: the number of characters per line
:rtype: int :rtype: int
""" """


def get_color(cl): def get_color(cl):
if not colors_lst['USE']: return ''
return colors_lst.get(cl, '')
"""
Returns the ansi sequence corresponding to the given color name.
An empty string is returned when coloring is globally disabled.

:param cl: color name in capital letters
:type cl: string
"""
if colors_lst['USE']:
return colors_lst.get(cl, '')
return ''


class color_dict(object): class color_dict(object):
"""attribute-based color access, eg: colors.PINK""" """attribute-based color access, eg: colors.PINK"""
@@ -96,7 +120,7 @@ colors = color_dict()
re_log = re.compile(r'(\w+): (.*)', re.M) re_log = re.compile(r'(\w+): (.*)', re.M)
class log_filter(logging.Filter): class log_filter(logging.Filter):
""" """
The waf logs are of the form 'name: message', and can be filtered by 'waf --zones=name'.
Waf logs are of the form 'name: message', and can be filtered by 'waf --zones=name'.
For example, the following:: For example, the following::


from waflib import Logs from waflib import Logs
@@ -106,17 +130,14 @@ class log_filter(logging.Filter):


$ waf --zones=test $ waf --zones=test
""" """
def __init__(self, name=None):
pass
def __init__(self, name=''):
logging.Filter.__init__(self, name)


def filter(self, rec): def filter(self, rec):
""" """
filter a record, adding the colors automatically
Filters log records by zone and by logging level


* error: red
* warning: yellow

:param rec: message to record
:param rec: log entry
""" """
rec.zone = rec.module rec.zone = rec.module
if rec.levelno >= logging.INFO: if rec.levelno >= logging.INFO:
@@ -136,6 +157,9 @@ class log_filter(logging.Filter):
class log_handler(logging.StreamHandler): class log_handler(logging.StreamHandler):
"""Dispatches messages to stderr/stdout depending on the severity level""" """Dispatches messages to stderr/stdout depending on the severity level"""
def emit(self, record): def emit(self, record):
"""
Delegates the functionality to :py:meth:`waflib.Log.log_handler.emit_override`
"""
# default implementation # default implementation
try: try:
try: try:
@@ -153,6 +177,9 @@ class log_handler(logging.StreamHandler):
self.handleError(record) self.handleError(record)


def emit_override(self, record, **kw): def emit_override(self, record, **kw):
"""
Writes the log record to the desired stream (stderr/stdout)
"""
self.terminator = getattr(record, 'terminator', '\n') self.terminator = getattr(record, 'terminator', '\n')
stream = self.stream stream = self.stream
if unicode: if unicode:
@@ -179,7 +206,10 @@ class formatter(logging.Formatter):
logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT) logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT)


def format(self, rec): def format(self, rec):
"""Messages in warning, error or info mode are displayed in color by default"""
"""
Formats records and adds colors as needed. The records do not get
a leading hour format if the logging level is above *INFO*.
"""
try: try:
msg = rec.msg.decode('utf-8') msg = rec.msg.decode('utf-8')
except Exception: except Exception:
@@ -204,7 +234,10 @@ class formatter(logging.Formatter):
# and other terminal commands # and other terminal commands
msg = re.sub(r'\r(?!\n)|\x1B\[(K|.*?(m|h|l))', '', msg) msg = re.sub(r'\r(?!\n)|\x1B\[(K|.*?(m|h|l))', '', msg)


if rec.levelno >= logging.INFO: # ??
if rec.levelno >= logging.INFO:
# the goal of this is to format without the leading "Logs, hour" prefix
if rec.args:
return msg % rec.args
return msg return msg


rec.msg = msg rec.msg = msg
@@ -217,19 +250,17 @@ log = None


def debug(*k, **kw): def debug(*k, **kw):
""" """
Wrap logging.debug, the output is filtered for performance reasons
Wraps logging.debug and discards messages if the verbosity level :py:attr:`waflib.Logs.verbose` ≤ 0
""" """
if verbose: if verbose:
k = list(k) k = list(k)
k[0] = k[0].replace('\n', ' ') k[0] = k[0].replace('\n', ' ')
global log
log.debug(*k, **kw) log.debug(*k, **kw)


def error(*k, **kw): def error(*k, **kw):
""" """
Wrap logging.errors, display the origin of the message when '-vv' is set
Wrap logging.errors, adds the stack trace when the verbosity level :py:attr:`waflib.Logs.verbose` ≥ 2
""" """
global log
log.error(*k, **kw) log.error(*k, **kw)
if verbose > 2: if verbose > 2:
st = traceback.extract_stack() st = traceback.extract_stack()
@@ -237,28 +268,27 @@ def error(*k, **kw):
st = st[:-1] st = st[:-1]
buf = [] buf = []
for filename, lineno, name, line in st: for filename, lineno, name, line in st:
buf.append(' File "%s", line %d, in %s' % (filename, lineno, name))
buf.append(' File %r, line %d, in %s' % (filename, lineno, name))
if line: if line:
buf.append(' %s' % line.strip()) buf.append(' %s' % line.strip())
if buf: log.error("\n".join(buf))
if buf:
log.error('\n'.join(buf))


def warn(*k, **kw): def warn(*k, **kw):
""" """
Wrap logging.warn
Wraps logging.warn
""" """
global log
log.warn(*k, **kw) log.warn(*k, **kw)


def info(*k, **kw): def info(*k, **kw):
""" """
Wrap logging.info
Wraps logging.info
""" """
global log
log.info(*k, **kw) log.info(*k, **kw)


def init_log(): def init_log():
""" """
Initialize the loggers globally
Initializes the logger :py:attr:`waflib.Logs.log`
""" """
global log global log
log = logging.getLogger('waflib') log = logging.getLogger('waflib')
@@ -272,7 +302,7 @@ def init_log():


def make_logger(path, name): def make_logger(path, name):
""" """
Create a simple logger, which is often used to redirect the context command output::
Creates a simple logger, which is often used to redirect the context command output::


from waflib import Logs from waflib import Logs
bld.logger = Logs.make_logger('test.log', 'build') bld.logger = Logs.make_logger('test.log', 'build')
@@ -292,7 +322,11 @@ def make_logger(path, name):
:type name: string :type name: string
""" """
logger = logging.getLogger(name) logger = logging.getLogger(name)
hdlr = logging.FileHandler(path, 'w')
if sys.hexversion > 0x3000000:
encoding = sys.stdout.encoding
else:
encoding = None
hdlr = logging.FileHandler(path, 'w', encoding=encoding)
formatter = logging.Formatter('%(message)s') formatter = logging.Formatter('%(message)s')
hdlr.setFormatter(formatter) hdlr.setFormatter(formatter)
logger.addHandler(hdlr) logger.addHandler(hdlr)
@@ -301,7 +335,7 @@ def make_logger(path, name):


def make_mem_logger(name, to_log, size=8192): def make_mem_logger(name, to_log, size=8192):
""" """
Create a memory logger to avoid writing concurrently to the main logger
Creates a memory logger to avoid writing concurrently to the main logger
""" """
from logging.handlers import MemoryHandler from logging.handlers import MemoryHandler
logger = logging.getLogger(name) logger = logging.getLogger(name)
@@ -315,7 +349,7 @@ def make_mem_logger(name, to_log, size=8192):


def free_logger(logger): def free_logger(logger):
""" """
Free the resources held by the loggers created through make_logger or make_mem_logger.
Frees the resources held by the loggers created through make_logger or make_mem_logger.
This is used for file cleanup and for handler removal (logger objects are re-used). This is used for file cleanup and for handler removal (logger objects are re-used).
""" """
try: try:
@@ -327,7 +361,7 @@ def free_logger(logger):


def pprint(col, msg, label='', sep='\n'): def pprint(col, msg, label='', sep='\n'):
""" """
Print messages in color immediately on stderr::
Prints messages in color immediately on stderr::


from waflib import Logs from waflib import Logs
Logs.pprint('RED', 'Something bad just happened') Logs.pprint('RED', 'Something bad just happened')
@@ -341,5 +375,5 @@ def pprint(col, msg, label='', sep='\n'):
:param sep: a string to append at the end (line separator) :param sep: a string to append at the end (line separator)
:type sep: string :type sep: string
""" """
info("%s%s%s %s" % (colors(col), msg, colors.NORMAL, label), extra={'terminator':sep})
info('%s%s%s %s', colors(col), msg, colors.NORMAL, label, extra={'terminator':sep})



+ 376
- 254
waflib/Node.py
File diff suppressed because it is too large
View File


+ 116
- 47
waflib/Options.py View File

@@ -1,66 +1,75 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Scott Newton, 2005 (scottn) # Scott Newton, 2005 (scottn)
# Thomas Nagy, 2006-2010 (ita)
# Thomas Nagy, 2006-2018 (ita)


""" """
Support for waf command-line options Support for waf command-line options


Provides default command-line options,
as well as custom ones, used by the ``options`` wscript function.

Provides default and command-line options, as well the command
that reads the ``options`` wscript function.
""" """


import os, tempfile, optparse, sys, re import os, tempfile, optparse, sys, re
from waflib import Logs, Utils, Context
from waflib import Logs, Utils, Context, Errors


cmds = 'distclean configure build install clean uninstall check dist distcheck'.split()
options = optparse.Values()
""" """
Constant representing the default waf commands displayed in::

$ waf --help

"""

options = {}
"""
A dictionary representing the command-line options::
A global dictionary representing user-provided command-line options::


$ waf --foo=bar $ waf --foo=bar

""" """


commands = [] commands = []
""" """
List of commands to execute extracted from the command-line. This list is consumed during the execution, see :py:func:`waflib.Scripting.run_commands`.
List of commands to execute extracted from the command-line. This list
is consumed during the execution by :py:func:`waflib.Scripting.run_commands`.
""" """


envvars = [] envvars = []
""" """
List of environment variable declarations placed after the Waf executable name. List of environment variable declarations placed after the Waf executable name.
These are detected by searching for "=" in the rest arguments.
These are detected by searching for "=" in the remaining arguments.
You probably do not want to use this.
""" """


lockfile = os.environ.get('WAFLOCK', '.lock-waf_%s_build' % sys.platform) lockfile = os.environ.get('WAFLOCK', '.lock-waf_%s_build' % sys.platform)
platform = Utils.unversioned_sys_platform()

"""
Name of the lock file that marks a project as configured
"""


class opt_parser(optparse.OptionParser): class opt_parser(optparse.OptionParser):
""" """
Command-line options parser. Command-line options parser.
""" """
def __init__(self, ctx):
optparse.OptionParser.__init__(self, conflict_handler="resolve", version='waf %s (%s)' % (Context.WAFVERSION, Context.WAFREVISION))
def __init__(self, ctx, allow_unknown=False):
optparse.OptionParser.__init__(self, conflict_handler='resolve', add_help_option=False,
version='waf %s (%s)' % (Context.WAFVERSION, Context.WAFREVISION))
self.formatter.width = Logs.get_term_cols() self.formatter.width = Logs.get_term_cols()
self.ctx = ctx self.ctx = ctx
self.allow_unknown = allow_unknown

def _process_args(self, largs, rargs, values):
"""
Custom _process_args to allow unknown options according to the allow_unknown status
"""
while rargs:
try:
optparse.OptionParser._process_args(self,largs,rargs,values)
except (optparse.BadOptionError, optparse.AmbiguousOptionError) as e:
if self.allow_unknown:
largs.append(e.opt_str)
else:
self.error(str(e))


def print_usage(self, file=None): def print_usage(self, file=None):
return self.print_help(file) return self.print_help(file)


def get_usage(self): def get_usage(self):
""" """
Return the message to print on ``waf --help``
Builds the message to print on ``waf --help``

:rtype: string
""" """
cmds_str = {} cmds_str = {}
for cls in Context.classes: for cls in Context.classes:
@@ -96,10 +105,9 @@ Main commands (example: ./waf build -j4)


class OptionsContext(Context.Context): class OptionsContext(Context.Context):
""" """
Collect custom options from wscript files and parses the command line.
Set the global :py:const:`waflib.Options.commands` and :py:const:`waflib.Options.options` values.
Collects custom options from wscript files and parses the command line.
Sets the global :py:const:`waflib.Options.commands` and :py:const:`waflib.Options.options` values.
""" """

cmd = 'options' cmd = 'options'
fun = 'options' fun = 'options'


@@ -114,11 +122,18 @@ class OptionsContext(Context.Context):
jobs = self.jobs() jobs = self.jobs()
p = self.add_option p = self.add_option
color = os.environ.get('NOCOLOR', '') and 'no' or 'auto' color = os.environ.get('NOCOLOR', '') and 'no' or 'auto'
if os.environ.get('CLICOLOR', '') == '0':
color = 'no'
elif os.environ.get('CLICOLOR_FORCE', '') == '1':
color = 'yes'
p('-c', '--color', dest='colors', default=color, action='store', help='whether to use colors (yes/no/auto) [default: auto]', choices=('yes', 'no', 'auto')) p('-c', '--color', dest='colors', default=color, action='store', help='whether to use colors (yes/no/auto) [default: auto]', choices=('yes', 'no', 'auto'))
p('-j', '--jobs', dest='jobs', default=jobs, type='int', help='amount of parallel jobs (%r)' % jobs)
p('-j', '--jobs', dest='jobs', default=jobs, type='int', help='amount of parallel jobs (%r)' % jobs)
p('-k', '--keep', dest='keep', default=0, action='count', help='continue despite errors (-kk to try harder)') p('-k', '--keep', dest='keep', default=0, action='count', help='continue despite errors (-kk to try harder)')
p('-v', '--verbose', dest='verbose', default=0, action='count', help='verbosity level -v -vv or -vvv [default: 0]') p('-v', '--verbose', dest='verbose', default=0, action='count', help='verbosity level -v -vv or -vvv [default: 0]')
p('--zones', dest='zones', default='', action='store', help='debugging zones (task_gen, deps, tasks, etc)') p('--zones', dest='zones', default='', action='store', help='debugging zones (task_gen, deps, tasks, etc)')
p('--profile', dest='profile', default=0, action='store_true', help=optparse.SUPPRESS_HELP)
p('--pdb', dest='pdb', default=0, action='store_true', help=optparse.SUPPRESS_HELP)
p('-h', '--help', dest='whelp', default=0, action='store_true', help="show this help message and exit")


gr = self.add_option_group('Configuration options') gr = self.add_option_group('Configuration options')
self.option_groups['configure options'] = gr self.option_groups['configure options'] = gr
@@ -132,7 +147,7 @@ class OptionsContext(Context.Context):


default_prefix = getattr(Context.g_module, 'default_prefix', os.environ.get('PREFIX')) default_prefix = getattr(Context.g_module, 'default_prefix', os.environ.get('PREFIX'))
if not default_prefix: if not default_prefix:
if platform == 'win32':
if Utils.unversioned_sys_platform() == 'win32':
d = tempfile.gettempdir() d = tempfile.gettempdir()
default_prefix = d[0].upper() + d[1:] default_prefix = d[0].upper() + d[1:]
# win32 preserves the case, but gettempdir does not # win32 preserves the case, but gettempdir does not
@@ -161,8 +176,8 @@ class OptionsContext(Context.Context):


def jobs(self): def jobs(self):
""" """
Find the amount of cpu cores to set the default amount of tasks executed in parallel. At
runtime the options can be obtained from :py:const:`waflib.Options.options` ::
Finds the optimal amount of cpu cores to use for parallel jobs.
At runtime the options can be obtained from :py:const:`waflib.Options.options` ::


from waflib.Options import options from waflib.Options import options
njobs = options.jobs njobs = options.jobs
@@ -185,7 +200,7 @@ class OptionsContext(Context.Context):
if not count and os.name not in ('nt', 'java'): if not count and os.name not in ('nt', 'java'):
try: try:
tmp = self.cmd_and_log(['sysctl', '-n', 'hw.ncpu'], quiet=0) tmp = self.cmd_and_log(['sysctl', '-n', 'hw.ncpu'], quiet=0)
except Exception:
except Errors.WafError:
pass pass
else: else:
if re.match('^[0-9]+$', tmp): if re.match('^[0-9]+$', tmp):
@@ -198,21 +213,25 @@ class OptionsContext(Context.Context):


def add_option(self, *k, **kw): def add_option(self, *k, **kw):
""" """
Wrapper for optparse.add_option::
Wraps ``optparse.add_option``::


def options(ctx): def options(ctx):
ctx.add_option('-u', '--use', dest='use', default=False, action='store_true',
help='a boolean option')
ctx.add_option('-u', '--use', dest='use', default=False,
action='store_true', help='a boolean option')

:rtype: optparse option object
""" """
return self.parser.add_option(*k, **kw) return self.parser.add_option(*k, **kw)


def add_option_group(self, *k, **kw): def add_option_group(self, *k, **kw):
""" """
Wrapper for optparse.add_option_group::
Wraps ``optparse.add_option_group``::


def options(ctx): def options(ctx):
gr = ctx.add_option_group('some options') gr = ctx.add_option_group('some options')
gr.add_option('-u', '--use', dest='use', default=False, action='store_true') gr.add_option('-u', '--use', dest='use', default=False, action='store_true')

:rtype: optparse option group object
""" """
try: try:
gr = self.option_groups[k[0]] gr = self.option_groups[k[0]]
@@ -223,13 +242,14 @@ class OptionsContext(Context.Context):


def get_option_group(self, opt_str): def get_option_group(self, opt_str):
""" """
Wrapper for optparse.get_option_group::
Wraps ``optparse.get_option_group``::


def options(ctx): def options(ctx):
gr = ctx.get_option_group('configure options') gr = ctx.get_option_group('configure options')
gr.add_option('-o', '--out', action='store', default='', gr.add_option('-o', '--out', action='store', default='',
help='build dir for the project', dest='out') help='build dir for the project', dest='out')


:rtype: optparse option group object
""" """
try: try:
return self.option_groups[opt_str] return self.option_groups[opt_str]
@@ -239,35 +259,84 @@ class OptionsContext(Context.Context):
return group return group
return None return None


def parse_args(self, _args=None):
"""
Parse arguments from a list (not bound to the command-line).
def sanitize_path(self, path, cwd=None):
if not cwd:
cwd = Context.launch_dir
p = os.path.expanduser(path)
p = os.path.join(cwd, p)
p = os.path.normpath(p)
p = os.path.abspath(p)
return p


:param _args: arguments
:type _args: list of strings
def parse_cmd_args(self, _args=None, cwd=None, allow_unknown=False):
"""
Just parse the arguments
""" """
global options, commands, envvars
self.parser.allow_unknown = allow_unknown
(options, leftover_args) = self.parser.parse_args(args=_args) (options, leftover_args) = self.parser.parse_args(args=_args)

envvars = []
commands = []
for arg in leftover_args: for arg in leftover_args:
if '=' in arg: if '=' in arg:
envvars.append(arg) envvars.append(arg)
else:
elif arg != 'options':
commands.append(arg) commands.append(arg)


if options.destdir:
options.destdir = Utils.sane_path(options.destdir)
for name in 'top out destdir prefix bindir libdir'.split():
# those paths are usually expanded from Context.launch_dir
if getattr(options, name, None):
path = self.sanitize_path(getattr(options, name), cwd)
setattr(options, name, path)
return options, commands, envvars

def init_module_vars(self, arg_options, arg_commands, arg_envvars):
options.__dict__.clear()
del commands[:]
del envvars[:]

options.__dict__.update(arg_options.__dict__)
commands.extend(arg_commands)
envvars.extend(arg_envvars)


for var in envvars:
(name, value) = var.split('=', 1)
os.environ[name.strip()] = value

def init_logs(self, options, commands, envvars):
Logs.verbose = options.verbose
if options.verbose >= 1: if options.verbose >= 1:
self.load('errcheck') self.load('errcheck')


colors = {'yes' : 2, 'auto' : 1, 'no' : 0}[options.colors] colors = {'yes' : 2, 'auto' : 1, 'no' : 0}[options.colors]
Logs.enable_colors(colors) Logs.enable_colors(colors)


if options.zones:
Logs.zones = options.zones.split(',')
if not Logs.verbose:
Logs.verbose = 1
elif Logs.verbose > 0:
Logs.zones = ['runner']
if Logs.verbose > 2:
Logs.zones = ['*']

def parse_args(self, _args=None):
"""
Parses arguments from a list which is not necessarily the command-line.
Initializes the module variables options, commands and envvars
If help is requested, prints it and exit the application

:param _args: arguments
:type _args: list of strings
"""
options, commands, envvars = self.parse_cmd_args()
self.init_logs(options, commands, envvars)
self.init_module_vars(options, commands, envvars)

def execute(self): def execute(self):
""" """
See :py:func:`waflib.Context.Context.execute` See :py:func:`waflib.Context.Context.execute`
""" """
super(OptionsContext, self).execute() super(OptionsContext, self).execute()
self.parse_args() self.parse_args()
Utils.alloc_process_pool(options.jobs)



+ 407
- 169
waflib/Runner.py View File

@@ -1,98 +1,125 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
# Thomas Nagy, 2005-2018 (ita)


""" """
Runner.py: Task scheduling and execution Runner.py: Task scheduling and execution

""" """


import random, atexit
import heapq, traceback
try: try:
from queue import Queue
from queue import Queue, PriorityQueue
except ImportError: except ImportError:
from Queue import Queue from Queue import Queue
try:
from Queue import PriorityQueue
except ImportError:
class PriorityQueue(Queue):
def _init(self, maxsize):
self.maxsize = maxsize
self.queue = []
def _put(self, item):
heapq.heappush(self.queue, item)
def _get(self):
return heapq.heappop(self.queue)

from waflib import Utils, Task, Errors, Logs from waflib import Utils, Task, Errors, Logs


GAP = 10
GAP = 5
""" """
Wait for free tasks if there are at least ``GAP * njobs`` in queue
Wait for at least ``GAP * njobs`` before trying to enqueue more tasks to run
""" """


class TaskConsumer(Utils.threading.Thread):
"""
Task consumers belong to a pool of workers
class PriorityTasks(object):
def __init__(self):
self.lst = []
def __len__(self):
return len(self.lst)
def __iter__(self):
return iter(self.lst)
def clear(self):
self.lst = []
def append(self, task):
heapq.heappush(self.lst, task)
def appendleft(self, task):
"Deprecated, do not use"
heapq.heappush(self.lst, task)
def pop(self):
return heapq.heappop(self.lst)
def extend(self, lst):
if self.lst:
for x in lst:
self.append(x)
else:
if isinstance(lst, list):
self.lst = lst
heapq.heapify(lst)
else:
self.lst = lst.lst


They wait for tasks in the queue and then use ``task.process(...)``
class Consumer(Utils.threading.Thread):
""" """
def __init__(self):
Daemon thread object that executes a task. It shares a semaphore with
the coordinator :py:class:`waflib.Runner.Spawner`. There is one
instance per task to consume.
"""
def __init__(self, spawner, task):
Utils.threading.Thread.__init__(self) Utils.threading.Thread.__init__(self)
self.ready = Queue()
self.task = task
"""Task to execute"""
self.spawner = spawner
"""Coordinator object"""
self.setDaemon(1)
self.start()
def run(self):
""" """
Obtain :py:class:`waflib.Task.TaskBase` instances from this queue.
Processes a single task
""" """
try:
if not self.spawner.master.stop:
self.spawner.master.process_task(self.task)
finally:
self.spawner.sem.release()
self.spawner.master.out.put(self.task)
self.task = None
self.spawner = None

class Spawner(Utils.threading.Thread):
"""
Daemon thread that consumes tasks from :py:class:`waflib.Runner.Parallel` producer and
spawns a consuming thread :py:class:`waflib.Runner.Consumer` for each
:py:class:`waflib.Task.Task` instance.
"""
def __init__(self, master):
Utils.threading.Thread.__init__(self)
self.master = master
""":py:class:`waflib.Runner.Parallel` producer instance"""
self.sem = Utils.threading.Semaphore(master.numjobs)
"""Bounded semaphore that prevents spawning more than *n* concurrent consumers"""
self.setDaemon(1) self.setDaemon(1)
self.start() self.start()

def run(self): def run(self):
""" """
Loop over the tasks to execute
Spawns new consumers to execute tasks by delegating to :py:meth:`waflib.Runner.Spawner.loop`
""" """
try: try:
self.loop() self.loop()
except Exception: except Exception:
# Python 2 prints unnecessary messages when shutting down
# we also want to stop the thread properly
pass pass

def loop(self): def loop(self):
""" """
Obtain tasks from :py:attr:`waflib.Runner.TaskConsumer.ready` and call
:py:meth:`waflib.Task.TaskBase.process`. If the object is a function, execute it.
Consumes task objects from the producer; ends when the producer has no more
task to provide.
""" """
master = self.master
while 1: while 1:
tsk = self.ready.get()
if not isinstance(tsk, Task.TaskBase):
tsk(self)
else:
tsk.process()

pool = Queue()
"""
Pool of task consumer objects
"""

def get_pool():
"""
Obtain a task consumer from :py:attr:`waflib.Runner.pool`.
Do not forget to put it back by using :py:func:`waflib.Runner.put_pool`
and reset properly (original waiting queue).

:rtype: :py:class:`waflib.Runner.TaskConsumer`
"""
try:
return pool.get(False)
except Exception:
return TaskConsumer()

def put_pool(x):
"""
Return a task consumer to the thread pool :py:attr:`waflib.Runner.pool`

:param x: task consumer object
:type x: :py:class:`waflib.Runner.TaskConsumer`
"""
pool.put(x)

def _free_resources():
global pool
lst = []
while pool.qsize():
lst.append(pool.get())
for x in lst:
x.ready.put(None)
for x in lst:
x.join()
pool = None
atexit.register(_free_resources)
task = master.ready.get()
self.sem.acquire()
if not master.stop:
task.log_display(task.generator.bld)
Consumer(self, task)


class Parallel(object): class Parallel(object):
""" """
@@ -106,7 +133,7 @@ class Parallel(object):


self.numjobs = j self.numjobs = j
""" """
Number of consumers in the pool
Amount of parallel consumers to use
""" """


self.bld = bld self.bld = bld
@@ -114,19 +141,25 @@ class Parallel(object):
Instance of :py:class:`waflib.Build.BuildContext` Instance of :py:class:`waflib.Build.BuildContext`
""" """


self.outstanding = []
"""List of :py:class:`waflib.Task.TaskBase` that may be ready to be executed"""
self.outstanding = PriorityTasks()
"""Heap of :py:class:`waflib.Task.Task` that may be ready to be executed"""

self.postponed = PriorityTasks()
"""Heap of :py:class:`waflib.Task.Task` which are not ready to run for non-DAG reasons"""


self.frozen = []
"""List of :py:class:`waflib.Task.TaskBase` that cannot be executed immediately"""
self.incomplete = set()
"""List of :py:class:`waflib.Task.Task` waiting for dependent tasks to complete (DAG)"""

self.ready = PriorityQueue(0)
"""List of :py:class:`waflib.Task.Task` ready to be executed by consumers"""


self.out = Queue(0) self.out = Queue(0)
"""List of :py:class:`waflib.Task.TaskBase` returned by the task consumers"""
"""List of :py:class:`waflib.Task.Task` returned by the task consumers"""


self.count = 0 self.count = 0
"""Amount of tasks that may be processed by :py:class:`waflib.Runner.TaskConsumer`""" """Amount of tasks that may be processed by :py:class:`waflib.Runner.TaskConsumer`"""


self.processed = 1
self.processed = 0
"""Amount of tasks processed""" """Amount of tasks processed"""


self.stop = False self.stop = False
@@ -139,33 +172,44 @@ class Parallel(object):
"""Task iterator which must give groups of parallelizable tasks when calling ``next()``""" """Task iterator which must give groups of parallelizable tasks when calling ``next()``"""


self.dirty = False self.dirty = False
"""Flag to indicate that tasks have been executed, and that the build cache must be saved (call :py:meth:`waflib.Build.BuildContext.store`)"""
"""
Flag that indicates that the build cache must be saved when a task was executed
(calls :py:meth:`waflib.Build.BuildContext.store`)"""

self.revdeps = Utils.defaultdict(set)
"""
The reverse dependency graph of dependencies obtained from Task.run_after
"""

self.spawner = Spawner(self)
"""
Coordinating daemon thread that spawns thread consumers
"""


def get_next_task(self): def get_next_task(self):
""" """
Obtain the next task to execute.
Obtains the next Task instance to run


:rtype: :py:class:`waflib.Task.TaskBase`
:rtype: :py:class:`waflib.Task.Task`
""" """
if not self.outstanding: if not self.outstanding:
return None return None
return self.outstanding.pop(0)
return self.outstanding.pop()


def postpone(self, tsk): def postpone(self, tsk):
""" """
A task cannot be executed at this point, put it in the list :py:attr:`waflib.Runner.Parallel.frozen`.
Adds the task to the list :py:attr:`waflib.Runner.Parallel.postponed`.
The order is scrambled so as to consume as many tasks in parallel as possible.


:param tsk: task
:type tsk: :py:class:`waflib.Task.TaskBase`
:param tsk: task instance
:type tsk: :py:class:`waflib.Task.Task`
""" """
if random.randint(0, 1):
self.frozen.insert(0, tsk)
else:
self.frozen.append(tsk)
self.postponed.append(tsk)


def refill_task_list(self): def refill_task_list(self):
""" """
Put the next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`.
Pulls a next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`.
Ensures that all tasks in the current build group are complete before processing the next one.
""" """
while self.count > self.numjobs * GAP: while self.count > self.numjobs * GAP:
self.get_out() self.get_out()
@@ -173,132 +217,224 @@ class Parallel(object):
while not self.outstanding: while not self.outstanding:
if self.count: if self.count:
self.get_out() self.get_out()
elif self.frozen:
if self.outstanding:
break
elif self.postponed:
try: try:
cond = self.deadlock == self.processed cond = self.deadlock == self.processed
except AttributeError: except AttributeError:
pass pass
else: else:
if cond: if cond:
msg = 'check the build order for the tasks'
for tsk in self.frozen:
if not tsk.run_after:
msg = 'check the methods runnable_status'
break
# The most common reason is conflicting build order declaration
# for example: "X run_after Y" and "Y run_after X"
# Another can be changing "run_after" dependencies while the build is running
# for example: updating "tsk.run_after" in the "runnable_status" method
lst = [] lst = []
for tsk in self.frozen:
lst.append('%s\t-> %r' % (repr(tsk), [id(x) for x in tsk.run_after]))
raise Errors.WafError('Deadlock detected: %s%s' % (msg, ''.join(lst)))
for tsk in self.postponed:
deps = [id(x) for x in tsk.run_after if not x.hasrun]
lst.append('%s\t-> %r' % (repr(tsk), deps))
if not deps:
lst.append('\n task %r dependencies are done, check its *runnable_status*?' % id(tsk))
raise Errors.WafError('Deadlock detected: check the task build order%s' % ''.join(lst))
self.deadlock = self.processed self.deadlock = self.processed


if self.frozen:
self.outstanding += self.frozen
self.frozen = []
if self.postponed:
self.outstanding.extend(self.postponed)
self.postponed.clear()
elif not self.count: elif not self.count:
self.outstanding.extend(next(self.biter))
self.total = self.bld.total()
break
if self.incomplete:
for x in self.incomplete:
for k in x.run_after:
if not k.hasrun:
break
else:
# dependency added after the build started without updating revdeps
self.incomplete.remove(x)
self.outstanding.append(x)
break
else:
raise Errors.WafError('Broken revdeps detected on %r' % self.incomplete)
else:
tasks = next(self.biter)
ready, waiting = self.prio_and_split(tasks)
self.outstanding.extend(ready)
self.incomplete.update(waiting)
self.total = self.bld.total()
break


def add_more_tasks(self, tsk): def add_more_tasks(self, tsk):
""" """
Tasks may be added dynamically during the build by binding them to the task :py:attr:`waflib.Task.TaskBase.more_tasks`
If a task provides :py:attr:`waflib.Task.Task.more_tasks`, then the tasks contained
in that list are added to the current build and will be processed before the next build group.


:param tsk: task
:type tsk: :py:attr:`waflib.Task.TaskBase`
The priorities for dependent tasks are not re-calculated globally

:param tsk: task instance
:type tsk: :py:attr:`waflib.Task.Task`
""" """
if getattr(tsk, 'more_tasks', None): if getattr(tsk, 'more_tasks', None):
self.outstanding += tsk.more_tasks
more = set(tsk.more_tasks)
groups_done = set()
def iteri(a, b):
for x in a:
yield x
for x in b:
yield x

# Update the dependency tree
# this assumes that task.run_after values were updated
for x in iteri(self.outstanding, self.incomplete):
for k in x.run_after:
if isinstance(k, Task.TaskGroup):
if k not in groups_done:
groups_done.add(k)
for j in k.prev & more:
self.revdeps[j].add(k)
elif k in more:
self.revdeps[k].add(x)

ready, waiting = self.prio_and_split(tsk.more_tasks)
self.outstanding.extend(ready)
self.incomplete.update(waiting)
self.total += len(tsk.more_tasks) self.total += len(tsk.more_tasks)


def mark_finished(self, tsk):
def try_unfreeze(x):
# DAG ancestors are likely to be in the incomplete set
# This assumes that the run_after contents have not changed
# after the build starts, else a deadlock may occur
if x in self.incomplete:
# TODO remove dependencies to free some memory?
# x.run_after.remove(tsk)
for k in x.run_after:
if not k.hasrun:
break
else:
self.incomplete.remove(x)
self.outstanding.append(x)

if tsk in self.revdeps:
for x in self.revdeps[tsk]:
if isinstance(x, Task.TaskGroup):
x.prev.remove(tsk)
if not x.prev:
for k in x.next:
# TODO necessary optimization?
k.run_after.remove(x)
try_unfreeze(k)
# TODO necessary optimization?
x.next = []
else:
try_unfreeze(x)
del self.revdeps[tsk]

if hasattr(tsk, 'semaphore'):
sem = tsk.semaphore
sem.release(tsk)
while sem.waiting and not sem.is_locked():
# take a frozen task, make it ready to run
x = sem.waiting.pop()
self._add_task(x)

def get_out(self): def get_out(self):
""" """
Obtain one task returned from the task consumers, and update the task count. Add more tasks if necessary through
:py:attr:`waflib.Runner.Parallel.add_more_tasks`.
Waits for a Task that task consumers add to :py:attr:`waflib.Runner.Parallel.out` after execution.
Adds more Tasks if necessary through :py:attr:`waflib.Runner.Parallel.add_more_tasks`.


:rtype: :py:attr:`waflib.Task.TaskBase`
:rtype: :py:attr:`waflib.Task.Task`
""" """
tsk = self.out.get() tsk = self.out.get()
if not self.stop: if not self.stop:
self.add_more_tasks(tsk) self.add_more_tasks(tsk)
self.mark_finished(tsk)

self.count -= 1 self.count -= 1
self.dirty = True self.dirty = True
return tsk return tsk


def add_task(self, tsk): def add_task(self, tsk):
""" """
Pass a task to a consumer.
Enqueue a Task to :py:attr:`waflib.Runner.Parallel.ready` so that consumers can run them.


:param tsk: task
:type tsk: :py:attr:`waflib.Task.TaskBase`
:param tsk: task instance
:type tsk: :py:attr:`waflib.Task.Task`
""" """
try:
self.pool
except AttributeError:
self.init_task_pool()
# TODO change in waf 2.1
self.ready.put(tsk) self.ready.put(tsk)


def init_task_pool(self):
# lazy creation, and set a common pool for all task consumers
pool = self.pool = [get_pool() for i in range(self.numjobs)]
self.ready = Queue(0)
def setq(consumer):
consumer.ready = self.ready
for x in pool:
x.ready.put(setq)
return pool

def free_task_pool(self):
# return the consumers, setting a different queue for each of them
def setq(consumer):
consumer.ready = Queue(0)
self.out.put(self)
try:
pool = self.pool
except AttributeError:
pass
def _add_task(self, tsk):
if hasattr(tsk, 'semaphore'):
sem = tsk.semaphore
try:
sem.acquire(tsk)
except IndexError:
sem.waiting.add(tsk)
return

self.count += 1
self.processed += 1
if self.numjobs == 1:
tsk.log_display(tsk.generator.bld)
try:
self.process_task(tsk)
finally:
self.out.put(tsk)
else: else:
for x in pool:
self.ready.put(setq)
for x in pool:
self.get_out()
for x in pool:
put_pool(x)
self.pool = []
self.add_task(tsk)

def process_task(self, tsk):
"""
Processes a task and attempts to stop the build in case of errors
"""
tsk.process()
if tsk.hasrun != Task.SUCCESS:
self.error_handler(tsk)


def skip(self, tsk): def skip(self, tsk):
"""
Mark a task as skipped/up-to-date
"""
tsk.hasrun = Task.SKIPPED tsk.hasrun = Task.SKIPPED
self.mark_finished(tsk)

def cancel(self, tsk):
"""
Mark a task as failed because of unsatisfiable dependencies
"""
tsk.hasrun = Task.CANCELED
self.mark_finished(tsk)


def error_handler(self, tsk): def error_handler(self, tsk):
""" """
Called when a task cannot be executed. The flag :py:attr:`waflib.Runner.Parallel.stop` is set, unless
the build is executed with::
Called when a task cannot be executed. The flag :py:attr:`waflib.Runner.Parallel.stop` is set,
unless the build is executed with::


$ waf build -k $ waf build -k


:param tsk: task
:type tsk: :py:attr:`waflib.Task.TaskBase`
:param tsk: task instance
:type tsk: :py:attr:`waflib.Task.Task`
""" """
if hasattr(tsk, 'scan') and hasattr(tsk, 'uid'):
# TODO waf 1.9 - this breaks encapsulation
key = (tsk.uid(), 'imp')
try:
del self.bld.task_sigs[key]
except KeyError:
pass
if not self.bld.keep: if not self.bld.keep:
self.stop = True self.stop = True
self.error.append(tsk) self.error.append(tsk)


def task_status(self, tsk): def task_status(self, tsk):
"""
Obtains the task status to decide whether to run it immediately or not.

:return: the exit status, for example :py:attr:`waflib.Task.ASK_LATER`
:rtype: integer
"""
try: try:
return tsk.runnable_status() return tsk.runnable_status()
except Exception: except Exception:
self.processed += 1 self.processed += 1
tsk.err_msg = Utils.ex_stack()
tsk.err_msg = traceback.format_exc()
if not self.stop and self.bld.keep: if not self.stop and self.bld.keep:
self.skip(tsk) self.skip(tsk)
if self.bld.keep == 1: if self.bld.keep == 1:
# if -k stop at the first exception, if -kk try to go as far as possible
# if -k stop on the first exception, if -kk try to go as far as possible
if Logs.verbose > 1 or not self.error: if Logs.verbose > 1 or not self.error:
self.error.append(tsk) self.error.append(tsk)
self.stop = True self.stop = True
@@ -306,17 +442,20 @@ class Parallel(object):
if Logs.verbose > 1: if Logs.verbose > 1:
self.error.append(tsk) self.error.append(tsk)
return Task.EXCEPTION return Task.EXCEPTION
tsk.hasrun = Task.EXCEPTION


tsk.hasrun = Task.EXCEPTION
self.error_handler(tsk) self.error_handler(tsk)

return Task.EXCEPTION return Task.EXCEPTION


def start(self): def start(self):
""" """
Give tasks to :py:class:`waflib.Runner.TaskConsumer` instances until the build finishes or the ``stop`` flag is set.
If only one job is used, then execute the tasks one by one, without consumers.
Obtains Task instances from the BuildContext instance and adds the ones that need to be executed to
:py:class:`waflib.Runner.Parallel.ready` so that the :py:class:`waflib.Runner.Spawner` consumer thread
has them executed. Obtains the executed Tasks back from :py:class:`waflib.Runner.Parallel.out`
and marks the build as failed by setting the ``stop`` flag.
If only one job is used, then executes the tasks one by one, without consumers.
""" """

self.total = self.bld.total() self.total = self.bld.total()


while not self.stop: while not self.stop:
@@ -338,36 +477,135 @@ class Parallel(object):
self.processed += 1 self.processed += 1
continue continue


if self.stop: # stop immediately after a failure was detected
if self.stop: # stop immediately after a failure is detected
break break



st = self.task_status(tsk) st = self.task_status(tsk)
if st == Task.RUN_ME: if st == Task.RUN_ME:
tsk.position = (self.processed, self.total)
self.count += 1
tsk.master = self
self.processed += 1

if self.numjobs == 1:
tsk.process()
else:
self.add_task(tsk)
if st == Task.ASK_LATER:
self._add_task(tsk)
elif st == Task.ASK_LATER:
self.postpone(tsk) self.postpone(tsk)
elif st == Task.SKIP_ME: elif st == Task.SKIP_ME:
self.processed += 1 self.processed += 1
self.skip(tsk) self.skip(tsk)
self.add_more_tasks(tsk) self.add_more_tasks(tsk)
elif st == Task.CANCEL_ME:
# A dependency problem has occurred, and the
# build is most likely run with `waf -k`
if Logs.verbose > 1:
self.error.append(tsk)
self.processed += 1
self.cancel(tsk)


# self.count represents the tasks that have been made available to the consumer threads # self.count represents the tasks that have been made available to the consumer threads
# collect all the tasks after an error else the message may be incomplete # collect all the tasks after an error else the message may be incomplete
while self.error and self.count: while self.error and self.count:
self.get_out() self.get_out()


#print loop
assert (self.count == 0 or self.stop)
self.ready.put(None)
if not self.stop:
assert not self.count
assert not self.postponed
assert not self.incomplete

def prio_and_split(self, tasks):
"""
Label input tasks with priority values, and return a pair containing
the tasks that are ready to run and the tasks that are necessarily
waiting for other tasks to complete.

The priority system is really meant as an optional layer for optimization:
dependency cycles are found quickly, and builds should be more efficient.
A high priority number means that a task is processed first.

This method can be overridden to disable the priority system::

def prio_and_split(self, tasks):
return tasks, []


# free the task pool, if any
self.free_task_pool()
:return: A pair of task lists
:rtype: tuple
"""
# to disable:
#return tasks, []
for x in tasks:
x.visited = 0

reverse = self.revdeps

groups_done = set()
for x in tasks:
for k in x.run_after:
if isinstance(k, Task.TaskGroup):
if k not in groups_done:
groups_done.add(k)
for j in k.prev:
reverse[j].add(k)
else:
reverse[k].add(x)

# the priority number is not the tree depth
def visit(n):
if isinstance(n, Task.TaskGroup):
return sum(visit(k) for k in n.next)

if n.visited == 0:
n.visited = 1

if n in reverse:
rev = reverse[n]
n.prio_order = n.tree_weight + len(rev) + sum(visit(k) for k in rev)
else:
n.prio_order = n.tree_weight

n.visited = 2
elif n.visited == 1:
raise Errors.WafError('Dependency cycle found!')
return n.prio_order

for x in tasks:
if x.visited != 0:
# must visit all to detect cycles
continue
try:
visit(x)
except Errors.WafError:
self.debug_cycles(tasks, reverse)

ready = []
waiting = []
for x in tasks:
for k in x.run_after:
if not k.hasrun:
waiting.append(x)
break
else:
ready.append(x)
return (ready, waiting)

def debug_cycles(self, tasks, reverse):
tmp = {}
for x in tasks:
tmp[x] = 0

def visit(n, acc):
if isinstance(n, Task.TaskGroup):
for k in n.next:
visit(k, acc)
return
if tmp[n] == 0:
tmp[n] = 1
for k in reverse.get(n, []):
visit(k, [n] + acc)
tmp[n] = 2
elif tmp[n] == 1:
lst = []
for tsk in acc:
lst.append(repr(tsk))
if tsk is n:
# exclude prior nodes, we want the minimum cycle
break
raise Errors.WafError('Task dependency cycle in "run_after" constraints: %s' % ''.join(lst))
for x in tasks:
visit(x, [])



+ 190
- 212
waflib/Scripting.py View File

@@ -1,9 +1,11 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
# Thomas Nagy, 2005-2018 (ita)


"Module called for configuring, compiling and installing targets" "Module called for configuring, compiling and installing targets"


from __future__ import with_statement

import os, shlex, shutil, traceback, errno, sys, stat import os, shlex, shutil, traceback, errno, sys, stat
from waflib import Utils, Configure, Logs, Options, ConfigSet, Context, Errors, Build, Node from waflib import Utils, Configure, Logs, Options, ConfigSet, Context, Errors, Build, Node


@@ -24,73 +26,66 @@ def waf_entry_point(current_directory, version, wafdir):
:param wafdir: absolute path representing the directory of the waf library :param wafdir: absolute path representing the directory of the waf library
:type wafdir: string :type wafdir: string
""" """

Logs.init_log() Logs.init_log()


if Context.WAFVERSION != version: if Context.WAFVERSION != version:
Logs.error('Waf script %r and library %r do not match (directory %r)' % (version, Context.WAFVERSION, wafdir))
Logs.error('Waf script %r and library %r do not match (directory %r)', version, Context.WAFVERSION, wafdir)
sys.exit(1) sys.exit(1)


if '--version' in sys.argv:
Context.run_dir = current_directory
ctx = Context.create_context('options')
ctx.curdir = current_directory
ctx.parse_args()
sys.exit(0)
# Store current directory before any chdir
Context.waf_dir = wafdir
Context.run_dir = Context.launch_dir = current_directory
start_dir = current_directory
no_climb = os.environ.get('NOCLIMB')


if len(sys.argv) > 1: if len(sys.argv) > 1:
# os.path.join handles absolute paths in sys.argv[1] accordingly (it discards the previous ones)
# os.path.join handles absolute paths
# if sys.argv[1] is not an absolute path, then it is relative to the current working directory # if sys.argv[1] is not an absolute path, then it is relative to the current working directory
potential_wscript = os.path.join(current_directory, sys.argv[1]) potential_wscript = os.path.join(current_directory, sys.argv[1])
# maybe check if the file is executable
# perhaps extract 'wscript' as a constant
if os.path.basename(potential_wscript) == 'wscript' and os.path.isfile(potential_wscript):
if os.path.basename(potential_wscript) == Context.WSCRIPT_FILE and os.path.isfile(potential_wscript):
# need to explicitly normalize the path, as it may contain extra '/.' # need to explicitly normalize the path, as it may contain extra '/.'
# TODO abspath?
current_directory = os.path.normpath(os.path.dirname(potential_wscript))
path = os.path.normpath(os.path.dirname(potential_wscript))
start_dir = os.path.abspath(path)
no_climb = True
sys.argv.pop(1) sys.argv.pop(1)


Context.waf_dir = wafdir
Context.launch_dir = current_directory
ctx = Context.create_context('options')
(options, commands, env) = ctx.parse_cmd_args(allow_unknown=True)
if options.top:
start_dir = Context.run_dir = Context.top_dir = options.top
no_climb = True
if options.out:
Context.out_dir = options.out


# if 'configure' is in the commands, do not search any further # if 'configure' is in the commands, do not search any further
no_climb = os.environ.get('NOCLIMB', None)
if not no_climb: if not no_climb:
for k in no_climb_commands: for k in no_climb_commands:
for y in sys.argv:
for y in commands:
if y.startswith(k): if y.startswith(k):
no_climb = True no_climb = True
break break


# if --top is provided assume the build started in the top directory
for i, x in enumerate(sys.argv):
# WARNING: this modifies sys.argv
if x.startswith('--top='):
Context.run_dir = Context.top_dir = Utils.sane_path(x[6:])
sys.argv[i] = '--top=' + Context.run_dir
if x.startswith('--out='):
Context.out_dir = Utils.sane_path(x[6:])
sys.argv[i] = '--out=' + Context.out_dir

# try to find a lock file (if the project was configured) # try to find a lock file (if the project was configured)
# at the same time, store the first wscript file seen # at the same time, store the first wscript file seen
cur = current_directory
while cur and not Context.top_dir:
cur = start_dir
while cur:
try: try:
lst = os.listdir(cur) lst = os.listdir(cur)
except OSError: except OSError:
lst = [] lst = []
Logs.error('Directory %r is unreadable!' % cur)
Logs.error('Directory %r is unreadable!', cur)
if Options.lockfile in lst: if Options.lockfile in lst:
env = ConfigSet.ConfigSet() env = ConfigSet.ConfigSet()
try: try:
env.load(os.path.join(cur, Options.lockfile)) env.load(os.path.join(cur, Options.lockfile))
ino = os.stat(cur)[stat.ST_INO] ino = os.stat(cur)[stat.ST_INO]
except Exception:
except EnvironmentError:
pass pass
else: else:
# check if the folder was not moved # check if the folder was not moved
for x in (env.run_dir, env.top_dir, env.out_dir): for x in (env.run_dir, env.top_dir, env.out_dir):
if not x:
continue
if Utils.is_win32: if Utils.is_win32:
if cur == x: if cur == x:
load = True load = True
@@ -106,7 +101,7 @@ def waf_entry_point(current_directory, version, wafdir):
load = True load = True
break break
else: else:
Logs.warn('invalid lock file in %s' % cur)
Logs.warn('invalid lock file in %s', cur)
load = False load = False


if load: if load:
@@ -127,56 +122,62 @@ def waf_entry_point(current_directory, version, wafdir):
if no_climb: if no_climb:
break break


if not Context.run_dir:
if '-h' in sys.argv or '--help' in sys.argv:
Logs.warn('No wscript file found: the help message may be incomplete')
Context.run_dir = current_directory
ctx = Context.create_context('options')
ctx.curdir = current_directory
ctx.parse_args()
wscript = os.path.normpath(os.path.join(Context.run_dir, Context.WSCRIPT_FILE))
if not os.path.exists(wscript):
if options.whelp:
Logs.warn('These are the generic options (no wscript/project found)')
ctx.parser.print_help()
sys.exit(0) sys.exit(0)
Logs.error('Waf: Run from a directory containing a file named %r' % Context.WSCRIPT_FILE)
Logs.error('Waf: Run from a folder containing a %r file (or try -h for the generic options)', Context.WSCRIPT_FILE)
sys.exit(1) sys.exit(1)


try: try:
os.chdir(Context.run_dir) os.chdir(Context.run_dir)
except OSError: except OSError:
Logs.error('Waf: The folder %r is unreadable' % Context.run_dir)
Logs.error('Waf: The folder %r is unreadable', Context.run_dir)
sys.exit(1) sys.exit(1)


try: try:
set_main_module(os.path.normpath(os.path.join(Context.run_dir, Context.WSCRIPT_FILE)))
set_main_module(wscript)
except Errors.WafError as e: except Errors.WafError as e:
Logs.pprint('RED', e.verbose_msg) Logs.pprint('RED', e.verbose_msg)
Logs.error(str(e)) Logs.error(str(e))
sys.exit(1) sys.exit(1)
except Exception as e: except Exception as e:
Logs.error('Waf: The wscript in %r is unreadable' % Context.run_dir, e)
Logs.error('Waf: The wscript in %r is unreadable', Context.run_dir)
traceback.print_exc(file=sys.stdout) traceback.print_exc(file=sys.stdout)
sys.exit(2) sys.exit(2)


"""
import cProfile, pstats
cProfile.runctx("from waflib import Scripting; Scripting.run_commands()", {}, {}, 'profi.txt')
p = pstats.Stats('profi.txt')
p.sort_stats('time').print_stats(75) # or 'cumulative'
"""
try:
run_commands()
except Errors.WafError as e:
if Logs.verbose > 1:
Logs.pprint('RED', e.verbose_msg)
Logs.error(e.msg)
sys.exit(1)
except SystemExit:
raise
except Exception as e:
traceback.print_exc(file=sys.stdout)
sys.exit(2)
except KeyboardInterrupt:
Logs.pprint('RED', 'Interrupted')
sys.exit(68)
#"""
if options.profile:
import cProfile, pstats
cProfile.runctx('from waflib import Scripting; Scripting.run_commands()', {}, {}, 'profi.txt')
p = pstats.Stats('profi.txt')
p.sort_stats('time').print_stats(75) # or 'cumulative'
else:
try:
try:
run_commands()
except:
if options.pdb:
import pdb
type, value, tb = sys.exc_info()
traceback.print_exc()
pdb.post_mortem(tb)
else:
raise
except Errors.WafError as e:
if Logs.verbose > 1:
Logs.pprint('RED', e.verbose_msg)
Logs.error(e.msg)
sys.exit(1)
except SystemExit:
raise
except Exception as e:
traceback.print_exc(file=sys.stdout)
sys.exit(2)
except KeyboardInterrupt:
Logs.pprint('RED', 'Interrupted')
sys.exit(68)


def set_main_module(file_path): def set_main_module(file_path):
""" """
@@ -197,7 +198,7 @@ def set_main_module(file_path):
name = obj.__name__ name = obj.__name__
if not name in Context.g_module.__dict__: if not name in Context.g_module.__dict__:
setattr(Context.g_module, name, obj) setattr(Context.g_module, name, obj)
for k in (update, dist, distclean, distcheck):
for k in (dist, distclean, distcheck):
set_def(k) set_def(k)
# add dummy init and shutdown functions if they're not defined # add dummy init and shutdown functions if they're not defined
if not 'init' in Context.g_module.__dict__: if not 'init' in Context.g_module.__dict__:
@@ -209,36 +210,20 @@ def set_main_module(file_path):


def parse_options(): def parse_options():
""" """
Parse the command-line options and initialize the logging system.
Parses the command-line options and initialize the logging system.
Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization. Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization.
""" """
Context.create_context('options').execute()

for var in Options.envvars:
(name, value) = var.split('=', 1)
os.environ[name.strip()] = value

ctx = Context.create_context('options')
ctx.execute()
if not Options.commands: if not Options.commands:
Options.commands = [default_cmd]
Options.commands = [x for x in Options.commands if x != 'options'] # issue 1076

# process some internal Waf options
Logs.verbose = Options.options.verbose
#Logs.init_log()

if Options.options.zones:
Logs.zones = Options.options.zones.split(',')
if not Logs.verbose:
Logs.verbose = 1
elif Logs.verbose > 0:
Logs.zones = ['runner']

if Logs.verbose > 2:
Logs.zones = ['*']
Options.commands.append(default_cmd)
if Options.options.whelp:
ctx.parser.print_help()
sys.exit(0)


def run_command(cmd_name): def run_command(cmd_name):
""" """
Execute a single command. Called by :py:func:`waflib.Scripting.run_commands`.
Executes a single Waf command. Called by :py:func:`waflib.Scripting.run_commands`.


:param cmd_name: command to execute, like ``build`` :param cmd_name: command to execute, like ``build``
:type cmd_name: string :type cmd_name: string
@@ -256,7 +241,7 @@ def run_command(cmd_name):


def run_commands(): def run_commands():
""" """
Execute the commands that were given on the command-line, and the other options
Execute the Waf commands that were given on the command-line, and the other options
Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization, and executed Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization, and executed
after :py:func:`waflib.Scripting.parse_options`. after :py:func:`waflib.Scripting.parse_options`.
""" """
@@ -265,18 +250,11 @@ def run_commands():
while Options.commands: while Options.commands:
cmd_name = Options.commands.pop(0) cmd_name = Options.commands.pop(0)
ctx = run_command(cmd_name) ctx = run_command(cmd_name)
Logs.info('%r finished successfully (%s)' % (cmd_name, str(ctx.log_timer)))
Logs.info('%r finished successfully (%s)', cmd_name, ctx.log_timer)
run_command('shutdown') run_command('shutdown')


########################################################################################### ###########################################################################################


def _can_distclean(name):
# WARNING: this method may disappear anytime
for k in '.o .moc .exe'.split():
if name.endswith(k):
return True
return False

def distclean_dir(dirname): def distclean_dir(dirname):
""" """
Distclean function called in the particular case when:: Distclean function called in the particular case when::
@@ -288,12 +266,12 @@ def distclean_dir(dirname):
""" """
for (root, dirs, files) in os.walk(dirname): for (root, dirs, files) in os.walk(dirname):
for f in files: for f in files:
if _can_distclean(f):
if f.endswith(('.o', '.moc', '.exe')):
fname = os.path.join(root, f) fname = os.path.join(root, f)
try: try:
os.remove(fname) os.remove(fname)
except OSError: except OSError:
Logs.warn('Could not remove %r' % fname)
Logs.warn('Could not remove %r', fname)


for x in (Context.DBFILE, 'config.log'): for x in (Context.DBFILE, 'config.log'):
try: try:
@@ -307,40 +285,53 @@ def distclean_dir(dirname):
pass pass


def distclean(ctx): def distclean(ctx):
'''removes the build directory'''
lst = os.listdir('.')
for f in lst:
if f == Options.lockfile:
try:
proj = ConfigSet.ConfigSet(f)
except IOError:
Logs.warn('Could not read %r' % f)
continue
'''removes build folders and data'''


if proj['out_dir'] != proj['top_dir']:
try:
shutil.rmtree(proj['out_dir'])
except IOError:
pass
except OSError as e:
if e.errno != errno.ENOENT:
Logs.warn('Could not remove %r' % proj['out_dir'])
else:
distclean_dir(proj['out_dir'])
def remove_and_log(k, fun):
try:
fun(k)
except EnvironmentError as e:
if e.errno != errno.ENOENT:
Logs.warn('Could not remove %r', k)


for k in (proj['out_dir'], proj['top_dir'], proj['run_dir']):
p = os.path.join(k, Options.lockfile)
try:
os.remove(p)
except OSError as e:
if e.errno != errno.ENOENT:
Logs.warn('Could not remove %r' % p)
# remove waf cache folders on the top-level
if not Options.commands:
for k in os.listdir('.'):
for x in '.waf-2 waf-2 .waf3-2 waf3-2'.split():
if k.startswith(x):
remove_and_log(k, shutil.rmtree)

# remove a build folder, if any
cur = '.'
if ctx.options.no_lock_in_top:
cur = ctx.options.out

try:
lst = os.listdir(cur)
except OSError:
Logs.warn('Could not read %r', cur)
return

if Options.lockfile in lst:
f = os.path.join(cur, Options.lockfile)
try:
env = ConfigSet.ConfigSet(f)
except EnvironmentError:
Logs.warn('Could not read %r', f)
return

if not env.out_dir or not env.top_dir:
Logs.warn('Invalid lock file %r', f)
return

if env.out_dir == env.top_dir:
distclean_dir(env.out_dir)
else:
remove_and_log(env.out_dir, shutil.rmtree)


# remove local waf cache folders
if not Options.commands:
for x in '.waf-1. waf-1. .waf3-1. waf3-1.'.split():
if f.startswith(x):
shutil.rmtree(f, ignore_errors=True)
for k in (env.out_dir, env.top_dir, env.run_dir):
p = os.path.join(k, Options.lockfile)
remove_and_log(p, os.remove)


class Dist(Context.Context): class Dist(Context.Context):
'''creates an archive containing the project source code''' '''creates an archive containing the project source code'''
@@ -358,7 +349,7 @@ class Dist(Context.Context):


def archive(self): def archive(self):
""" """
Create the archive.
Creates the source archive.
""" """
import tarfile import tarfile


@@ -378,14 +369,14 @@ class Dist(Context.Context):
files = self.get_files() files = self.get_files()


if self.algo.startswith('tar.'): if self.algo.startswith('tar.'):
tar = tarfile.open(arch_name, 'w:' + self.algo.replace('tar.', ''))
tar = tarfile.open(node.abspath(), 'w:' + self.algo.replace('tar.', ''))


for x in files: for x in files:
self.add_tar_file(x, tar) self.add_tar_file(x, tar)
tar.close() tar.close()
elif self.algo == 'zip': elif self.algo == 'zip':
import zipfile import zipfile
zip = zipfile.ZipFile(arch_name, 'w', compression=zipfile.ZIP_DEFLATED)
zip = zipfile.ZipFile(node.abspath(), 'w', compression=zipfile.ZIP_DEFLATED)


for x in files: for x in files:
archive_name = self.get_base_name() + '/' + x.path_from(self.base_path) archive_name = self.get_base_name() + '/' + x.path_from(self.base_path)
@@ -395,26 +386,30 @@ class Dist(Context.Context):
self.fatal('Valid algo types are tar.bz2, tar.gz, tar.xz or zip') self.fatal('Valid algo types are tar.bz2, tar.gz, tar.xz or zip')


try: try:
from hashlib import sha1 as sha
from hashlib import sha256
except ImportError: except ImportError:
from sha import sha
try:
digest = " (sha=%r)" % sha(node.read()).hexdigest()
except Exception:
digest = '' digest = ''
else:
digest = ' (sha256=%r)' % sha256(node.read(flags='rb')).hexdigest()


Logs.info('New archive created: %s%s' % (self.arch_name, digest))
Logs.info('New archive created: %s%s', self.arch_name, digest)


def get_tar_path(self, node): def get_tar_path(self, node):
""" """
return the path to use for a node in the tar archive, the purpose of this
Return the path to use for a node in the tar archive, the purpose of this
is to let subclases resolve symbolic links or to change file names is to let subclases resolve symbolic links or to change file names

:return: absolute path
:rtype: string
""" """
return node.abspath() return node.abspath()


def add_tar_file(self, x, tar): def add_tar_file(self, x, tar):
""" """
Add a file to the tar archive. Transform symlinks into files if the files lie out of the project tree.
Adds a file to the tar archive. Symlinks are not verified.

:param x: file path
:param tar: tar file object
""" """
p = self.get_tar_path(x) p = self.get_tar_path(x)
tinfo = tar.gettarinfo(name=p, arcname=self.get_tar_prefix() + '/' + x.path_from(self.base_path)) tinfo = tar.gettarinfo(name=p, arcname=self.get_tar_prefix() + '/' + x.path_from(self.base_path))
@@ -423,15 +418,18 @@ class Dist(Context.Context):
tinfo.uname = 'root' tinfo.uname = 'root'
tinfo.gname = 'root' tinfo.gname = 'root'


fu = None
try:
fu = open(p, 'rb')
tar.addfile(tinfo, fileobj=fu)
finally:
if fu:
fu.close()
if os.path.isfile(p):
with open(p, 'rb') as f:
tar.addfile(tinfo, fileobj=f)
else:
tar.addfile(tinfo)


def get_tar_prefix(self): def get_tar_prefix(self):
"""
Returns the base path for files added into the archive tar file

:rtype: string
"""
try: try:
return self.tar_prefix return self.tar_prefix
except AttributeError: except AttributeError:
@@ -439,7 +437,8 @@ class Dist(Context.Context):


def get_arch_name(self): def get_arch_name(self):
""" """
Return the name of the archive to create. Change the default value by setting *arch_name*::
Returns the archive file name.
Set the attribute *arch_name* to change the default value::


def dist(ctx): def dist(ctx):
ctx.arch_name = 'ctx.tar.bz2' ctx.arch_name = 'ctx.tar.bz2'
@@ -454,7 +453,7 @@ class Dist(Context.Context):


def get_base_name(self): def get_base_name(self):
""" """
Return the default name of the main directory in the archive, which is set to *appname-version*.
Returns the default name of the main directory in the archive, which is set to *appname-version*.
Set the attribute *base_name* to change the default value:: Set the attribute *base_name* to change the default value::


def dist(ctx): def dist(ctx):
@@ -472,8 +471,8 @@ class Dist(Context.Context):


def get_excl(self): def get_excl(self):
""" """
Return the patterns to exclude for finding the files in the top-level directory. Set the attribute *excl*
to change the default value::
Returns the patterns to exclude for finding the files in the top-level directory.
Set the attribute *excl* to change the default value::


def dist(ctx): def dist(ctx):
ctx.excl = 'build **/*.o **/*.class' ctx.excl = 'build **/*.o **/*.class'
@@ -483,7 +482,7 @@ class Dist(Context.Context):
try: try:
return self.excl return self.excl
except AttributeError: except AttributeError:
self.excl = Node.exclude_regs + ' **/waf-1.8.* **/.waf-1.8* **/waf3-1.8.* **/.waf3-1.8* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
self.excl = Node.exclude_regs + ' **/waf-2.* **/.waf-2.* **/waf3-2.* **/.waf3-2.* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
if Context.out_dir: if Context.out_dir:
nd = self.root.find_node(Context.out_dir) nd = self.root.find_node(Context.out_dir)
if nd: if nd:
@@ -492,13 +491,13 @@ class Dist(Context.Context):


def get_files(self): def get_files(self):
""" """
The files to package are searched automatically by :py:func:`waflib.Node.Node.ant_glob`. Set
*files* to prevent this behaviour::
Files to package are searched automatically by :py:func:`waflib.Node.Node.ant_glob`.
Set *files* to prevent this behaviour::


def dist(ctx): def dist(ctx):
ctx.files = ctx.path.find_node('wscript') ctx.files = ctx.path.find_node('wscript')


The files are searched from the directory 'base_path', to change it, set::
Files are also searched from the directory 'base_path', to change it, set::


def dist(ctx): def dist(ctx):
ctx.base_path = path ctx.base_path = path
@@ -511,18 +510,12 @@ class Dist(Context.Context):
files = self.base_path.ant_glob('**/*', excl=self.get_excl()) files = self.base_path.ant_glob('**/*', excl=self.get_excl())
return files return files



def dist(ctx): def dist(ctx):
'''makes a tarball for redistributing the sources''' '''makes a tarball for redistributing the sources'''
pass pass


class DistCheck(Dist): class DistCheck(Dist):
"""
Create an archive of the project, and try to build the project in a temporary directory::

$ waf distcheck
"""

"""creates an archive with dist, then tries to build it"""
fun = 'distcheck' fun = 'distcheck'
cmd = 'distcheck' cmd = 'distcheck'


@@ -534,32 +527,30 @@ class DistCheck(Dist):
self.archive() self.archive()
self.check() self.check()


def make_distcheck_cmd(self, tmpdir):
cfg = []
if Options.options.distcheck_args:
cfg = shlex.split(Options.options.distcheck_args)
else:
cfg = [x for x in sys.argv if x.startswith('-')]
cmd = [sys.executable, sys.argv[0], 'configure', 'build', 'install', 'uninstall', '--destdir=' + tmpdir] + cfg
return cmd

def check(self): def check(self):
""" """
Create the archive, uncompress it and try to build the project
Creates the archive, uncompresses it and tries to build the project
""" """
import tempfile, tarfile import tempfile, tarfile


t = None
try:
t = tarfile.open(self.get_arch_name())
with tarfile.open(self.get_arch_name()) as t:
for x in t: for x in t:
t.extract(x) t.extract(x)
finally:
if t:
t.close()

cfg = []

if Options.options.distcheck_args:
cfg = shlex.split(Options.options.distcheck_args)
else:
cfg = [x for x in sys.argv if x.startswith('-')]


instdir = tempfile.mkdtemp('.inst', self.get_base_name()) instdir = tempfile.mkdtemp('.inst', self.get_base_name())
ret = Utils.subprocess.Popen([sys.executable, sys.argv[0], 'configure', 'install', 'uninstall', '--destdir=' + instdir] + cfg, cwd=self.get_base_name()).wait()
cmd = self.make_distcheck_cmd(instdir)
ret = Utils.subprocess.Popen(cmd, cwd=self.get_base_name()).wait()
if ret: if ret:
raise Errors.WafError('distcheck failed with code %i' % ret)
raise Errors.WafError('distcheck failed with code %r' % ret)


if os.path.exists(instdir): if os.path.exists(instdir):
raise Errors.WafError('distcheck succeeded, but files were left in %s' % instdir) raise Errors.WafError('distcheck succeeded, but files were left in %s' % instdir)
@@ -571,33 +562,14 @@ def distcheck(ctx):
'''checks if the project compiles (tarball from 'dist')''' '''checks if the project compiles (tarball from 'dist')'''
pass pass


def update(ctx):
lst = Options.options.files
if lst:
lst = lst.split(',')
else:
path = os.path.join(Context.waf_dir, 'waflib', 'extras')
lst = [x for x in Utils.listdir(path) if x.endswith('.py')]
for x in lst:
tool = x.replace('.py', '')
if not tool:
continue
try:
dl = Configure.download_tool
except AttributeError:
ctx.fatal('The command "update" is dangerous; include the tool "use_config" in your project!')
try:
dl(tool, force=True, ctx=ctx)
except Errors.WafError:
Logs.error('Could not find the tool %r in the remote repository' % x)
else:
Logs.warn('Updated %r' % tool)

def autoconfigure(execute_method): def autoconfigure(execute_method):
""" """
Decorator used to set the commands that can be configured automatically
Decorator that enables context commands to run *configure* as needed.
""" """
def execute(self): def execute(self):
"""
Wraps :py:func:`waflib.Context.Context.execute` on the context class
"""
if not Configure.autoconfig: if not Configure.autoconfig:
return execute_method(self) return execute_method(self)


@@ -605,7 +577,7 @@ def autoconfigure(execute_method):
do_config = False do_config = False
try: try:
env.load(os.path.join(Context.top_dir, Options.lockfile)) env.load(os.path.join(Context.top_dir, Options.lockfile))
except Exception:
except EnvironmentError:
Logs.warn('Configuring the project') Logs.warn('Configuring the project')
do_config = True do_config = True
else: else:
@@ -613,15 +585,21 @@ def autoconfigure(execute_method):
do_config = True do_config = True
else: else:
h = 0 h = 0
for f in env['files']:
h = Utils.h_list((h, Utils.readf(f, 'rb')))
do_config = h != env.hash
for f in env.files:
try:
h = Utils.h_list((h, Utils.readf(f, 'rb')))
except EnvironmentError:
do_config = True
break
else:
do_config = h != env.hash


if do_config: if do_config:
cmd = env['config_cmd'] or 'configure'
cmd = env.config_cmd or 'configure'
if Configure.autoconfig == 'clobber': if Configure.autoconfig == 'clobber':
tmp = Options.options.__dict__ tmp = Options.options.__dict__
Options.options.__dict__ = env.options
if env.options:
Options.options.__dict__ = env.options
try: try:
run_command(cmd) run_command(cmd)
finally: finally:


+ 653
- 488
waflib/Task.py
File diff suppressed because it is too large
View File


+ 205
- 148
waflib/TaskGen.py View File

@@ -1,18 +1,16 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
# Thomas Nagy, 2005-2018 (ita)


""" """
Task generators Task generators


The class :py:class:`waflib.TaskGen.task_gen` encapsulates the creation of task objects (low-level code) The class :py:class:`waflib.TaskGen.task_gen` encapsulates the creation of task objects (low-level code)
The instances can have various parameters, but the creation of task nodes (Task.py) The instances can have various parameters, but the creation of task nodes (Task.py)
is always postponed. To achieve this, various methods are called from the method "apply"


is deferred. To achieve this, various methods are called from the method "apply"
""" """


import copy, re, os
import copy, re, os, functools
from waflib import Task, Utils, Logs, Errors, ConfigSet, Node from waflib import Task, Utils, Logs, Errors, ConfigSet, Node


feats = Utils.defaultdict(set) feats = Utils.defaultdict(set)
@@ -22,7 +20,7 @@ HEADER_EXTS = ['.h', '.hpp', '.hxx', '.hh']


class task_gen(object): class task_gen(object):
""" """
Instances of this class create :py:class:`waflib.Task.TaskBase` when
Instances of this class create :py:class:`waflib.Task.Task` when
calling the method :py:meth:`waflib.TaskGen.task_gen.post` from the main thread. calling the method :py:meth:`waflib.TaskGen.task_gen.post` from the main thread.
A few notes: A few notes:


@@ -34,42 +32,28 @@ class task_gen(object):
""" """


mappings = Utils.ordered_iter_dict() mappings = Utils.ordered_iter_dict()
"""Mappings are global file extension mappings, they are retrieved in the order of definition"""
"""Mappings are global file extension mappings that are retrieved in the order of definition"""


prec = Utils.defaultdict(list)
"""Dict holding the precedence rules for task generator methods"""
prec = Utils.defaultdict(set)
"""Dict that holds the precedence execution rules for task generator methods"""


def __init__(self, *k, **kw): def __init__(self, *k, **kw):
""" """
The task generator objects predefine various attributes (source, target) for possible
Task generator objects predefine various attributes (source, target) for possible
processing by process_rule (make-like rules) or process_source (extensions, misc methods) processing by process_rule (make-like rules) or process_source (extensions, misc methods)


The tasks are stored on the attribute 'tasks'. They are created by calling methods
listed in self.meths *or* referenced in the attribute features
A topological sort is performed to ease the method re-use.
Tasks are stored on the attribute 'tasks'. They are created by calling methods
listed in ``self.meths`` or referenced in the attribute ``features``
A topological sort is performed to execute the methods in correct order.


The extra key/value elements passed in kw are set as attributes
The extra key/value elements passed in ``kw`` are set as attributes
""" """

# so we will have to play with directed acyclic graphs
# detect cycles, etc
self.source = ''
self.source = []
self.target = '' self.target = ''


self.meths = [] self.meths = []
""" """
List of method names to execute (it is usually a good idea to avoid touching this)
"""

self.prec = Utils.defaultdict(list)
"""
Precedence table for sorting the methods in self.meths
"""

self.mappings = {}
"""
List of mappings {extension -> function} for processing files by extension
This is very rarely used, so we do not use an ordered dict here
List of method names to execute (internal)
""" """


self.features = [] self.features = []
@@ -79,7 +63,7 @@ class task_gen(object):


self.tasks = [] self.tasks = []
""" """
List of tasks created.
Tasks created are added to this list
""" """


if not 'bld' in kw: if not 'bld' in kw:
@@ -92,31 +76,50 @@ class task_gen(object):
self.env = self.bld.env.derive() self.env = self.bld.env.derive()
self.path = self.bld.path # emulate chdir when reading scripts self.path = self.bld.path # emulate chdir when reading scripts


# provide a unique id
# Provide a unique index per folder
# This is part of a measure to prevent output file name collisions
path = self.path.abspath()
try: try:
self.idx = self.bld.idx[id(self.path)] = self.bld.idx.get(id(self.path), 0) + 1
self.idx = self.bld.idx[path] = self.bld.idx.get(path, 0) + 1
except AttributeError: except AttributeError:
self.bld.idx = {} self.bld.idx = {}
self.idx = self.bld.idx[id(self.path)] = 1
self.idx = self.bld.idx[path] = 1

# Record the global task generator count
try:
self.tg_idx_count = self.bld.tg_idx_count = self.bld.tg_idx_count + 1
except AttributeError:
self.tg_idx_count = self.bld.tg_idx_count = 1


for key, val in kw.items(): for key, val in kw.items():
setattr(self, key, val) setattr(self, key, val)


def __str__(self): def __str__(self):
"""for debugging purposes"""
"""Debugging helper"""
return "<task_gen %r declared in %s>" % (self.name, self.path.abspath()) return "<task_gen %r declared in %s>" % (self.name, self.path.abspath())


def __repr__(self): def __repr__(self):
"""for debugging purposes"""
"""Debugging helper"""
lst = [] lst = []
for x in self.__dict__.keys():
for x in self.__dict__:
if x not in ('env', 'bld', 'compiled_tasks', 'tasks'): if x not in ('env', 'bld', 'compiled_tasks', 'tasks'):
lst.append("%s=%s" % (x, repr(getattr(self, x)))) lst.append("%s=%s" % (x, repr(getattr(self, x))))
return "bld(%s) in %s" % (", ".join(lst), self.path.abspath()) return "bld(%s) in %s" % (", ".join(lst), self.path.abspath())


def get_cwd(self):
"""
Current working directory for the task generator, defaults to the build directory.
This is still used in a few places but it should disappear at some point as the classes
define their own working directory.

:rtype: :py:class:`waflib.Node.Node`
"""
return self.bld.bldnode

def get_name(self): def get_name(self):
""" """
If not set, the name is computed from the target name::
If the attribute ``name`` is not set on the instance,
the name is computed from the target name::


def build(bld): def build(bld):
x = bld(name='foo') x = bld(name='foo')
@@ -143,18 +146,20 @@ class task_gen(object):


def to_list(self, val): def to_list(self, val):
""" """
Ensure that a parameter is a list
Ensures that a parameter is a list, see :py:func:`waflib.Utils.to_list`


:type val: string or list of string :type val: string or list of string
:param val: input to return as a list :param val: input to return as a list
:rtype: list :rtype: list
""" """
if isinstance(val, str): return val.split()
else: return val
if isinstance(val, str):
return val.split()
else:
return val


def post(self): def post(self):
""" """
Create task objects. The following operations are performed:
Creates tasks for this task generators. The following operations are performed:


#. The body of this method is called only once and sets the attribute ``posted`` #. The body of this method is called only once and sets the attribute ``posted``
#. The attribute ``features`` is used to add more methods in ``self.meths`` #. The attribute ``features`` is used to add more methods in ``self.meths``
@@ -162,27 +167,25 @@ class task_gen(object):
#. The methods are then executed in order #. The methods are then executed in order
#. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks` #. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks`
""" """

# we could add a decorator to let the task run once, but then python 2.3 will be difficult to support
if getattr(self, 'posted', None): if getattr(self, 'posted', None):
#error("OBJECT ALREADY POSTED" + str( self))
return False return False
self.posted = True self.posted = True


keys = set(self.meths) keys = set(self.meths)
keys.update(feats['*'])


# add the methods listed in the features # add the methods listed in the features
self.features = Utils.to_list(self.features) self.features = Utils.to_list(self.features)
for x in self.features + ['*']:
for x in self.features:
st = feats[x] st = feats[x]
if not st:
if not x in Task.classes:
Logs.warn('feature %r does not exist - bind at least one method to it' % x)
keys.update(list(st)) # ironpython 2.7 wants the cast to list
if st:
keys.update(st)
elif not x in Task.classes:
Logs.warn('feature %r does not exist - bind at least one method to it?', x)


# copy the precedence table # copy the precedence table
prec = {} prec = {}
prec_tbl = self.prec or task_gen.prec
prec_tbl = self.prec
for x in prec_tbl: for x in prec_tbl:
if x in keys: if x in keys:
prec[x] = prec_tbl[x] prec[x] = prec_tbl[x]
@@ -191,17 +194,19 @@ class task_gen(object):
tmp = [] tmp = []
for a in keys: for a in keys:
for x in prec.values(): for x in prec.values():
if a in x: break
if a in x:
break
else: else:
tmp.append(a) tmp.append(a)


tmp.sort()
tmp.sort(reverse=True)


# topological sort # topological sort
out = [] out = []
while tmp: while tmp:
e = tmp.pop() e = tmp.pop()
if e in keys: out.append(e)
if e in keys:
out.append(e)
try: try:
nlst = prec[e] nlst = prec[e]
except KeyError: except KeyError:
@@ -214,47 +219,52 @@ class task_gen(object):
break break
else: else:
tmp.append(x) tmp.append(x)
tmp.sort(reverse=True)


if prec: if prec:
txt = '\n'.join(['- %s after %s' % (k, repr(v)) for k, v in prec.items()])
raise Errors.WafError('Cycle detected in the method execution\n%s' % txt)
out.reverse()
buf = ['Cycle detected in the method execution:']
for k, v in prec.items():
buf.append('- %s after %s' % (k, [x for x in v if x in prec]))
raise Errors.WafError('\n'.join(buf))
self.meths = out self.meths = out


# then we run the methods in order # then we run the methods in order
Logs.debug('task_gen: posting %s %d' % (self, id(self)))
Logs.debug('task_gen: posting %s %d', self, id(self))
for x in out: for x in out:
try: try:
v = getattr(self, x) v = getattr(self, x)
except AttributeError: except AttributeError:
raise Errors.WafError('%r is not a valid task generator method' % x) raise Errors.WafError('%r is not a valid task generator method' % x)
Logs.debug('task_gen: -> %s (%d)' % (x, id(self)))
Logs.debug('task_gen: -> %s (%d)', x, id(self))
v() v()


Logs.debug('task_gen: posted %s' % self.name)
Logs.debug('task_gen: posted %s', self.name)
return True return True


def get_hook(self, node): def get_hook(self, node):
""" """
Returns the ``@extension`` method to call for a Node of a particular extension.

:param node: Input file to process :param node: Input file to process
:type node: :py:class:`waflib.Tools.Node.Node` :type node: :py:class:`waflib.Tools.Node.Node`
:return: A method able to process the input node by looking at the extension :return: A method able to process the input node by looking at the extension
:rtype: function :rtype: function
""" """
name = node.name name = node.name
if self.mappings:
for k in self.mappings:
for k in self.mappings:
try:
if name.endswith(k): if name.endswith(k):
return self.mappings[k] return self.mappings[k]
for k in task_gen.mappings:
if name.endswith(k):
return task_gen.mappings[k]
raise Errors.WafError("File %r has no mapping in %r (have you forgotten to load a waf tool?)" % (node, task_gen.mappings.keys()))
except TypeError:
# regexps objects
if k.match(name):
return self.mappings[k]
keys = list(self.mappings.keys())
raise Errors.WafError("File %r has no mapping in %r (load a waf tool?)" % (node, keys))


def create_task(self, name, src=None, tgt=None, **kw): def create_task(self, name, src=None, tgt=None, **kw):
""" """
Wrapper for creating task instances. The classes are retrieved from the
context class if possible, then from the global dict Task.classes.
Creates task instances.


:param name: task class name :param name: task class name
:type name: string :type name: string
@@ -263,7 +273,7 @@ class task_gen(object):
:param tgt: output nodes :param tgt: output nodes
:type tgt: list of :py:class:`waflib.Tools.Node.Node` :type tgt: list of :py:class:`waflib.Tools.Node.Node`
:return: A task object :return: A task object
:rtype: :py:class:`waflib.Task.TaskBase`
:rtype: :py:class:`waflib.Task.Task`
""" """
task = Task.classes[name](env=self.env.derive(), generator=self) task = Task.classes[name](env=self.env.derive(), generator=self)
if src: if src:
@@ -276,7 +286,7 @@ class task_gen(object):


def clone(self, env): def clone(self, env):
""" """
Make a copy of a task generator. Once the copy is made, it is necessary to ensure that the
Makes a copy of a task generator. Once the copy is made, it is necessary to ensure that the
it does not create the same output files as the original, or the same files may it does not create the same output files as the original, or the same files may
be compiled several times. be compiled several times.


@@ -305,7 +315,7 @@ class task_gen(object):
def declare_chain(name='', rule=None, reentrant=None, color='BLUE', def declare_chain(name='', rule=None, reentrant=None, color='BLUE',
ext_in=[], ext_out=[], before=[], after=[], decider=None, scan=None, install_path=None, shell=False): ext_in=[], ext_out=[], before=[], after=[], decider=None, scan=None, install_path=None, shell=False):
""" """
Create a new mapping and a task class for processing files by extension.
Creates a new mapping and a task class for processing files by extension.
See Tools/flex.py for an example. See Tools/flex.py for an example.


:param name: name for the task class :param name: name for the task class
@@ -324,7 +334,7 @@ def declare_chain(name='', rule=None, reentrant=None, color='BLUE',
:type before: list of string :type before: list of string
:param after: execute instances of this task after classes of the given names :param after: execute instances of this task after classes of the given names
:type after: list of string :type after: list of string
:param decider: if present, use it to create the output nodes for the task
:param decider: if present, function that returns a list of output file extensions (overrides ext_out for output files, but not for the build order)
:type decider: function :type decider: function
:param scan: scanner function for the task :param scan: scanner function for the task
:type scan: function :type scan: function
@@ -338,14 +348,13 @@ def declare_chain(name='', rule=None, reentrant=None, color='BLUE',
cls = Task.task_factory(name, rule, color=color, ext_in=ext_in, ext_out=ext_out, before=before, after=after, scan=scan, shell=shell) cls = Task.task_factory(name, rule, color=color, ext_in=ext_in, ext_out=ext_out, before=before, after=after, scan=scan, shell=shell)


def x_file(self, node): def x_file(self, node):
ext = decider and decider(self, node) or cls.ext_out
if ext_in: if ext_in:
_ext_in = ext_in[0] _ext_in = ext_in[0]


tsk = self.create_task(name, node) tsk = self.create_task(name, node)
cnt = 0 cnt = 0


keys = set(self.mappings.keys()) | set(self.__class__.mappings.keys())
ext = decider(self, node) if decider else cls.ext_out
for x in ext: for x in ext:
k = node.change_ext(x, ext_in=_ext_in) k = node.change_ext(x, ext_in=_ext_in)
tsk.outputs.append(k) tsk.outputs.append(k)
@@ -355,14 +364,14 @@ def declare_chain(name='', rule=None, reentrant=None, color='BLUE',
self.source.append(k) self.source.append(k)
else: else:
# reinject downstream files into the build # reinject downstream files into the build
for y in keys: # ~ nfile * nextensions :-/
for y in self.mappings: # ~ nfile * nextensions :-/
if k.name.endswith(y): if k.name.endswith(y):
self.source.append(k) self.source.append(k)
break break
cnt += 1 cnt += 1


if install_path: if install_path:
self.bld.install_files(install_path, tsk.outputs)
self.install_task = self.add_install_files(install_to=install_path, install_from=tsk.outputs)
return tsk return tsk


for x in cls.ext_in: for x in cls.ext_in:
@@ -371,7 +380,7 @@ def declare_chain(name='', rule=None, reentrant=None, color='BLUE',


def taskgen_method(func): def taskgen_method(func):
""" """
Decorator: register a method as a task generator method.
Decorator that registers method as a task generator method.
The function must accept a task generator as first parameter:: The function must accept a task generator as first parameter::


from waflib.TaskGen import taskgen_method from waflib.TaskGen import taskgen_method
@@ -388,8 +397,8 @@ def taskgen_method(func):


def feature(*k): def feature(*k):
""" """
Decorator: register a task generator method that will be executed when the
object attribute 'feature' contains the corresponding key(s)::
Decorator that registers a task generator method that will be executed when the
object attribute ``feature`` contains the corresponding key(s)::


from waflib.Task import feature from waflib.Task import feature
@feature('myfeature') @feature('myfeature')
@@ -410,7 +419,7 @@ def feature(*k):


def before_method(*k): def before_method(*k):
""" """
Decorator: register a task generator method which will be executed
Decorator that registera task generator method which will be executed
before the functions of given name(s):: before the functions of given name(s)::


from waflib.TaskGen import feature, before from waflib.TaskGen import feature, before
@@ -430,16 +439,14 @@ def before_method(*k):
def deco(func): def deco(func):
setattr(task_gen, func.__name__, func) setattr(task_gen, func.__name__, func)
for fun_name in k: for fun_name in k:
if not func.__name__ in task_gen.prec[fun_name]:
task_gen.prec[fun_name].append(func.__name__)
#task_gen.prec[fun_name].sort()
task_gen.prec[func.__name__].add(fun_name)
return func return func
return deco return deco
before = before_method before = before_method


def after_method(*k): def after_method(*k):
""" """
Decorator: register a task generator method which will be executed
Decorator that registers a task generator method which will be executed
after the functions of given name(s):: after the functions of given name(s)::


from waflib.TaskGen import feature, after from waflib.TaskGen import feature, after
@@ -459,16 +466,14 @@ def after_method(*k):
def deco(func): def deco(func):
setattr(task_gen, func.__name__, func) setattr(task_gen, func.__name__, func)
for fun_name in k: for fun_name in k:
if not fun_name in task_gen.prec[func.__name__]:
task_gen.prec[func.__name__].append(fun_name)
#task_gen.prec[func.__name__].sort()
task_gen.prec[fun_name].add(func.__name__)
return func return func
return deco return deco
after = after_method after = after_method


def extension(*k): def extension(*k):
""" """
Decorator: register a task generator method which will be invoked during
Decorator that registers a task generator method which will be invoked during
the processing of source files for the extension given:: the processing of source files for the extension given::


from waflib import Task from waflib import Task
@@ -487,14 +492,11 @@ def extension(*k):
return func return func
return deco return deco


# ---------------------------------------------------------------
# The following methods are task generator methods commonly used
# they are almost examples, the rest of waf core does not depend on them

@taskgen_method @taskgen_method
def to_nodes(self, lst, path=None): def to_nodes(self, lst, path=None):
""" """
Convert the input list into a list of nodes.
Flatten the input list of string/nodes/lists into a list of nodes.

It is used by :py:func:`waflib.TaskGen.process_source` and :py:func:`waflib.TaskGen.process_rule`. It is used by :py:func:`waflib.TaskGen.process_source` and :py:func:`waflib.TaskGen.process_rule`.
It is designed for source files, for folders, see :py:func:`waflib.Tools.ccroot.to_incnodes`: It is designed for source files, for folders, see :py:func:`waflib.Tools.ccroot.to_incnodes`:


@@ -511,21 +513,23 @@ def to_nodes(self, lst, path=None):
if isinstance(lst, Node.Node): if isinstance(lst, Node.Node):
lst = [lst] lst = [lst]


# either a list or a string, convert to a list of nodes
for x in Utils.to_list(lst): for x in Utils.to_list(lst):
if isinstance(x, str): if isinstance(x, str):
node = find(x) node = find(x)
else:
elif hasattr(x, 'name'):
node = x node = x
else:
tmp.extend(self.to_nodes(x))
continue
if not node: if not node:
raise Errors.WafError("source not found: %r in %r" % (x, self))
raise Errors.WafError('source not found: %r in %r' % (x, self))
tmp.append(node) tmp.append(node)
return tmp return tmp


@feature('*') @feature('*')
def process_source(self): def process_source(self):
""" """
Process each element in the attribute ``source`` by extension.
Processes each element in the attribute ``source`` by extension.


#. The *source* list is converted through :py:meth:`waflib.TaskGen.to_nodes` to a list of :py:class:`waflib.Node.Node` first. #. The *source* list is converted through :py:meth:`waflib.TaskGen.to_nodes` to a list of :py:class:`waflib.Node.Node` first.
#. File extensions are mapped to methods having the signature: ``def meth(self, node)`` by :py:meth:`waflib.TaskGen.extension` #. File extensions are mapped to methods having the signature: ``def meth(self, node)`` by :py:meth:`waflib.TaskGen.extension`
@@ -541,10 +545,29 @@ def process_source(self):
@before_method('process_source') @before_method('process_source')
def process_rule(self): def process_rule(self):
""" """
Process the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled::
Processes the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled::


def build(bld): def build(bld):
bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt') bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt')

Main attributes processed:

* rule: command to execute, it can be a tuple of strings for multiple commands
* chmod: permissions for the resulting files (integer value such as Utils.O755)
* shell: set to False to execute the command directly (default is True to use a shell)
* scan: scanner function
* vars: list of variables to trigger rebuilds, such as CFLAGS
* cls_str: string to display when executing the task
* cls_keyword: label to display when executing the task
* cache_rule: by default, try to re-use similar classes, set to False to disable
* source: list of Node or string objects representing the source files required by this task
* target: list of Node or string objects representing the files that this task creates
* cwd: current working directory (Node or string)
* stdout: standard output, set to None to prevent waf from capturing the text
* stderr: standard error, set to None to prevent waf from capturing the text
* timeout: timeout for command execution (Python 3)
* always: whether to always run the command (False by default)
* deep_inputs: whether the task must depend on the input file tasks too (False by default)
""" """
if not getattr(self, 'rule', None): if not getattr(self, 'rule', None):
return return
@@ -558,28 +581,55 @@ def process_rule(self):
except AttributeError: except AttributeError:
cache = self.bld.cache_rule_attr = {} cache = self.bld.cache_rule_attr = {}


chmod = getattr(self, 'chmod', None)
shell = getattr(self, 'shell', True)
color = getattr(self, 'color', 'BLUE')
scan = getattr(self, 'scan', None)
_vars = getattr(self, 'vars', [])
cls_str = getattr(self, 'cls_str', None)
cls_keyword = getattr(self, 'cls_keyword', None)
use_cache = getattr(self, 'cache_rule', 'True')
deep_inputs = getattr(self, 'deep_inputs', False)

scan_val = has_deps = hasattr(self, 'deps')
if scan:
scan_val = id(scan)

key = Utils.h_list((name, self.rule, chmod, shell, color, cls_str, cls_keyword, scan_val, _vars, deep_inputs))

cls = None cls = None
if getattr(self, 'cache_rule', 'True'):
if use_cache:
try: try:
cls = cache[(name, self.rule)]
cls = cache[key]
except KeyError: except KeyError:
pass pass
if not cls: if not cls:

rule = self.rule rule = self.rule
if hasattr(self, 'chmod'):
if chmod is not None:
def chmod_fun(tsk): def chmod_fun(tsk):
for x in tsk.outputs: for x in tsk.outputs:
os.chmod(x.abspath(), self.chmod)
rule = (self.rule, chmod_fun)

cls = Task.task_factory(name, rule,
getattr(self, 'vars', []),
shell=getattr(self, 'shell', True), color=getattr(self, 'color', 'BLUE'),
scan = getattr(self, 'scan', None))
if getattr(self, 'scan', None):
os.chmod(x.abspath(), tsk.generator.chmod)
if isinstance(rule, tuple):
rule = list(rule)
rule.append(chmod_fun)
rule = tuple(rule)
else:
rule = (rule, chmod_fun)

cls = Task.task_factory(name, rule, _vars, shell=shell, color=color)

if cls_str:
setattr(cls, '__str__', self.cls_str)

if cls_keyword:
setattr(cls, 'keyword', self.cls_keyword)

if deep_inputs:
Task.deep_inputs(cls)

if scan:
cls.scan = self.scan cls.scan = self.scan
elif getattr(self, 'deps', None):
elif has_deps:
def scan(self): def scan(self):
nodes = [] nodes = []
for x in self.generator.to_list(getattr(self.generator, 'deps', None)): for x in self.generator.to_list(getattr(self.generator, 'deps', None)):
@@ -590,26 +640,26 @@ def process_rule(self):
return [nodes, []] return [nodes, []]
cls.scan = scan cls.scan = scan


if getattr(self, 'update_outputs', None):
Task.update_outputs(cls)
if use_cache:
cache[key] = cls


if getattr(self, 'always', None):
Task.always_run(cls)
# now create one instance
tsk = self.create_task(name)


for x in ('after', 'before', 'ext_in', 'ext_out'):
setattr(cls, x, getattr(self, x, []))
for x in ('after', 'before', 'ext_in', 'ext_out'):
setattr(tsk, x, getattr(self, x, []))


if getattr(self, 'cache_rule', 'True'):
cache[(name, self.rule)] = cls
if hasattr(self, 'stdout'):
tsk.stdout = self.stdout


if getattr(self, 'cls_str', None):
setattr(cls, '__str__', self.cls_str)
if hasattr(self, 'stderr'):
tsk.stderr = self.stderr


if getattr(self, 'cls_keyword', None):
setattr(cls, 'keyword', self.cls_keyword)
if getattr(self, 'timeout', None):
tsk.timeout = self.timeout


# now create one instance
tsk = self.create_task(name)
if getattr(self, 'always', None):
tsk.always_run = True


if getattr(self, 'target', None): if getattr(self, 'target', None):
if isinstance(self.target, str): if isinstance(self.target, str):
@@ -623,7 +673,8 @@ def process_rule(self):
x.parent.mkdir() # if a node was given, create the required folders x.parent.mkdir() # if a node was given, create the required folders
tsk.outputs.append(x) tsk.outputs.append(x)
if getattr(self, 'install_path', None): if getattr(self, 'install_path', None):
self.bld.install_files(self.install_path, tsk.outputs, chmod=getattr(self, 'chmod', Utils.O644))
self.install_task = self.add_install_files(install_to=self.install_path,
install_from=tsk.outputs, chmod=getattr(self, 'chmod', Utils.O644))


if getattr(self, 'source', None): if getattr(self, 'source', None):
tsk.inputs = self.to_nodes(self.source) tsk.inputs = self.to_nodes(self.source)
@@ -633,10 +684,16 @@ def process_rule(self):
if getattr(self, 'cwd', None): if getattr(self, 'cwd', None):
tsk.cwd = self.cwd tsk.cwd = self.cwd


if isinstance(tsk.run, functools.partial):
# Python documentation says: "partial objects defined in classes
# behave like static methods and do not transform into bound
# methods during instance attribute look-up."
tsk.run = functools.partial(tsk.run, tsk)

@feature('seq') @feature('seq')
def sequence_order(self): def sequence_order(self):
""" """
Add a strict sequential constraint between the tasks generated by task generators.
Adds a strict sequential constraint between the tasks generated by task generators.
It works because task generators are posted in order. It works because task generators are posted in order.
It will not post objects which belong to other folders. It will not post objects which belong to other folders.


@@ -674,7 +731,7 @@ re_m4 = re.compile('@(\w+)@', re.M)


class subst_pc(Task.Task): class subst_pc(Task.Task):
""" """
Create *.pc* files from *.pc.in*. The task is executed whenever an input variable used
Creates *.pc* files from *.pc.in*. The task is executed whenever an input variable used
in the substitution changes. in the substitution changes.
""" """


@@ -690,6 +747,8 @@ class subst_pc(Task.Task):
if getattr(self.generator, 'is_copy', None): if getattr(self.generator, 'is_copy', None):
for i, x in enumerate(self.outputs): for i, x in enumerate(self.outputs):
x.write(self.inputs[i].read('rb'), 'wb') x.write(self.inputs[i].read('rb'), 'wb')
stat = os.stat(self.inputs[i].abspath()) # Preserve mtime of the copy
os.utime(self.outputs[i].abspath(), (stat.st_atime, stat.st_mtime))
self.force_permissions() self.force_permissions()
return None return None


@@ -699,11 +758,11 @@ class subst_pc(Task.Task):
self.force_permissions() self.force_permissions()
return ret return ret


code = self.inputs[0].read(encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
code = self.inputs[0].read(encoding=getattr(self.generator, 'encoding', 'latin-1'))
if getattr(self.generator, 'subst_fun', None): if getattr(self.generator, 'subst_fun', None):
code = self.generator.subst_fun(self, code) code = self.generator.subst_fun(self, code)
if code is not None: if code is not None:
self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'latin-1'))
self.force_permissions() self.force_permissions()
return None return None


@@ -718,7 +777,6 @@ class subst_pc(Task.Task):
lst.append(g(1)) lst.append(g(1))
return "%%(%s)s" % g(1) return "%%(%s)s" % g(1)
return '' return ''
global re_m4
code = getattr(self.generator, 're_m4', re_m4).sub(repl, code) code = getattr(self.generator, 're_m4', re_m4).sub(repl, code)


try: try:
@@ -734,12 +792,14 @@ class subst_pc(Task.Task):
d[x] = tmp d[x] = tmp


code = code % d code = code % d
self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
self.generator.bld.raw_deps[self.uid()] = self.dep_vars = lst
self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'latin-1'))
self.generator.bld.raw_deps[self.uid()] = lst


# make sure the signature is updated # make sure the signature is updated
try: delattr(self, 'cache_sig')
except AttributeError: pass
try:
delattr(self, 'cache_sig')
except AttributeError:
pass


self.force_permissions() self.force_permissions()


@@ -771,13 +831,14 @@ class subst_pc(Task.Task):
@extension('.pc.in') @extension('.pc.in')
def add_pcfile(self, node): def add_pcfile(self, node):
""" """
Process *.pc.in* files to *.pc*. Install the results to ``${PREFIX}/lib/pkgconfig/``
Processes *.pc.in* files to *.pc*. Installs the results to ``${PREFIX}/lib/pkgconfig/`` by default


def build(bld): def build(bld):
bld(source='foo.pc.in', install_path='${LIBDIR}/pkgconfig/') bld(source='foo.pc.in', install_path='${LIBDIR}/pkgconfig/')
""" """
tsk = self.create_task('subst_pc', node, node.change_ext('.pc', '.pc.in')) tsk = self.create_task('subst_pc', node, node.change_ext('.pc', '.pc.in'))
self.bld.install_files(getattr(self, 'install_path', '${LIBDIR}/pkgconfig/'), tsk.outputs)
self.install_task = self.add_install_files(
install_to=getattr(self, 'install_path', '${LIBDIR}/pkgconfig/'), install_from=tsk.outputs)


class subst(subst_pc): class subst(subst_pc):
pass pass
@@ -786,7 +847,7 @@ class subst(subst_pc):
@before_method('process_source', 'process_rule') @before_method('process_source', 'process_rule')
def process_subst(self): def process_subst(self):
""" """
Define a transformation that substitutes the contents of *source* files to *target* files::
Defines a transformation that substitutes the contents of *source* files to *target* files::


def build(bld): def build(bld):
bld( bld(
@@ -821,7 +882,6 @@ def process_subst(self):
a = self.path.find_node(x) a = self.path.find_node(x)
b = self.path.get_bld().make_node(y) b = self.path.get_bld().make_node(y)
if not os.path.isfile(b.abspath()): if not os.path.isfile(b.abspath()):
b.sig = None
b.parent.mkdir() b.parent.mkdir()
else: else:
if isinstance(x, str): if isinstance(x, str):
@@ -836,25 +896,22 @@ def process_subst(self):
if not a: if not a:
raise Errors.WafError('could not find %r for %r' % (x, self)) raise Errors.WafError('could not find %r for %r' % (x, self))


has_constraints = False
tsk = self.create_task('subst', a, b) tsk = self.create_task('subst', a, b)
for k in ('after', 'before', 'ext_in', 'ext_out'): for k in ('after', 'before', 'ext_in', 'ext_out'):
val = getattr(self, k, None) val = getattr(self, k, None)
if val: if val:
has_constraints = True
setattr(tsk, k, val) setattr(tsk, k, val)


# paranoid safety measure for the general case foo.in->foo.h with ambiguous dependencies # paranoid safety measure for the general case foo.in->foo.h with ambiguous dependencies
if not has_constraints:
global HEADER_EXTS
for xt in HEADER_EXTS:
if b.name.endswith(xt):
tsk.before = [k for k in ('c', 'cxx') if k in Task.classes]
break
for xt in HEADER_EXTS:
if b.name.endswith(xt):
tsk.ext_in = tsk.ext_in + ['.h']
break


inst_to = getattr(self, 'install_path', None) inst_to = getattr(self, 'install_path', None)
if inst_to: if inst_to:
self.bld.install_files(inst_to, b, chmod=getattr(self, 'chmod', Utils.O644))
self.install_task = self.add_install_files(install_to=inst_to,
install_from=b, chmod=getattr(self, 'chmod', Utils.O644))


self.source = [] self.source = []



+ 1
- 1
waflib/Tools/__init__.py View File

@@ -1,3 +1,3 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
# Thomas Nagy, 2005-2018 (ita)

+ 2
- 2
waflib/Tools/ar.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh) # Ralf Habacker, 2006 (rh)


""" """
@@ -16,7 +16,7 @@ def find_ar(conf):
conf.load('ar') conf.load('ar')


def configure(conf): def configure(conf):
"""Find the ar program and set the default flags in ``conf.env.ARFLAGS``"""
"""Finds the ar program and sets the default flags in ``conf.env.ARFLAGS``"""
conf.find_program('ar', var='AR') conf.find_program('ar', var='AR')
conf.add_os_flags('ARFLAGS') conf.add_os_flags('ARFLAGS')
if not conf.env.ARFLAGS: if not conf.env.ARFLAGS:


+ 7
- 7
waflib/Tools/c.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
# Thomas Nagy, 2006-2018 (ita)


"Base for c programs/libraries" "Base for c programs/libraries"


@@ -10,30 +10,30 @@ from waflib.Tools.ccroot import link_task, stlink_task


@TaskGen.extension('.c') @TaskGen.extension('.c')
def c_hook(self, node): def c_hook(self, node):
"Bind the c file extension to the creation of a :py:class:`waflib.Tools.c.c` instance"
"Binds the c file extensions create :py:class:`waflib.Tools.c.c` instances"
if not self.env.CC and self.env.CXX: if not self.env.CC and self.env.CXX:
return self.create_compiled_task('cxx', node) return self.create_compiled_task('cxx', node)
return self.create_compiled_task('c', node) return self.create_compiled_task('c', node)


class c(Task.Task): class c(Task.Task):
"Compile C files into object files"
run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath()}'
"Compiles C files into object files"
run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
vars = ['CCDEPS'] # unused variable to depend on, just in case vars = ['CCDEPS'] # unused variable to depend on, just in case
ext_in = ['.h'] # set the build order easily by using ext_out=['.h'] ext_in = ['.h'] # set the build order easily by using ext_out=['.h']
scan = c_preproc.scan scan = c_preproc.scan


class cprogram(link_task): class cprogram(link_task):
"Link object files into a c program"
"Links object files into c programs"
run_str = '${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}' run_str = '${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
ext_out = ['.bin'] ext_out = ['.bin']
vars = ['LINKDEPS'] vars = ['LINKDEPS']
inst_to = '${BINDIR}' inst_to = '${BINDIR}'


class cshlib(cprogram): class cshlib(cprogram):
"Link object files into a c shared library"
"Links object files into c shared libraries"
inst_to = '${LIBDIR}' inst_to = '${LIBDIR}'


class cstlib(stlink_task): class cstlib(stlink_task):
"Link object files into a c static library"
"Links object files into a c static libraries"
pass # do not remove pass # do not remove



+ 21
- 12
waflib/Tools/c_aliases.py View File

@@ -9,6 +9,8 @@ from waflib.Configure import conf


def get_extensions(lst): def get_extensions(lst):
""" """
Returns the file extensions for the list of files given as input

:param lst: files to process :param lst: files to process
:list lst: list of string or :py:class:`waflib.Node.Node` :list lst: list of string or :py:class:`waflib.Node.Node`
:return: list of file extensions :return: list of file extensions
@@ -16,17 +18,15 @@ def get_extensions(lst):
""" """
ret = [] ret = []
for x in Utils.to_list(lst): for x in Utils.to_list(lst):
try:
if not isinstance(x, str):
x = x.name
ret.append(x[x.rfind('.') + 1:])
except Exception:
pass
if not isinstance(x, str):
x = x.name
ret.append(x[x.rfind('.') + 1:])
return ret return ret


def sniff_features(**kw): def sniff_features(**kw):
""" """
Look at the source files and return the features for a task generator (mainly cc and cxx)::
Computes and returns the features required for a task generator by
looking at the file extensions. This aimed for C/C++ mainly::


snif_features(source=['foo.c', 'foo.cxx'], type='shlib') snif_features(source=['foo.c', 'foo.cxx'], type='shlib')
# returns ['cxx', 'c', 'cxxshlib', 'cshlib'] # returns ['cxx', 'c', 'cxxshlib', 'cshlib']
@@ -39,7 +39,7 @@ def sniff_features(**kw):
:rtype: list of string :rtype: list of string
""" """
exts = get_extensions(kw['source']) exts = get_extensions(kw['source'])
type = kw['_type']
typ = kw['typ']
feats = [] feats = []


# watch the order, cxx will have the precedence # watch the order, cxx will have the precedence
@@ -63,18 +63,27 @@ def sniff_features(**kw):
feats.append('java') feats.append('java')
return 'java' return 'java'


if type in ('program', 'shlib', 'stlib'):
if typ in ('program', 'shlib', 'stlib'):
will_link = False will_link = False
for x in feats: for x in feats:
if x in ('cxx', 'd', 'fc', 'c'): if x in ('cxx', 'd', 'fc', 'c'):
feats.append(x + type)
feats.append(x + typ)
will_link = True will_link = True
if not will_link and not kw.get('features', []): if not will_link and not kw.get('features', []):
raise Errors.WafError('Cannot link from %r, try passing eg: features="c cprogram"?' % kw) raise Errors.WafError('Cannot link from %r, try passing eg: features="c cprogram"?' % kw)
return feats return feats


def set_features(kw, _type):
kw['_type'] = _type
def set_features(kw, typ):
"""
Inserts data in the input dict *kw* based on existing data and on the type of target
required (typ).

:param kw: task generator parameters
:type kw: dict
:param typ: type of target
:type typ: string
"""
kw['typ'] = typ
kw['features'] = Utils.to_list(kw.get('features', [])) + Utils.to_list(sniff_features(**kw)) kw['features'] = Utils.to_list(kw.get('features', [])) + Utils.to_list(sniff_features(**kw))


@conf @conf


+ 366
- 281
waflib/Tools/c_config.py
File diff suppressed because it is too large
View File


+ 18
- 36
waflib/Tools/c_osx.py View File

@@ -1,13 +1,13 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy 2008-2010
# Thomas Nagy 2008-2018 (ita)


""" """
MacOSX related tools MacOSX related tools
""" """


import os, shutil, platform import os, shutil, platform
from waflib import Task, Utils, Errors
from waflib import Task, Utils
from waflib.TaskGen import taskgen_method, feature, after_method, before_method from waflib.TaskGen import taskgen_method, feature, after_method, before_method


app_info = ''' app_info = '''
@@ -37,8 +37,8 @@ def set_macosx_deployment_target(self):
""" """
see WAF issue 285 and also and also http://trac.macports.org/ticket/17059 see WAF issue 285 and also and also http://trac.macports.org/ticket/17059
""" """
if self.env['MACOSX_DEPLOYMENT_TARGET']:
os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env['MACOSX_DEPLOYMENT_TARGET']
if self.env.MACOSX_DEPLOYMENT_TARGET:
os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env.MACOSX_DEPLOYMENT_TARGET
elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ: elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ:
if Utils.unversioned_sys_platform() == 'darwin': if Utils.unversioned_sys_platform() == 'darwin':
os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2]) os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2])
@@ -46,7 +46,7 @@ def set_macosx_deployment_target(self):
@taskgen_method @taskgen_method
def create_bundle_dirs(self, name, out): def create_bundle_dirs(self, name, out):
""" """
Create bundle folders, used by :py:func:`create_task_macplist` and :py:func:`create_task_macapp`
Creates bundle folders, used by :py:func:`create_task_macplist` and :py:func:`create_task_macapp`
""" """
dir = out.parent.find_or_declare(name) dir = out.parent.find_or_declare(name)
dir.mkdir() dir.mkdir()
@@ -78,7 +78,7 @@ def create_task_macapp(self):
bld.env.MACAPP = True bld.env.MACAPP = True
bld.shlib(source='a.c', target='foo') bld.shlib(source='a.c', target='foo')
""" """
if self.env['MACAPP'] or getattr(self, 'mac_app', False):
if self.env.MACAPP or getattr(self, 'mac_app', False):
out = self.link_task.outputs[0] out = self.link_task.outputs[0]


name = bundle_name_for_output(out) name = bundle_name_for_output(out)
@@ -88,7 +88,7 @@ def create_task_macapp(self):


self.apptask = self.create_task('macapp', self.link_task.outputs, n1) self.apptask = self.create_task('macapp', self.link_task.outputs, n1)
inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/MacOS/' % name inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/MacOS/' % name
self.bld.install_files(inst_to, n1, chmod=Utils.O755)
self.add_install_files(install_to=inst_to, install_from=n1, chmod=Utils.O755)


if getattr(self, 'mac_files', None): if getattr(self, 'mac_files', None):
# this only accepts files; they will be installed as seen from mac_files_root # this only accepts files; they will be installed as seen from mac_files_root
@@ -102,38 +102,19 @@ def create_task_macapp(self):
for node in self.to_nodes(self.mac_files): for node in self.to_nodes(self.mac_files):
relpath = node.path_from(mac_files_root or node.parent) relpath = node.path_from(mac_files_root or node.parent)
self.create_task('macapp', node, res_dir.make_node(relpath)) self.create_task('macapp', node, res_dir.make_node(relpath))
self.bld.install_as(os.path.join(inst_to, relpath), node)

if getattr(self, 'mac_resources', None):
# TODO remove in waf 1.9
res_dir = n1.parent.parent.make_node('Resources')
inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Resources' % name
for x in self.to_list(self.mac_resources):
node = self.path.find_node(x)
if not node:
raise Errors.WafError('Missing mac_resource %r in %r' % (x, self))

parent = node.parent
if os.path.isdir(node.abspath()):
nodes = node.ant_glob('**')
else:
nodes = [node]
for node in nodes:
rel = node.path_from(parent)
self.create_task('macapp', node, res_dir.make_node(rel))
self.bld.install_as(inst_to + '/%s' % rel, node)
self.add_install_as(install_to=os.path.join(inst_to, relpath), install_from=node)


if getattr(self.bld, 'is_install', None): if getattr(self.bld, 'is_install', None):
# disable the normal binary installation
# disable regular binary installation
self.install_task.hasrun = Task.SKIP_ME self.install_task.hasrun = Task.SKIP_ME


@feature('cprogram', 'cxxprogram') @feature('cprogram', 'cxxprogram')
@after_method('apply_link') @after_method('apply_link')
def create_task_macplist(self): def create_task_macplist(self):
""" """
Create a :py:class:`waflib.Tools.c_osx.macplist` instance.
Creates a :py:class:`waflib.Tools.c_osx.macplist` instance.
""" """
if self.env['MACAPP'] or getattr(self, 'mac_app', False):
if self.env.MACAPP or getattr(self, 'mac_app', False):
out = self.link_task.outputs[0] out = self.link_task.outputs[0]


name = bundle_name_for_output(out) name = bundle_name_for_output(out)
@@ -160,7 +141,7 @@ def create_task_macplist(self):
plisttask.code = app_info plisttask.code = app_info


inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/' % name inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/' % name
self.bld.install_files(inst_to, n1)
self.add_install_files(install_to=inst_to, install_from=n1)


@feature('cshlib', 'cxxshlib') @feature('cshlib', 'cxxshlib')
@before_method('apply_link', 'propagate_uselib_vars') @before_method('apply_link', 'propagate_uselib_vars')
@@ -177,9 +158,9 @@ def apply_bundle(self):
bld.env.MACBUNDLE = True bld.env.MACBUNDLE = True
bld.shlib(source='a.c', target='foo') bld.shlib(source='a.c', target='foo')
""" """
if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False):
self.env['LINKFLAGS_cshlib'] = self.env['LINKFLAGS_cxxshlib'] = [] # disable the '-dynamiclib' flag
self.env['cshlib_PATTERN'] = self.env['cxxshlib_PATTERN'] = self.env['macbundle_PATTERN']
if self.env.MACBUNDLE or getattr(self, 'mac_bundle', False):
self.env.LINKFLAGS_cshlib = self.env.LINKFLAGS_cxxshlib = [] # disable the '-dynamiclib' flag
self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.macbundle_PATTERN
use = self.use = self.to_list(getattr(self, 'use', [])) use = self.use = self.to_list(getattr(self, 'use', []))
if not 'MACBUNDLE' in use: if not 'MACBUNDLE' in use:
use.append('MACBUNDLE') use.append('MACBUNDLE')
@@ -188,7 +169,7 @@ app_dirs = ['Contents', 'Contents/MacOS', 'Contents/Resources']


class macapp(Task.Task): class macapp(Task.Task):
""" """
Create mac applications
Creates mac applications
""" """
color = 'PINK' color = 'PINK'
def run(self): def run(self):
@@ -197,7 +178,7 @@ class macapp(Task.Task):


class macplist(Task.Task): class macplist(Task.Task):
""" """
Create plist files
Creates plist files
""" """
color = 'PINK' color = 'PINK'
ext_in = ['.bin'] ext_in = ['.bin']
@@ -209,3 +190,4 @@ class macplist(Task.Task):
context = getattr(self, 'context', {}) context = getattr(self, 'context', {})
txt = txt.format(**context) txt = txt.format(**context)
self.outputs[0].write(txt) self.outputs[0].write(txt)


+ 260
- 210
waflib/Tools/c_preproc.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
# Thomas Nagy, 2006-2018 (ita)


""" """
C/C++ preprocessor for finding dependencies C/C++ preprocessor for finding dependencies
@@ -28,11 +28,13 @@ A dumb preprocessor is also available in the tool *c_dumbpreproc*


import re, string, traceback import re, string, traceback
from waflib import Logs, Utils, Errors from waflib import Logs, Utils, Errors
from waflib.Logs import debug, error


class PreprocError(Errors.WafError): class PreprocError(Errors.WafError):
pass pass


FILE_CACHE_SIZE = 100000
LINE_CACHE_SIZE = 100000

POPFILE = '-' POPFILE = '-'
"Constant representing a special token used in :py:meth:`waflib.Tools.c_preproc.c_parser.start` iteration to switch to a header read previously" "Constant representing a special token used in :py:meth:`waflib.Tools.c_preproc.c_parser.start` iteration to switch to a header read previously"


@@ -42,15 +44,15 @@ recursion_limit = 150
go_absolute = False go_absolute = False
"Set to True to track headers on files in /usr/include, else absolute paths are ignored (but it becomes very slow)" "Set to True to track headers on files in /usr/include, else absolute paths are ignored (but it becomes very slow)"


standard_includes = ['/usr/include']
standard_includes = ['/usr/local/include', '/usr/include']
if Utils.is_win32: if Utils.is_win32:
standard_includes = [] standard_includes = []


use_trigraphs = 0 use_trigraphs = 0
"""Apply trigraph rules (False by default)""" """Apply trigraph rules (False by default)"""


# obsolete, do not use
strict_quotes = 0 strict_quotes = 0
"""Reserve the "#include <>" quotes for system includes (do not search for those includes). False by default."""


g_optrans = { g_optrans = {
'not':'!', 'not':'!',
@@ -69,7 +71,7 @@ g_optrans = {


# ignore #warning and #error # ignore #warning and #error
re_lines = re.compile( re_lines = re.compile(
'^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',
'^[ \t]*(?:#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',
re.IGNORECASE | re.MULTILINE) re.IGNORECASE | re.MULTILINE)
"""Match #include lines""" """Match #include lines"""


@@ -137,54 +139,22 @@ skipped = 's'


def repl(m): def repl(m):
"""Replace function used with :py:attr:`waflib.Tools.c_preproc.re_cpp`""" """Replace function used with :py:attr:`waflib.Tools.c_preproc.re_cpp`"""
s = m.group(0)
if s.startswith('/'):
s = m.group()
if s[0] == '/':
return ' ' return ' '
return s return s


def filter_comments(filename):
"""
Filter the comments from a c/h file, and return the preprocessor lines.
The regexps :py:attr:`waflib.Tools.c_preproc.re_cpp`, :py:attr:`waflib.Tools.c_preproc.re_nl` and :py:attr:`waflib.Tools.c_preproc.re_lines` are used internally.

:return: the preprocessor directives as a list of (keyword, line)
:rtype: a list of string pairs
"""
# return a list of tuples : keyword, line
code = Utils.readf(filename)
if use_trigraphs:
for (a, b) in trig_def: code = code.split(a).join(b)
code = re_nl.sub('', code)
code = re_cpp.sub(repl, code)
return [(m.group(2), m.group(3)) for m in re.finditer(re_lines, code)]

prec = {} prec = {}
""" """
Operator precendence rules required for parsing expressions of the form::
Operator precedence rules required for parsing expressions of the form::


#if 1 && 2 != 0 #if 1 && 2 != 0
""" """
ops = ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ','] ops = ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ',']
for x in range(len(ops)):
syms = ops[x]
for x, syms in enumerate(ops):
for u in syms.split(): for u in syms.split():
prec[u] = x prec[u] = x


def trimquotes(s):
"""
Remove the single quotes around an expression::

trimquotes("'test'") == "test"

:param s: expression to transform
:type s: string
:rtype: string
"""
if not s: return ''
s = s.rstrip()
if s[0] == "'" and s[-1] == "'": return s[1:-1]
return s

def reduce_nums(val_1, val_2, val_op): def reduce_nums(val_1, val_2, val_op):
""" """
Apply arithmetic rules to compute a result Apply arithmetic rules to compute a result
@@ -200,32 +170,56 @@ def reduce_nums(val_1, val_2, val_op):
#print val_1, val_2, val_op #print val_1, val_2, val_op


# now perform the operation, make certain a and b are numeric # now perform the operation, make certain a and b are numeric
try: a = 0 + val_1
except TypeError: a = int(val_1)
try: b = 0 + val_2
except TypeError: b = int(val_2)
try:
a = 0 + val_1
except TypeError:
a = int(val_1)
try:
b = 0 + val_2
except TypeError:
b = int(val_2)


d = val_op d = val_op
if d == '%': c = a%b
elif d=='+': c = a+b
elif d=='-': c = a-b
elif d=='*': c = a*b
elif d=='/': c = a/b
elif d=='^': c = a^b
elif d=='==': c = int(a == b)
elif d=='|' or d == 'bitor': c = a|b
elif d=='||' or d == 'or' : c = int(a or b)
elif d=='&' or d == 'bitand': c = a&b
elif d=='&&' or d == 'and': c = int(a and b)
elif d=='!=' or d == 'not_eq': c = int(a != b)
elif d=='^' or d == 'xor': c = int(a^b)
elif d=='<=': c = int(a <= b)
elif d=='<': c = int(a < b)
elif d=='>': c = int(a > b)
elif d=='>=': c = int(a >= b)
elif d=='<<': c = a<<b
elif d=='>>': c = a>>b
else: c = 0
if d == '%':
c = a % b
elif d=='+':
c = a + b
elif d=='-':
c = a - b
elif d=='*':
c = a * b
elif d=='/':
c = a / b
elif d=='^':
c = a ^ b
elif d=='==':
c = int(a == b)
elif d=='|' or d == 'bitor':
c = a | b
elif d=='||' or d == 'or' :
c = int(a or b)
elif d=='&' or d == 'bitand':
c = a & b
elif d=='&&' or d == 'and':
c = int(a and b)
elif d=='!=' or d == 'not_eq':
c = int(a != b)
elif d=='^' or d == 'xor':
c = int(a^b)
elif d=='<=':
c = int(a <= b)
elif d=='<':
c = int(a < b)
elif d=='>':
c = int(a > b)
elif d=='>=':
c = int(a >= b)
elif d=='<<':
c = a << b
elif d=='>>':
c = a >> b
else:
c = 0
return c return c


def get_num(lst): def get_num(lst):
@@ -237,7 +231,8 @@ def get_num(lst):
:return: a pair containing the number and the rest of the list :return: a pair containing the number and the rest of the list
:rtype: tuple(value, list) :rtype: tuple(value, list)
""" """
if not lst: raise PreprocError("empty list for get_num")
if not lst:
raise PreprocError('empty list for get_num')
(p, v) = lst[0] (p, v) = lst[0]
if p == OP: if p == OP:
if v == '(': if v == '(':
@@ -255,7 +250,7 @@ def get_num(lst):
count_par += 1 count_par += 1
i += 1 i += 1
else: else:
raise PreprocError("rparen expected %r" % lst)
raise PreprocError('rparen expected %r' % lst)


(num, _) = get_term(lst[1:i]) (num, _) = get_term(lst[1:i])
return (num, lst[i+1:]) return (num, lst[i+1:])
@@ -272,14 +267,14 @@ def get_num(lst):
num, lst = get_num(lst[1:]) num, lst = get_num(lst[1:])
return (~ int(num), lst) return (~ int(num), lst)
else: else:
raise PreprocError("Invalid op token %r for get_num" % lst)
raise PreprocError('Invalid op token %r for get_num' % lst)
elif p == NUM: elif p == NUM:
return v, lst[1:] return v, lst[1:]
elif p == IDENT: elif p == IDENT:
# all macros should have been replaced, remaining identifiers eval to 0 # all macros should have been replaced, remaining identifiers eval to 0
return 0, lst[1:] return 0, lst[1:]
else: else:
raise PreprocError("Invalid token %r for get_num" % lst)
raise PreprocError('Invalid token %r for get_num' % lst)


def get_term(lst): def get_term(lst):
""" """
@@ -293,7 +288,8 @@ def get_term(lst):
:rtype: value, list :rtype: value, list
""" """


if not lst: raise PreprocError("empty list for get_term")
if not lst:
raise PreprocError('empty list for get_term')
num, lst = get_num(lst) num, lst = get_num(lst)
if not lst: if not lst:
return (num, []) return (num, [])
@@ -318,7 +314,7 @@ def get_term(lst):
break break
i += 1 i += 1
else: else:
raise PreprocError("rparen expected %r" % lst)
raise PreprocError('rparen expected %r' % lst)


if int(num): if int(num):
return get_term(lst[1:i]) return get_term(lst[1:i])
@@ -336,7 +332,7 @@ def get_term(lst):
# operator precedence # operator precedence
p2, v2 = lst[0] p2, v2 = lst[0]
if p2 != OP: if p2 != OP:
raise PreprocError("op expected %r" % lst)
raise PreprocError('op expected %r' % lst)


if prec[v2] >= prec[v]: if prec[v2] >= prec[v]:
num2 = reduce_nums(num, num2, v) num2 = reduce_nums(num, num2, v)
@@ -347,7 +343,7 @@ def get_term(lst):
return get_term([(NUM, num), (p, v), (NUM, num3)] + lst) return get_term([(NUM, num), (p, v), (NUM, num3)] + lst)




raise PreprocError("cannot reduce %r" % lst)
raise PreprocError('cannot reduce %r' % lst)


def reduce_eval(lst): def reduce_eval(lst):
""" """
@@ -432,7 +428,7 @@ def reduce_tokens(lst, defs, ban=[]):
else: else:
lst[i] = (NUM, 0) lst[i] = (NUM, 0)
else: else:
raise PreprocError("Invalid define expression %r" % lst)
raise PreprocError('Invalid define expression %r' % lst)


elif p == IDENT and v in defs: elif p == IDENT and v in defs:


@@ -447,8 +443,8 @@ def reduce_tokens(lst, defs, ban=[]):
del lst[i] del lst[i]
accu = to_add[:] accu = to_add[:]
reduce_tokens(accu, defs, ban+[v]) reduce_tokens(accu, defs, ban+[v])
for x in range(len(accu)):
lst.insert(i, accu[x])
for tmp in accu:
lst.insert(i, tmp)
i += 1 i += 1
else: else:
# collect the arguments for the funcall # collect the arguments for the funcall
@@ -457,11 +453,11 @@ def reduce_tokens(lst, defs, ban=[]):
del lst[i] del lst[i]


if i >= len(lst): if i >= len(lst):
raise PreprocError("expected '(' after %r (got nothing)" % v)
raise PreprocError('expected ( after %r (got nothing)' % v)


(p2, v2) = lst[i] (p2, v2) = lst[i]
if p2 != OP or v2 != '(': if p2 != OP or v2 != '(':
raise PreprocError("expected '(' after %r" % v)
raise PreprocError('expected ( after %r' % v)


del lst[i] del lst[i]


@@ -476,18 +472,22 @@ def reduce_tokens(lst, defs, ban=[]):
one_param.append((p2, v2)) one_param.append((p2, v2))
count_paren += 1 count_paren += 1
elif v2 == ')': elif v2 == ')':
if one_param: args.append(one_param)
if one_param:
args.append(one_param)
break break
elif v2 == ',': elif v2 == ',':
if not one_param: raise PreprocError("empty param in funcall %s" % v)
if not one_param:
raise PreprocError('empty param in funcall %r' % v)
args.append(one_param) args.append(one_param)
one_param = [] one_param = []
else: else:
one_param.append((p2, v2)) one_param.append((p2, v2))
else: else:
one_param.append((p2, v2)) one_param.append((p2, v2))
if v2 == '(': count_paren += 1
elif v2 == ')': count_paren -= 1
if v2 == '(':
count_paren += 1
elif v2 == ')':
count_paren -= 1
else: else:
raise PreprocError('malformed macro') raise PreprocError('malformed macro')


@@ -524,7 +524,6 @@ def reduce_tokens(lst, defs, ban=[]):
accu.append((p2, v2)) accu.append((p2, v2))
accu.extend(toks) accu.extend(toks)
elif to_add[j+1][0] == IDENT and to_add[j+1][1] == '__VA_ARGS__': elif to_add[j+1][0] == IDENT and to_add[j+1][1] == '__VA_ARGS__':
# TODO not sure
# first collect the tokens # first collect the tokens
va_toks = [] va_toks = []
st = len(macro_def[0]) st = len(macro_def[0])
@@ -532,7 +531,8 @@ def reduce_tokens(lst, defs, ban=[]):
for x in args[pt-st+1:]: for x in args[pt-st+1:]:
va_toks.extend(x) va_toks.extend(x)
va_toks.append((OP, ',')) va_toks.append((OP, ','))
if va_toks: va_toks.pop() # extra comma
if va_toks:
va_toks.pop() # extra comma
if len(accu)>1: if len(accu)>1:
(p3, v3) = accu[-1] (p3, v3) = accu[-1]
(p4, v4) = accu[-2] (p4, v4) = accu[-2]
@@ -580,8 +580,15 @@ def eval_macro(lst, defs):
:rtype: int :rtype: int
""" """
reduce_tokens(lst, defs, []) reduce_tokens(lst, defs, [])
if not lst: raise PreprocError("missing tokens to evaluate")
(p, v) = reduce_eval(lst)
if not lst:
raise PreprocError('missing tokens to evaluate')

if lst:
p, v = lst[0]
if p == IDENT and v not in defs:
raise PreprocError('missing macro %r' % lst)

p, v = reduce_eval(lst)
return int(v) != 0 return int(v) != 0


def extract_macro(txt): def extract_macro(txt):
@@ -601,7 +608,8 @@ def extract_macro(txt):
p, name = t[0] p, name = t[0]


p, v = t[1] p, v = t[1]
if p != OP: raise PreprocError("expected open parenthesis")
if p != OP:
raise PreprocError('expected (')


i = 1 i = 1
pindex = 0 pindex = 0
@@ -620,27 +628,27 @@ def extract_macro(txt):
elif p == OP and v == ')': elif p == OP and v == ')':
break break
else: else:
raise PreprocError("unexpected token (3)")
raise PreprocError('unexpected token (3)')
elif prev == IDENT: elif prev == IDENT:
if p == OP and v == ',': if p == OP and v == ',':
prev = v prev = v
elif p == OP and v == ')': elif p == OP and v == ')':
break break
else: else:
raise PreprocError("comma or ... expected")
raise PreprocError('comma or ... expected')
elif prev == ',': elif prev == ',':
if p == IDENT: if p == IDENT:
params[v] = pindex params[v] = pindex
pindex += 1 pindex += 1
prev = p prev = p
elif p == OP and v == '...': elif p == OP and v == '...':
raise PreprocError("not implemented (1)")
raise PreprocError('not implemented (1)')
else: else:
raise PreprocError("comma or ... expected (2)")
raise PreprocError('comma or ... expected (2)')
elif prev == '...': elif prev == '...':
raise PreprocError("not implemented (2)")
raise PreprocError('not implemented (2)')
else: else:
raise PreprocError("unexpected else")
raise PreprocError('unexpected else')


#~ print (name, [params, t[i+1:]]) #~ print (name, [params, t[i+1:]])
return (name, [params, t[i+1:]]) return (name, [params, t[i+1:]])
@@ -652,7 +660,7 @@ def extract_macro(txt):
# empty define, assign an empty token # empty define, assign an empty token
return (v, [[], [('T','')]]) return (v, [[], [('T','')]])


re_include = re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
re_include = re.compile('^\s*(<(?:.*)>|"(?:.*)")')
def extract_include(txt, defs): def extract_include(txt, defs):
""" """
Process a line in the form:: Process a line in the form::
@@ -668,15 +676,15 @@ def extract_include(txt, defs):
""" """
m = re_include.search(txt) m = re_include.search(txt)
if m: if m:
if m.group('a'): return '<', m.group('a')
if m.group('b'): return '"', m.group('b')
txt = m.group(1)
return txt[0], txt[1:-1]


# perform preprocessing and look at the result, it must match an include # perform preprocessing and look at the result, it must match an include
toks = tokenize(txt) toks = tokenize(txt)
reduce_tokens(toks, defs, ['waf_include']) reduce_tokens(toks, defs, ['waf_include'])


if not toks: if not toks:
raise PreprocError("could not parse include %s" % txt)
raise PreprocError('could not parse include %r' % txt)


if len(toks) == 1: if len(toks) == 1:
if toks[0][0] == STR: if toks[0][0] == STR:
@@ -686,7 +694,7 @@ def extract_include(txt, defs):
ret = '<', stringize(toks).lstrip('<').rstrip('>') ret = '<', stringize(toks).lstrip('<').rstrip('>')
return ret return ret


raise PreprocError("could not parse include %s." % txt)
raise PreprocError('could not parse include %r' % txt)


def parse_char(txt): def parse_char(txt):
""" """
@@ -698,21 +706,26 @@ def parse_char(txt):
:rtype: string :rtype: string
""" """


if not txt: raise PreprocError("attempted to parse a null char")
if not txt:
raise PreprocError('attempted to parse a null char')
if txt[0] != '\\': if txt[0] != '\\':
return ord(txt) return ord(txt)
c = txt[1] c = txt[1]
if c == 'x': if c == 'x':
if len(txt) == 4 and txt[3] in string.hexdigits: return int(txt[2:], 16)
if len(txt) == 4 and txt[3] in string.hexdigits:
return int(txt[2:], 16)
return int(txt[2:], 16) return int(txt[2:], 16)
elif c.isdigit(): elif c.isdigit():
if c == '0' and len(txt)==2: return 0
if c == '0' and len(txt)==2:
return 0
for i in 3, 2, 1: for i in 3, 2, 1:
if len(txt) > i and txt[1:1+i].isdigit(): if len(txt) > i and txt[1:1+i].isdigit():
return (1+i, int(txt[1:1+i], 8)) return (1+i, int(txt[1:1+i], 8))
else: else:
try: return chr_esc[c]
except KeyError: raise PreprocError("could not parse char literal '%s'" % txt)
try:
return chr_esc[c]
except KeyError:
raise PreprocError('could not parse char literal %r' % txt)


def tokenize(s): def tokenize(s):
""" """
@@ -725,7 +738,6 @@ def tokenize(s):
""" """
return tokenize_private(s)[:] # force a copy of the results return tokenize_private(s)[:] # force a copy of the results


@Utils.run_once
def tokenize_private(s): def tokenize_private(s):
ret = [] ret = []
for match in re_clexer.finditer(s): for match in re_clexer.finditer(s):
@@ -734,28 +746,32 @@ def tokenize_private(s):
v = m(name) v = m(name)
if v: if v:
if name == IDENT: if name == IDENT:
try:
g_optrans[v]
if v in g_optrans:
name = OP name = OP
except KeyError:
# c++ specific
if v.lower() == "true":
v = 1
name = NUM
elif v.lower() == "false":
v = 0
name = NUM
elif v.lower() == "true":
v = 1
name = NUM
elif v.lower() == "false":
v = 0
name = NUM
elif name == NUM: elif name == NUM:
if m('oct'): v = int(v, 8)
elif m('hex'): v = int(m('hex'), 16)
elif m('n0'): v = m('n0')
if m('oct'):
v = int(v, 8)
elif m('hex'):
v = int(m('hex'), 16)
elif m('n0'):
v = m('n0')
else: else:
v = m('char') v = m('char')
if v: v = parse_char(v)
else: v = m('n2') or m('n4')
if v:
v = parse_char(v)
else:
v = m('n2') or m('n4')
elif name == OP: elif name == OP:
if v == '%:': v = '#'
elif v == '%:%:': v = '##'
if v == '%:':
v = '#'
elif v == '%:%:':
v = '##'
elif name == STR: elif name == STR:
# remove the quotes around the string # remove the quotes around the string
v = v[1:-1] v = v[1:-1]
@@ -763,15 +779,20 @@ def tokenize_private(s):
break break
return ret return ret


@Utils.run_once
def define_name(line):
"""
:param line: define line
:type line: string
:rtype: string
:return: the define name
"""
return re_mac.match(line).group(0)
def format_defines(lst):
ret = []
for y in lst:
if y:
pos = y.find('=')
if pos == -1:
# "-DFOO" should give "#define FOO 1"
ret.append(y)
elif pos > 0:
# all others are assumed to be -DX=Y
ret.append('%s %s' % (y[:pos], y[pos+1:]))
else:
raise ValueError('Invalid define expression %r' % y)
return ret


class c_parser(object): class c_parser(object):
""" """
@@ -803,9 +824,12 @@ class c_parser(object):
self.curfile = '' self.curfile = ''
"""Current file""" """Current file"""


self.ban_includes = set([])
self.ban_includes = set()
"""Includes that must not be read (#pragma once)""" """Includes that must not be read (#pragma once)"""


self.listed = set()
"""Include nodes/names already listed to avoid duplicates in self.nodes/self.names"""

def cached_find_resource(self, node, filename): def cached_find_resource(self, node, filename):
""" """
Find a file from the input directory Find a file from the input directory
@@ -818,13 +842,13 @@ class c_parser(object):
:rtype: :py:class:`waflib.Node.Node` :rtype: :py:class:`waflib.Node.Node`
""" """
try: try:
nd = node.ctx.cache_nd
cache = node.ctx.preproc_cache_node
except AttributeError: except AttributeError:
nd = node.ctx.cache_nd = {}
cache = node.ctx.preproc_cache_node = Utils.lru_cache(FILE_CACHE_SIZE)


tup = (node, filename)
key = (node, filename)
try: try:
return nd[tup]
return cache[key]
except KeyError: except KeyError:
ret = node.find_resource(filename) ret = node.find_resource(filename)
if ret: if ret:
@@ -834,10 +858,10 @@ class c_parser(object):
tmp = node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode)) tmp = node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode))
if tmp and getattr(tmp, 'children', None): if tmp and getattr(tmp, 'children', None):
ret = None ret = None
nd[tup] = ret
cache[key] = ret
return ret return ret


def tryfind(self, filename):
def tryfind(self, filename, kind='"', env=None):
""" """
Try to obtain a node from the filename based from the include paths. Will add Try to obtain a node from the filename based from the include paths. Will add
the node found to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes` or the file name to the node found to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes` or the file name to
@@ -851,29 +875,70 @@ class c_parser(object):
""" """
if filename.endswith('.moc'): if filename.endswith('.moc'):
# we could let the qt4 module use a subclass, but then the function "scan" below must be duplicated # we could let the qt4 module use a subclass, but then the function "scan" below must be duplicated
# in the qt4 and in the qt5 classes. So we have two lines here and it is sufficient. TODO waf 1.9
# in the qt4 and in the qt5 classes. So we have two lines here and it is sufficient.
self.names.append(filename) self.names.append(filename)
return None return None


self.curfile = filename self.curfile = filename


# for msvc it should be a for loop over the whole stack
found = self.cached_find_resource(self.currentnode_stack[-1], filename)
found = None
if kind == '"':
if env.MSVC_VERSION:
for n in reversed(self.currentnode_stack):
found = self.cached_find_resource(n, filename)
if found:
break
else:
found = self.cached_find_resource(self.currentnode_stack[-1], filename)


for n in self.nodepaths:
if found:
break
found = self.cached_find_resource(n, filename)
if not found:
for n in self.nodepaths:
found = self.cached_find_resource(n, filename)
if found:
break


listed = self.listed
if found and not found in self.ban_includes: if found and not found in self.ban_includes:
# TODO duplicates do not increase the no-op build times too much, but they may be worth removing
self.nodes.append(found)
if found not in listed:
listed.add(found)
self.nodes.append(found)
self.addlines(found) self.addlines(found)
else: else:
if not filename in self.names:
if filename not in listed:
listed.add(filename)
self.names.append(filename) self.names.append(filename)
return found return found


def filter_comments(self, node):
"""
Filter the comments from a c/h file, and return the preprocessor lines.
The regexps :py:attr:`waflib.Tools.c_preproc.re_cpp`, :py:attr:`waflib.Tools.c_preproc.re_nl` and :py:attr:`waflib.Tools.c_preproc.re_lines` are used internally.

:return: the preprocessor directives as a list of (keyword, line)
:rtype: a list of string pairs
"""
# return a list of tuples : keyword, line
code = node.read()
if use_trigraphs:
for (a, b) in trig_def:
code = code.split(a).join(b)
code = re_nl.sub('', code)
code = re_cpp.sub(repl, code)
return re_lines.findall(code)

def parse_lines(self, node):
try:
cache = node.ctx.preproc_cache_lines
except AttributeError:
cache = node.ctx.preproc_cache_lines = Utils.lru_cache(LINE_CACHE_SIZE)
try:
return cache[node]
except KeyError:
cache[node] = lines = self.filter_comments(node)
lines.append((POPFILE, ''))
lines.reverse()
return lines

def addlines(self, node): def addlines(self, node):
""" """
Add the lines from a header in the list of preprocessor lines to parse Add the lines from a header in the list of preprocessor lines to parse
@@ -883,34 +948,23 @@ class c_parser(object):
""" """


self.currentnode_stack.append(node.parent) self.currentnode_stack.append(node.parent)
filepath = node.abspath()


self.count_files += 1 self.count_files += 1
if self.count_files > recursion_limit: if self.count_files > recursion_limit:
# issue #812 # issue #812
raise PreprocError("recursion limit exceeded")
pc = self.parse_cache
debug('preproc: reading file %r', filepath)
try:
lns = pc[filepath]
except KeyError:
pass
else:
self.lines.extend(lns)
return
raise PreprocError('recursion limit exceeded')


if Logs.verbose:
Logs.debug('preproc: reading file %r', node)
try: try:
lines = filter_comments(filepath)
lines.append((POPFILE, ''))
lines.reverse()
pc[filepath] = lines # cache the lines filtered
self.lines.extend(lines)
except IOError:
raise PreprocError("could not read the file %s" % filepath)
lines = self.parse_lines(node)
except EnvironmentError:
raise PreprocError('could not read the file %r' % node)
except Exception: except Exception:
if Logs.verbose > 0: if Logs.verbose > 0:
error("parsing %s failed" % filepath)
traceback.print_exc()
Logs.error('parsing %r failed %s', node, traceback.format_exc())
else:
self.lines.extend(lines)


def start(self, node, env): def start(self, node, env):
""" """
@@ -922,27 +976,16 @@ class c_parser(object):
:param env: config set containing additional defines to take into account :param env: config set containing additional defines to take into account
:type env: :py:class:`waflib.ConfigSet.ConfigSet` :type env: :py:class:`waflib.ConfigSet.ConfigSet`
""" """

debug('preproc: scanning %s (in %s)', node.name, node.parent.name)

bld = node.ctx
try:
self.parse_cache = bld.parse_cache
except AttributeError:
self.parse_cache = bld.parse_cache = {}
Logs.debug('preproc: scanning %s (in %s)', node.name, node.parent.name)


self.current_file = node self.current_file = node
self.addlines(node) self.addlines(node)


# macros may be defined on the command-line, so they must be parsed as if they were part of the file # macros may be defined on the command-line, so they must be parsed as if they were part of the file
if env['DEFINES']:
try:
lst = ['%s %s' % (x[0], trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in env['DEFINES']]]
lst.reverse()
self.lines.extend([('define', x) for x in lst])
except AttributeError:
# if the defines are invalid the compiler will tell the user
pass
if env.DEFINES:
lst = format_defines(env.DEFINES)
lst.reverse()
self.lines.extend([('define', x) for x in lst])


while self.lines: while self.lines:
(token, line) = self.lines.pop() (token, line) = self.lines.pop()
@@ -952,8 +995,6 @@ class c_parser(object):
continue continue


try: try:
ve = Logs.verbose
if ve: debug('preproc: line is %s - %s state is %s', token, line, self.state)
state = self.state state = self.state


# make certain we define the state if we are about to enter in an if block # make certain we define the state if we are about to enter in an if block
@@ -969,23 +1010,27 @@ class c_parser(object):


if token == 'if': if token == 'if':
ret = eval_macro(tokenize(line), self.defs) ret = eval_macro(tokenize(line), self.defs)
if ret: state[-1] = accepted
else: state[-1] = ignored
if ret:
state[-1] = accepted
else:
state[-1] = ignored
elif token == 'ifdef': elif token == 'ifdef':
m = re_mac.match(line) m = re_mac.match(line)
if m and m.group(0) in self.defs: state[-1] = accepted
else: state[-1] = ignored
if m and m.group() in self.defs:
state[-1] = accepted
else:
state[-1] = ignored
elif token == 'ifndef': elif token == 'ifndef':
m = re_mac.match(line) m = re_mac.match(line)
if m and m.group(0) in self.defs: state[-1] = ignored
else: state[-1] = accepted
if m and m.group() in self.defs:
state[-1] = ignored
else:
state[-1] = accepted
elif token == 'include' or token == 'import': elif token == 'include' or token == 'import':
(kind, inc) = extract_include(line, self.defs) (kind, inc) = extract_include(line, self.defs)
if ve: debug('preproc: include found %s (%s) ', inc, kind)
if kind == '"' or not strict_quotes:
self.current_file = self.tryfind(inc)
if token == 'import':
self.ban_includes.add(self.current_file)
self.current_file = self.tryfind(inc, kind, env)
if token == 'import':
self.ban_includes.add(self.current_file)
elif token == 'elif': elif token == 'elif':
if state[-1] == accepted: if state[-1] == accepted:
state[-1] = skipped state[-1] = skipped
@@ -993,24 +1038,35 @@ class c_parser(object):
if eval_macro(tokenize(line), self.defs): if eval_macro(tokenize(line), self.defs):
state[-1] = accepted state[-1] = accepted
elif token == 'else': elif token == 'else':
if state[-1] == accepted: state[-1] = skipped
elif state[-1] == ignored: state[-1] = accepted
if state[-1] == accepted:
state[-1] = skipped
elif state[-1] == ignored:
state[-1] = accepted
elif token == 'define': elif token == 'define':
try: try:
self.defs[define_name(line)] = line
except Exception:
raise PreprocError("Invalid define line %s" % line)
self.defs[self.define_name(line)] = line
except AttributeError:
raise PreprocError('Invalid define line %r' % line)
elif token == 'undef': elif token == 'undef':
m = re_mac.match(line) m = re_mac.match(line)
if m and m.group(0) in self.defs:
self.defs.__delitem__(m.group(0))
if m and m.group() in self.defs:
self.defs.__delitem__(m.group())
#print "undef %s" % name #print "undef %s" % name
elif token == 'pragma': elif token == 'pragma':
if re_pragma_once.match(line.lower()): if re_pragma_once.match(line.lower()):
self.ban_includes.add(self.current_file) self.ban_includes.add(self.current_file)
except Exception as e: except Exception as e:
if Logs.verbose: if Logs.verbose:
debug('preproc: line parsing failed (%s): %s %s', e, line, Utils.ex_stack())
Logs.debug('preproc: line parsing failed (%s): %s %s', e, line, traceback.format_exc())

def define_name(self, line):
"""
:param line: define line
:type line: string
:rtype: string
:return: the define name
"""
return re_mac.match(line).group()


def scan(task): def scan(task):
""" """
@@ -1020,9 +1076,6 @@ def scan(task):


This function is bound as a task method on :py:class:`waflib.Tools.c.c` and :py:class:`waflib.Tools.cxx.cxx` for example This function is bound as a task method on :py:class:`waflib.Tools.c.c` and :py:class:`waflib.Tools.cxx.cxx` for example
""" """

global go_absolute

try: try:
incn = task.generator.includes_nodes incn = task.generator.includes_nodes
except AttributeError: except AttributeError:
@@ -1035,7 +1088,4 @@ def scan(task):


tmp = c_parser(nodepaths) tmp = c_parser(nodepaths)
tmp.start(task.inputs[0], task.env) tmp.start(task.inputs[0], task.env)
if Logs.verbose:
debug('deps: deps for %r: %r; unresolved %r' % (task.inputs, tmp.nodes, tmp.names))
return (tmp.nodes, tmp.names) return (tmp.nodes, tmp.names)


+ 13
- 12
waflib/Tools/c_tests.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2010 (ita)
# Thomas Nagy, 2016-2018 (ita)


""" """
Various configuration tests. Various configuration tests.
@@ -58,7 +58,7 @@ def link_lib_test_fun(self):
@conf @conf
def check_library(self, mode=None, test_exec=True): def check_library(self, mode=None, test_exec=True):
""" """
Check if libraries can be linked with the current linker. Uses :py:func:`waflib.Tools.c_tests.link_lib_test_fun`.
Checks if libraries can be linked with the current linker. Uses :py:func:`waflib.Tools.c_tests.link_lib_test_fun`.


:param mode: c or cxx or d :param mode: c or cxx or d
:type mode: string :type mode: string
@@ -72,8 +72,7 @@ def check_library(self, mode=None, test_exec=True):
features = 'link_lib_test', features = 'link_lib_test',
msg = 'Checking for libraries', msg = 'Checking for libraries',
mode = mode, mode = mode,
test_exec = test_exec,
)
test_exec = test_exec)


######################################################################################## ########################################################################################


@@ -89,7 +88,7 @@ INLINE_VALUES = ['inline', '__inline__', '__inline']
@conf @conf
def check_inline(self, **kw): def check_inline(self, **kw):
""" """
Check for the right value for inline macro.
Checks for the right value for inline macro.
Define INLINE_MACRO to 1 if the define is found. Define INLINE_MACRO to 1 if the define is found.
If the inline macro is not 'inline', add a define to the ``config.h`` (#define inline __inline__) If the inline macro is not 'inline', add a define to the ``config.h`` (#define inline __inline__)


@@ -98,7 +97,6 @@ def check_inline(self, **kw):
:param features: by default *c* or *cxx* depending on the compiler present :param features: by default *c* or *cxx* depending on the compiler present
:type features: list of string :type features: list of string
""" """

self.start_msg('Checking for inline') self.start_msg('Checking for inline')


if not 'define_name' in kw: if not 'define_name' in kw:
@@ -135,7 +133,7 @@ int main(int argc, char **argv) {
@conf @conf
def check_large_file(self, **kw): def check_large_file(self, **kw):
""" """
Check for large file support and define the macro HAVE_LARGEFILE
Checks for large file support and define the macro HAVE_LARGEFILE
The test is skipped on win32 systems (DEST_BINFMT == pe). The test is skipped on win32 systems (DEST_BINFMT == pe).


:param define_name: define to set, by default *HAVE_LARGEFILE* :param define_name: define to set, by default *HAVE_LARGEFILE*
@@ -143,7 +141,6 @@ def check_large_file(self, **kw):
:param execute: execute the test (yes by default) :param execute: execute the test (yes by default)
:type execute: bool :type execute: bool
""" """

if not 'define_name' in kw: if not 'define_name' in kw:
kw['define_name'] = 'HAVE_LARGEFILE' kw['define_name'] = 'HAVE_LARGEFILE'
if not 'execute' in kw: if not 'execute' in kw:
@@ -197,9 +194,12 @@ extern int foo;
''' '''


class grep_for_endianness(Task.Task): class grep_for_endianness(Task.Task):
"""
Task that reads a binary and tries to determine the endianness
"""
color = 'PINK' color = 'PINK'
def run(self): def run(self):
txt = self.inputs[0].read(flags='rb').decode('iso8859-1')
txt = self.inputs[0].read(flags='rb').decode('latin-1')
if txt.find('LiTTleEnDian') > -1: if txt.find('LiTTleEnDian') > -1:
self.generator.tmp.append('little') self.generator.tmp.append('little')
elif txt.find('BIGenDianSyS') > -1: elif txt.find('BIGenDianSyS') > -1:
@@ -211,18 +211,19 @@ class grep_for_endianness(Task.Task):
@after_method('process_source') @after_method('process_source')
def grep_for_endianness_fun(self): def grep_for_endianness_fun(self):
""" """
Used by the endiannes configuration test
Used by the endianness configuration test
""" """
self.create_task('grep_for_endianness', self.compiled_tasks[0].outputs[0]) self.create_task('grep_for_endianness', self.compiled_tasks[0].outputs[0])


@conf @conf
def check_endianness(self): def check_endianness(self):
""" """
Execute a configuration test to determine the endianness
Executes a configuration test to determine the endianness
""" """
tmp = [] tmp = []
def check_msg(self): def check_msg(self):
return tmp[0] return tmp[0]
self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness', msg="Checking for endianness", define='ENDIANNESS', tmp=tmp, okmsg=check_msg)
self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness',
msg='Checking for endianness', define='ENDIANNESS', tmp=tmp, okmsg=check_msg)
return tmp[0] return tmp[0]



+ 97
- 43
waflib/Tools/ccroot.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
# Thomas Nagy, 2005-2018 (ita)


""" """
Classes and methods shared by tools providing support for C-like language such Classes and methods shared by tools providing support for C-like language such
@@ -8,7 +8,7 @@ as C/C++/D/Assembly/Go (this support module is almost never used alone).
""" """


import os, re import os, re
from waflib import Task, Utils, Node, Errors
from waflib import Task, Utils, Node, Errors, Logs
from waflib.TaskGen import after_method, before_method, feature, taskgen_method, extension from waflib.TaskGen import after_method, before_method, feature, taskgen_method, extension
from waflib.Tools import c_aliases, c_preproc, c_config, c_osx, c_tests from waflib.Tools import c_aliases, c_preproc, c_config, c_osx, c_tests
from waflib.Configure import conf from waflib.Configure import conf
@@ -77,7 +77,7 @@ def to_incnodes(self, inlst):
:return: list of include folders as nodes :return: list of include folders as nodes
""" """
lst = [] lst = []
seen = set([])
seen = set()
for x in self.to_list(inlst): for x in self.to_list(inlst):
if x in seen or not x: if x in seen or not x:
continue continue
@@ -118,9 +118,10 @@ def apply_incpaths(self):
and the list of include paths in ``tg.env.INCLUDES``. and the list of include paths in ``tg.env.INCLUDES``.
""" """


lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES'])
lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env.INCLUDES)
self.includes_nodes = lst self.includes_nodes = lst
self.env['INCPATHS'] = [x.abspath() for x in lst]
cwd = self.get_cwd()
self.env.INCPATHS = [x.path_from(cwd) for x in lst]


class link_task(Task.Task): class link_task(Task.Task):
""" """
@@ -130,6 +131,9 @@ class link_task(Task.Task):
""" """
color = 'YELLOW' color = 'YELLOW'


weight = 3
"""Try to process link tasks as early as possible"""

inst_to = None inst_to = None
"""Default installation path for the link task outputs, or None to disable""" """Default installation path for the link task outputs, or None to disable"""


@@ -142,6 +146,12 @@ class link_task(Task.Task):
The settings are retrieved from ``env.clsname_PATTERN`` The settings are retrieved from ``env.clsname_PATTERN``
""" """
if isinstance(target, str): if isinstance(target, str):
base = self.generator.path
if target.startswith('#'):
# for those who like flat structures
target = target[1:]
base = self.generator.bld.bldnode

pattern = self.env[self.__class__.__name__ + '_PATTERN'] pattern = self.env[self.__class__.__name__ + '_PATTERN']
if not pattern: if not pattern:
pattern = '%s' pattern = '%s'
@@ -151,7 +161,7 @@ class link_task(Task.Task):
nums = self.generator.vnum.split('.') nums = self.generator.vnum.split('.')
if self.env.DEST_BINFMT == 'pe': if self.env.DEST_BINFMT == 'pe':
# include the version in the dll file name, # include the version in the dll file name,
# the import lib file name stays unversionned.
# the import lib file name stays unversioned.
name = name + '-' + nums[0] name = name + '-' + nums[0]
elif self.env.DEST_OS == 'openbsd': elif self.env.DEST_OS == 'openbsd':
pattern = '%s.%s' % (pattern, nums[0]) pattern = '%s.%s' % (pattern, nums[0])
@@ -162,9 +172,51 @@ class link_task(Task.Task):
tmp = folder + os.sep + pattern % name tmp = folder + os.sep + pattern % name
else: else:
tmp = pattern % name tmp = pattern % name
target = self.generator.path.find_or_declare(tmp)
target = base.find_or_declare(tmp)
self.set_outputs(target) self.set_outputs(target)


def exec_command(self, *k, **kw):
ret = super(link_task, self).exec_command(*k, **kw)
if not ret and self.env.DO_MANIFEST:
ret = self.exec_mf()
return ret

def exec_mf(self):
"""
Create manifest files for VS-like compilers (msvc, ifort, ...)
"""
if not self.env.MT:
return 0

manifest = None
for out_node in self.outputs:
if out_node.name.endswith('.manifest'):
manifest = out_node.abspath()
break
else:
# Should never get here. If we do, it means the manifest file was
# never added to the outputs list, thus we don't have a manifest file
# to embed, so we just return.
return 0

# embedding mode. Different for EXE's and DLL's.
# see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
mode = ''
for x in Utils.to_list(self.generator.features):
if x in ('cprogram', 'cxxprogram', 'fcprogram', 'fcprogram_test'):
mode = 1
elif x in ('cshlib', 'cxxshlib', 'fcshlib'):
mode = 2

Logs.debug('msvc: embedding manifest in mode %r', mode)

lst = [] + self.env.MT
lst.extend(Utils.to_list(self.env.MTFLAGS))
lst.extend(['-manifest', manifest])
lst.append('-outputresource:%s;%s' % (self.outputs[0].abspath(), mode))

return super(link_task, self).exec_command(lst)

class stlink_task(link_task): class stlink_task(link_task):
""" """
Base for static link tasks, which use *ar* most of the time. Base for static link tasks, which use *ar* most of the time.
@@ -178,8 +230,10 @@ class stlink_task(link_task):
def rm_tgt(cls): def rm_tgt(cls):
old = cls.run old = cls.run
def wrap(self): def wrap(self):
try: os.remove(self.outputs[0].abspath())
except OSError: pass
try:
os.remove(self.outputs[0].abspath())
except OSError:
pass
return old(self) return old(self)
setattr(cls, 'run', wrap) setattr(cls, 'run', wrap)
rm_tgt(stlink_task) rm_tgt(stlink_task)
@@ -219,10 +273,12 @@ def apply_link(self):
try: try:
inst_to = self.install_path inst_to = self.install_path
except AttributeError: except AttributeError:
inst_to = self.link_task.__class__.inst_to
inst_to = self.link_task.inst_to
if inst_to: if inst_to:
# install a copy of the node list we have at this moment (implib not added) # install a copy of the node list we have at this moment (implib not added)
self.install_task = self.bld.install_files(inst_to, self.link_task.outputs[:], env=self.env, chmod=self.link_task.chmod, task=self.link_task)
self.install_task = self.add_install_files(
install_to=inst_to, install_from=self.link_task.outputs[:],
chmod=self.link_task.chmod, task=self.link_task)


@taskgen_method @taskgen_method
def use_rec(self, name, **kw): def use_rec(self, name, **kw):
@@ -282,7 +338,7 @@ def process_use(self):
See :py:func:`waflib.Tools.ccroot.use_rec`. See :py:func:`waflib.Tools.ccroot.use_rec`.
""" """


use_not = self.tmp_use_not = set([])
use_not = self.tmp_use_not = set()
self.tmp_use_seen = [] # we would like an ordered set self.tmp_use_seen = [] # we would like an ordered set
use_prec = self.tmp_use_prec = {} use_prec = self.tmp_use_prec = {}
self.uselib = self.to_list(getattr(self, 'uselib', [])) self.uselib = self.to_list(getattr(self, 'uselib', []))
@@ -297,7 +353,7 @@ def process_use(self):
del use_prec[x] del use_prec[x]


# topological sort # topological sort
out = []
out = self.tmp_use_sorted = []
tmp = [] tmp = []
for x in self.tmp_use_seen: for x in self.tmp_use_seen:
for k in use_prec.values(): for k in use_prec.values():
@@ -333,14 +389,15 @@ def process_use(self):
if var == 'LIB' or y.tmp_use_stlib or x in names: if var == 'LIB' or y.tmp_use_stlib or x in names:
self.env.append_value(var, [y.target[y.target.rfind(os.sep) + 1:]]) self.env.append_value(var, [y.target[y.target.rfind(os.sep) + 1:]])
self.link_task.dep_nodes.extend(y.link_task.outputs) self.link_task.dep_nodes.extend(y.link_task.outputs)
tmp_path = y.link_task.outputs[0].parent.path_from(self.bld.bldnode)
tmp_path = y.link_task.outputs[0].parent.path_from(self.get_cwd())
self.env.append_unique(var + 'PATH', [tmp_path]) self.env.append_unique(var + 'PATH', [tmp_path])
else: else:
if y.tmp_use_objects: if y.tmp_use_objects:
self.add_objects_from_tgen(y) self.add_objects_from_tgen(y)


if getattr(y, 'export_includes', None): if getattr(y, 'export_includes', None):
self.includes.extend(y.to_incnodes(y.export_includes))
# self.includes may come from a global variable #2035
self.includes = self.includes + y.to_incnodes(y.export_includes)


if getattr(y, 'export_defines', None): if getattr(y, 'export_defines', None):
self.env.append_value('DEFINES', self.to_list(y.export_defines)) self.env.append_value('DEFINES', self.to_list(y.export_defines))
@@ -390,7 +447,7 @@ def get_uselib_vars(self):
:return: the *uselib* variables associated to the *features* attribute (see :py:attr:`waflib.Tools.ccroot.USELIB_VARS`) :return: the *uselib* variables associated to the *features* attribute (see :py:attr:`waflib.Tools.ccroot.USELIB_VARS`)
:rtype: list of string :rtype: list of string
""" """
_vars = set([])
_vars = set()
for x in self.features: for x in self.features:
if x in USELIB_VARS: if x in USELIB_VARS:
_vars |= USELIB_VARS[x] _vars |= USELIB_VARS[x]
@@ -405,7 +462,7 @@ def propagate_uselib_vars(self):
def build(bld): def build(bld):
bld.env.AFLAGS_aaa = ['bar'] bld.env.AFLAGS_aaa = ['bar']
from waflib.Tools.ccroot import USELIB_VARS from waflib.Tools.ccroot import USELIB_VARS
USELIB_VARS['aaa'] = set('AFLAGS')
USELIB_VARS['aaa'] = ['AFLAGS']


tg = bld(features='aaa', aflags='test') tg = bld(features='aaa', aflags='test')


@@ -447,20 +504,20 @@ def apply_implib(self):
name = self.target.name name = self.target.name
else: else:
name = os.path.split(self.target)[1] name = os.path.split(self.target)[1]
implib = self.env['implib_PATTERN'] % name
implib = self.env.implib_PATTERN % name
implib = dll.parent.find_or_declare(implib) implib = dll.parent.find_or_declare(implib)
self.env.append_value('LINKFLAGS', self.env['IMPLIB_ST'] % implib.bldpath())
self.env.append_value('LINKFLAGS', self.env.IMPLIB_ST % implib.bldpath())
self.link_task.outputs.append(implib) self.link_task.outputs.append(implib)


if getattr(self, 'defs', None) and self.env.DEST_BINFMT == 'pe': if getattr(self, 'defs', None) and self.env.DEST_BINFMT == 'pe':
node = self.path.find_resource(self.defs) node = self.path.find_resource(self.defs)
if not node: if not node:
raise Errors.WafError('invalid def file %r' % self.defs) raise Errors.WafError('invalid def file %r' % self.defs)
if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
self.env.append_value('LINKFLAGS', '/def:%s' % node.path_from(self.bld.bldnode))
if self.env.def_PATTERN:
self.env.append_value('LINKFLAGS', self.env.def_PATTERN % node.path_from(self.get_cwd()))
self.link_task.dep_nodes.append(node) self.link_task.dep_nodes.append(node)
else: else:
#gcc for windows takes *.def file a an input without any special flag
# gcc for windows takes *.def file as input without any special flag
self.link_task.inputs.append(node) self.link_task.inputs.append(node)


# where to put the import library # where to put the import library
@@ -475,10 +532,11 @@ def apply_implib(self):
except AttributeError: except AttributeError:
# else, put the library in BINDIR and the import library in LIBDIR # else, put the library in BINDIR and the import library in LIBDIR
inst_to = '${IMPLIBDIR}' inst_to = '${IMPLIBDIR}'
self.install_task.dest = '${BINDIR}'
self.install_task.install_to = '${BINDIR}'
if not self.env.IMPLIBDIR: if not self.env.IMPLIBDIR:
self.env.IMPLIBDIR = self.env.LIBDIR self.env.IMPLIBDIR = self.env.LIBDIR
self.implib_install_task = self.bld.install_files(inst_to, implib, env=self.env, chmod=self.link_task.chmod, task=self.link_task)
self.implib_install_task = self.add_install_files(install_to=inst_to, install_from=implib,
chmod=self.link_task.chmod, task=self.link_task)


# ============ the code above must not know anything about vnum processing on unix platforms ========= # ============ the code above must not know anything about vnum processing on unix platforms =========


@@ -535,34 +593,34 @@ def apply_vnum(self):


# the following task is just to enable execution from the build dir :-/ # the following task is just to enable execution from the build dir :-/
if self.env.DEST_OS != 'openbsd': if self.env.DEST_OS != 'openbsd':
outs = [node.parent.find_or_declare(name3)]
outs = [node.parent.make_node(name3)]
if name2 != name3: if name2 != name3:
outs.append(node.parent.find_or_declare(name2))
outs.append(node.parent.make_node(name2))
self.create_task('vnum', node, outs) self.create_task('vnum', node, outs)


if getattr(self, 'install_task', None): if getattr(self, 'install_task', None):
self.install_task.hasrun = Task.SKIP_ME
bld = self.bld
path = self.install_task.dest
self.install_task.hasrun = Task.SKIPPED
self.install_task.no_errcheck_out = True
path = self.install_task.install_to
if self.env.DEST_OS == 'openbsd': if self.env.DEST_OS == 'openbsd':
libname = self.link_task.outputs[0].name libname = self.link_task.outputs[0].name
t1 = bld.install_as('%s%s%s' % (path, os.sep, libname), node, env=self.env, chmod=self.link_task.chmod)
t1 = self.add_install_as(install_to='%s/%s' % (path, libname), install_from=node, chmod=self.link_task.chmod)
self.vnum_install_task = (t1,) self.vnum_install_task = (t1,)
else: else:
t1 = bld.install_as(path + os.sep + name3, node, env=self.env, chmod=self.link_task.chmod)
t3 = bld.symlink_as(path + os.sep + libname, name3)
t1 = self.add_install_as(install_to=path + os.sep + name3, install_from=node, chmod=self.link_task.chmod)
t3 = self.add_symlink_as(install_to=path + os.sep + libname, install_from=name3)
if name2 != name3: if name2 != name3:
t2 = bld.symlink_as(path + os.sep + name2, name3)
t2 = self.add_symlink_as(install_to=path + os.sep + name2, install_from=name3)
self.vnum_install_task = (t1, t2, t3) self.vnum_install_task = (t1, t2, t3)
else: else:
self.vnum_install_task = (t1, t3) self.vnum_install_task = (t1, t3)


if '-dynamiclib' in self.env['LINKFLAGS']:
if '-dynamiclib' in self.env.LINKFLAGS:
# this requires after(propagate_uselib_vars) # this requires after(propagate_uselib_vars)
try: try:
inst_to = self.install_path inst_to = self.install_path
except AttributeError: except AttributeError:
inst_to = self.link_task.__class__.inst_to
inst_to = self.link_task.inst_to
if inst_to: if inst_to:
p = Utils.subst_vars(inst_to, self.env) p = Utils.subst_vars(inst_to, self.env)
path = os.path.join(p, name2) path = os.path.join(p, name2)
@@ -575,7 +633,6 @@ class vnum(Task.Task):
Create the symbolic links for a versioned shared library. Instances are created by :py:func:`waflib.Tools.ccroot.apply_vnum` Create the symbolic links for a versioned shared library. Instances are created by :py:func:`waflib.Tools.ccroot.apply_vnum`
""" """
color = 'CYAN' color = 'CYAN'
quient = True
ext_in = ['.bin'] ext_in = ['.bin']
def keyword(self): def keyword(self):
return 'Symlinking' return 'Symlinking'
@@ -600,9 +657,6 @@ class fake_shlib(link_task):
for t in self.run_after: for t in self.run_after:
if not t.hasrun: if not t.hasrun:
return Task.ASK_LATER return Task.ASK_LATER

for x in self.outputs:
x.sig = Utils.h_file(x.abspath())
return Task.SKIP_ME return Task.SKIP_ME


class fake_stlib(stlink_task): class fake_stlib(stlink_task):
@@ -613,9 +667,6 @@ class fake_stlib(stlink_task):
for t in self.run_after: for t in self.run_after:
if not t.hasrun: if not t.hasrun:
return Task.ASK_LATER return Task.ASK_LATER

for x in self.outputs:
x.sig = Utils.h_file(x.abspath())
return Task.SKIP_ME return Task.SKIP_ME


@conf @conf
@@ -658,7 +709,10 @@ def process_lib(self):
for y in names: for y in names:
node = x.find_node(y) node = x.find_node(y)
if node: if node:
node.sig = Utils.h_file(node.abspath())
try:
Utils.h_file(node.abspath())
except EnvironmentError:
raise ValueError('Could not read %r' % y)
break break
else: else:
continue continue


+ 1
- 1
waflib/Tools/clang.py View File

@@ -12,7 +12,7 @@ from waflib.Configure import conf
@conf @conf
def find_clang(conf): def find_clang(conf):
""" """
Find the program clang and execute it to ensure it really is clang
Finds the program clang and executes it to ensure it really is clang
""" """
cc = conf.find_program('clang', var='CC') cc = conf.find_program('clang', var='CC')
conf.get_cc_version(cc, clang=True) conf.get_cc_version(cc, clang=True)


+ 2
- 2
waflib/Tools/clangxx.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy 2009-2010 (ita)
# Thomas Nagy 2009-2018 (ita)


""" """
Detect the Clang++ C++ compiler Detect the Clang++ C++ compiler
@@ -12,7 +12,7 @@ from waflib.Configure import conf
@conf @conf
def find_clangxx(conf): def find_clangxx(conf):
""" """
Find the program clang++, and execute it to ensure it really is clang++
Finds the program clang++, and executes it to ensure it really is clang++
""" """
cxx = conf.find_program('clang++', var='CXX') cxx = conf.find_program('clang++', var='CXX')
conf.get_cc_version(cxx, clang=True) conf.get_cc_version(cxx, clang=True)


+ 15
- 9
waflib/Tools/compiler_c.py View File

@@ -47,10 +47,10 @@ c_compiler = {
'osf1V': ['gcc'], 'osf1V': ['gcc'],
'gnu': ['gcc', 'clang'], 'gnu': ['gcc', 'clang'],
'java': ['gcc', 'msvc', 'clang', 'icc'], 'java': ['gcc', 'msvc', 'clang', 'icc'],
'default':['gcc', 'clang'],
'default':['clang', 'gcc'],
} }
""" """
Dict mapping the platform names to Waf tools finding specific C compilers::
Dict mapping platform names to Waf tools finding specific C compilers::


from waflib.Tools.compiler_c import c_compiler from waflib.Tools.compiler_c import c_compiler
c_compiler['linux'] = ['gcc', 'icc', 'suncc'] c_compiler['linux'] = ['gcc', 'icc', 'suncc']
@@ -63,10 +63,14 @@ def default_compilers():


def configure(conf): def configure(conf):
""" """
Try to find a suitable C compiler or raise a :py:class:`waflib.Errors.ConfigurationError`.
Detects a suitable C compiler

:raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
""" """
try: test_for_compiler = conf.options.check_c_compiler or default_compilers()
except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_c')")
try:
test_for_compiler = conf.options.check_c_compiler or default_compilers()
except AttributeError:
conf.fatal("Add options(opt): opt.load('compiler_c')")


for compiler in re.split('[ ,]+', test_for_compiler): for compiler in re.split('[ ,]+', test_for_compiler):
conf.env.stash() conf.env.stash()
@@ -76,19 +80,21 @@ def configure(conf):
except conf.errors.ConfigurationError as e: except conf.errors.ConfigurationError as e:
conf.env.revert() conf.env.revert()
conf.end_msg(False) conf.end_msg(False)
debug('compiler_c: %r' % e)
debug('compiler_c: %r', e)
else: else:
if conf.env['CC']:
if conf.env.CC:
conf.end_msg(conf.env.get_flat('CC')) conf.end_msg(conf.env.get_flat('CC'))
conf.env['COMPILER_CC'] = compiler
conf.env.COMPILER_CC = compiler
conf.env.commit()
break break
conf.env.revert()
conf.end_msg(False) conf.end_msg(False)
else: else:
conf.fatal('could not configure a C compiler!') conf.fatal('could not configure a C compiler!')


def options(opt): def options(opt):
""" """
Restrict the compiler detection from the command-line::
This is how to provide compiler preferences on the command-line::


$ waf configure --check-c-compiler=gcc $ waf configure --check-c-compiler=gcc
""" """


+ 14
- 8
waflib/Tools/compiler_cxx.py View File

@@ -48,7 +48,7 @@ cxx_compiler = {
'osf1V': ['g++'], 'osf1V': ['g++'],
'gnu': ['g++', 'clang++'], 'gnu': ['g++', 'clang++'],
'java': ['g++', 'msvc', 'clang++', 'icpc'], 'java': ['g++', 'msvc', 'clang++', 'icpc'],
'default': ['g++', 'clang++']
'default': ['clang++', 'g++']
} }
""" """
Dict mapping the platform names to Waf tools finding specific C++ compilers:: Dict mapping the platform names to Waf tools finding specific C++ compilers::
@@ -64,10 +64,14 @@ def default_compilers():


def configure(conf): def configure(conf):
""" """
Try to find a suitable C++ compiler or raise a :py:class:`waflib.Errors.ConfigurationError`.
Detects a suitable C++ compiler

:raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
""" """
try: test_for_compiler = conf.options.check_cxx_compiler or default_compilers()
except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_cxx')")
try:
test_for_compiler = conf.options.check_cxx_compiler or default_compilers()
except AttributeError:
conf.fatal("Add options(opt): opt.load('compiler_cxx')")


for compiler in re.split('[ ,]+', test_for_compiler): for compiler in re.split('[ ,]+', test_for_compiler):
conf.env.stash() conf.env.stash()
@@ -77,19 +81,21 @@ def configure(conf):
except conf.errors.ConfigurationError as e: except conf.errors.ConfigurationError as e:
conf.env.revert() conf.env.revert()
conf.end_msg(False) conf.end_msg(False)
debug('compiler_cxx: %r' % e)
debug('compiler_cxx: %r', e)
else: else:
if conf.env['CXX']:
if conf.env.CXX:
conf.end_msg(conf.env.get_flat('CXX')) conf.end_msg(conf.env.get_flat('CXX'))
conf.env['COMPILER_CXX'] = compiler
conf.env.COMPILER_CXX = compiler
conf.env.commit()
break break
conf.env.revert()
conf.end_msg(False) conf.end_msg(False)
else: else:
conf.fatal('could not configure a C++ compiler!') conf.fatal('could not configure a C++ compiler!')


def options(opt): def options(opt):
""" """
Restrict the compiler detection from the command-line::
This is how to provide compiler preferences on the command-line::


$ waf configure --check-cxx-compiler=gxx $ waf configure --check-cxx-compiler=gxx
""" """


+ 7
- 7
waflib/Tools/cxx.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
# Thomas Nagy, 2005-2018 (ita)


"Base for c++ programs and libraries" "Base for c++ programs and libraries"


@@ -10,31 +10,31 @@ from waflib.Tools.ccroot import link_task, stlink_task


@TaskGen.extension('.cpp','.cc','.cxx','.C','.c++') @TaskGen.extension('.cpp','.cc','.cxx','.C','.c++')
def cxx_hook(self, node): def cxx_hook(self, node):
"Bind the c++ file extensions to the creation of a :py:class:`waflib.Tools.cxx.cxx` instance"
"Binds c++ file extensions to create :py:class:`waflib.Tools.cxx.cxx` instances"
return self.create_compiled_task('cxx', node) return self.create_compiled_task('cxx', node)


if not '.c' in TaskGen.task_gen.mappings: if not '.c' in TaskGen.task_gen.mappings:
TaskGen.task_gen.mappings['.c'] = TaskGen.task_gen.mappings['.cpp'] TaskGen.task_gen.mappings['.c'] = TaskGen.task_gen.mappings['.cpp']


class cxx(Task.Task): class cxx(Task.Task):
"Compile C++ files into object files"
run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()}'
"Compiles C++ files into object files"
run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
vars = ['CXXDEPS'] # unused variable to depend on, just in case vars = ['CXXDEPS'] # unused variable to depend on, just in case
ext_in = ['.h'] # set the build order easily by using ext_out=['.h'] ext_in = ['.h'] # set the build order easily by using ext_out=['.h']
scan = c_preproc.scan scan = c_preproc.scan


class cxxprogram(link_task): class cxxprogram(link_task):
"Link object files into a c++ program"
"Links object files into c++ programs"
run_str = '${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}' run_str = '${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
vars = ['LINKDEPS'] vars = ['LINKDEPS']
ext_out = ['.bin'] ext_out = ['.bin']
inst_to = '${BINDIR}' inst_to = '${BINDIR}'


class cxxshlib(cxxprogram): class cxxshlib(cxxprogram):
"Link object files into a c++ shared library"
"Links object files into c++ shared libraries"
inst_to = '${LIBDIR}' inst_to = '${LIBDIR}'


class cxxstlib(stlink_task): class cxxstlib(stlink_task):
"Link object files into a c++ static library"
"Links object files into c++ static libraries"
pass # do not remove pass # do not remove



+ 51
- 37
waflib/Tools/errcheck.py View File

@@ -3,9 +3,9 @@
# Thomas Nagy, 2011 (ita) # Thomas Nagy, 2011 (ita)


""" """
errcheck: highlight common mistakes
Common mistakes highlighting.


There is a performance hit, so this tool is only loaded when running "waf -v"
There is a performance impact, so this tool is only loaded when running ``waf -v``
""" """


typos = { typos = {
@@ -18,13 +18,14 @@ typos = {
'importpath':'includes', 'importpath':'includes',
'installpath':'install_path', 'installpath':'install_path',
'iscopy':'is_copy', 'iscopy':'is_copy',
'uses':'use',
} }


meths_typos = ['__call__', 'program', 'shlib', 'stlib', 'objects'] meths_typos = ['__call__', 'program', 'shlib', 'stlib', 'objects']


import sys import sys
from waflib import Logs, Build, Node, Task, TaskGen, ConfigSet, Errors, Utils from waflib import Logs, Build, Node, Task, TaskGen, ConfigSet, Errors, Utils
import waflib.Tools.ccroot
from waflib.Tools import ccroot


def check_same_targets(self): def check_same_targets(self):
mp = Utils.defaultdict(list) mp = Utils.defaultdict(list)
@@ -33,6 +34,8 @@ def check_same_targets(self):
def check_task(tsk): def check_task(tsk):
if not isinstance(tsk, Task.Task): if not isinstance(tsk, Task.Task):
return return
if hasattr(tsk, 'no_errcheck_out'):
return


for node in tsk.outputs: for node in tsk.outputs:
mp[node].append(tsk) mp[node].append(tsk)
@@ -58,30 +61,34 @@ def check_same_targets(self):
Logs.error(msg) Logs.error(msg)
for x in v: for x in v:
if Logs.verbose > 1: if Logs.verbose > 1:
Logs.error(' %d. %r' % (1 + v.index(x), x.generator))
Logs.error(' %d. %r', 1 + v.index(x), x.generator)
else: else:
Logs.error(' %d. %r in %r' % (1 + v.index(x), x.generator.name, getattr(x.generator, 'path', None)))
Logs.error(' %d. %r in %r', 1 + v.index(x), x.generator.name, getattr(x.generator, 'path', None))
Logs.error('If you think that this is an error, set no_errcheck_out on the task instance')


if not dupe: if not dupe:
for (k, v) in uids.items(): for (k, v) in uids.items():
if len(v) > 1: if len(v) > 1:
Logs.error('* Several tasks use the same identifier. Please check the information on\n https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid') Logs.error('* Several tasks use the same identifier. Please check the information on\n https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid')
tg_details = tsk.generator.name
if Logs.verbose > 2:
tg_details = tsk.generator
for tsk in v: for tsk in v:
Logs.error(' - object %r (%r) defined in %r' % (tsk.__class__.__name__, tsk, tsk.generator))
Logs.error(' - object %r (%r) defined in %r', tsk.__class__.__name__, tsk, tg_details)


def check_invalid_constraints(self): def check_invalid_constraints(self):
feat = set([])
feat = set()
for x in list(TaskGen.feats.values()): for x in list(TaskGen.feats.values()):
feat.union(set(x)) feat.union(set(x))
for (x, y) in TaskGen.task_gen.prec.items(): for (x, y) in TaskGen.task_gen.prec.items():
feat.add(x) feat.add(x)
feat.union(set(y)) feat.union(set(y))
ext = set([])
ext = set()
for x in TaskGen.task_gen.mappings.values(): for x in TaskGen.task_gen.mappings.values():
ext.add(x.__name__) ext.add(x.__name__)
invalid = ext & feat invalid = ext & feat
if invalid: if invalid:
Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method' % list(invalid))
Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method', list(invalid))


# the build scripts have been read, so we can check for invalid after/before attributes on task classes # the build scripts have been read, so we can check for invalid after/before attributes on task classes
for cls in list(Task.classes.values()): for cls in list(Task.classes.values()):
@@ -90,15 +97,15 @@ def check_invalid_constraints(self):


for x in ('before', 'after'): for x in ('before', 'after'):
for y in Utils.to_list(getattr(cls, x, [])): for y in Utils.to_list(getattr(cls, x, [])):
if not Task.classes.get(y, None):
Logs.error('Erroneous order constraint %r=%r on task class %r' % (x, y, cls.__name__))
if not Task.classes.get(y):
Logs.error('Erroneous order constraint %r=%r on task class %r', x, y, cls.__name__)
if getattr(cls, 'rule', None): if getattr(cls, 'rule', None):
Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")' % cls.__name__)
Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")', cls.__name__)


def replace(m): def replace(m):
""" """
We could add properties, but they would not work in some cases:
bld.program(...) requires 'source' in the attributes
Replaces existing BuildContext methods to verify parameter names,
for example ``bld(source=)`` has no ending *s*
""" """
oldcall = getattr(Build.BuildContext, m) oldcall = getattr(Build.BuildContext, m)
def call(self, *k, **kw): def call(self, *k, **kw):
@@ -107,13 +114,13 @@ def replace(m):
if x in kw: if x in kw:
if x == 'iscopy' and 'subst' in getattr(self, 'features', ''): if x == 'iscopy' and 'subst' in getattr(self, 'features', ''):
continue continue
Logs.error('Fix the typo %r -> %r on %r' % (x, typos[x], ret))
Logs.error('Fix the typo %r -> %r on %r', x, typos[x], ret)
return ret return ret
setattr(Build.BuildContext, m, call) setattr(Build.BuildContext, m, call)


def enhance_lib(): def enhance_lib():
""" """
modify existing classes and methods
Modifies existing classes and methods to enable error verification
""" """
for m in meths_typos: for m in meths_typos:
replace(m) replace(m)
@@ -121,26 +128,36 @@ def enhance_lib():
# catch '..' in ant_glob patterns # catch '..' in ant_glob patterns
def ant_glob(self, *k, **kw): def ant_glob(self, *k, **kw):
if k: if k:
lst=Utils.to_list(k[0])
lst = Utils.to_list(k[0])
for pat in lst: for pat in lst:
if '..' in pat.split('/'):
Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'" % k[0])
if kw.get('remove', True):
try:
if self.is_child_of(self.ctx.bldnode) and not kw.get('quiet', False):
Logs.error('Using ant_glob on the build folder (%r) is dangerous (quiet=True to disable this warning)' % self)
except AttributeError:
pass
sp = pat.split('/')
if '..' in sp:
Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'", k[0])
if '.' in sp:
Logs.error("In ant_glob pattern %r: '.' means 'one dot', not 'current directory'", k[0])
return self.old_ant_glob(*k, **kw) return self.old_ant_glob(*k, **kw)
Node.Node.old_ant_glob = Node.Node.ant_glob Node.Node.old_ant_glob = Node.Node.ant_glob
Node.Node.ant_glob = ant_glob Node.Node.ant_glob = ant_glob


# catch ant_glob on build folders
def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True, quiet=False):
if remove:
try:
if self.is_child_of(self.ctx.bldnode) and not quiet:
quiet = True
Logs.error('Calling ant_glob on build folders (%r) is dangerous: add quiet=True / remove=False', self)
except AttributeError:
pass
return self.old_ant_iter(accept, maxdepth, pats, dir, src, remove, quiet)
Node.Node.old_ant_iter = Node.Node.ant_iter
Node.Node.ant_iter = ant_iter

# catch conflicting ext_in/ext_out/before/after declarations # catch conflicting ext_in/ext_out/before/after declarations
old = Task.is_before old = Task.is_before
def is_before(t1, t2): def is_before(t1, t2):
ret = old(t1, t2) ret = old(t1, t2)
if ret and old(t2, t1): if ret and old(t2, t1):
Logs.error('Contradictory order constraints in classes %r %r' % (t1, t2))
Logs.error('Contradictory order constraints in classes %r %r', t1, t2)
return ret return ret
Task.is_before = is_before Task.is_before = is_before


@@ -152,7 +169,7 @@ def enhance_lib():
Logs.error('feature shlib -> cshlib, dshlib or cxxshlib') Logs.error('feature shlib -> cshlib, dshlib or cxxshlib')
for x in ('c', 'cxx', 'd', 'fc'): for x in ('c', 'cxx', 'd', 'fc'):
if not x in lst and lst and lst[0] in [x+y for y in ('program', 'shlib', 'stlib')]: if not x in lst and lst and lst[0] in [x+y for y in ('program', 'shlib', 'stlib')]:
Logs.error('%r features is probably missing %r' % (self, x))
Logs.error('%r features is probably missing %r', self, x)
TaskGen.feature('*')(check_err_features) TaskGen.feature('*')(check_err_features)


# check for erroneous order constraints # check for erroneous order constraints
@@ -160,12 +177,12 @@ def enhance_lib():
if not hasattr(self, 'rule') and not 'subst' in Utils.to_list(self.features): if not hasattr(self, 'rule') and not 'subst' in Utils.to_list(self.features):
for x in ('before', 'after', 'ext_in', 'ext_out'): for x in ('before', 'after', 'ext_in', 'ext_out'):
if hasattr(self, x): if hasattr(self, x):
Logs.warn('Erroneous order constraint %r on non-rule based task generator %r' % (x, self))
Logs.warn('Erroneous order constraint %r on non-rule based task generator %r', x, self)
else: else:
for x in ('before', 'after'): for x in ('before', 'after'):
for y in self.to_list(getattr(self, x, [])): for y in self.to_list(getattr(self, x, [])):
if not Task.classes.get(y, None):
Logs.error('Erroneous order constraint %s=%r on %r (no such class)' % (x, y, self))
if not Task.classes.get(y):
Logs.error('Erroneous order constraint %s=%r on %r (no such class)', x, y, self)
TaskGen.feature('*')(check_err_order) TaskGen.feature('*')(check_err_order)


# check for @extension used with @feature/@before_method/@after_method # check for @extension used with @feature/@before_method/@after_method
@@ -200,24 +217,21 @@ def enhance_lib():
TaskGen.task_gen.use_rec = use_rec TaskGen.task_gen.use_rec = use_rec


# check for env.append # check for env.append
def getattri(self, name, default=None):
def _getattr(self, name, default=None):
if name == 'append' or name == 'add': if name == 'append' or name == 'add':
raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique') raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique')
elif name == 'prepend': elif name == 'prepend':
raise Errors.WafError('env.prepend does not exist: use env.prepend_value') raise Errors.WafError('env.prepend does not exist: use env.prepend_value')
if name in self.__slots__: if name in self.__slots__:
return object.__getattr__(self, name, default)
return super(ConfigSet.ConfigSet, self).__getattr__(name, default)
else: else:
return self[name] return self[name]
ConfigSet.ConfigSet.__getattr__ = getattri
ConfigSet.ConfigSet.__getattr__ = _getattr




def options(opt): def options(opt):
""" """
Add a few methods
Error verification can be enabled by default (not just on ``waf -v``) by adding to the user script options
""" """
enhance_lib() enhance_lib()


def configure(conf):
pass


+ 55
- 58
waflib/Tools/gcc.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh) # Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009 # Yinon Ehrlich, 2009


@@ -27,54 +27,51 @@ def gcc_common_flags(conf):
""" """
v = conf.env v = conf.env


v['CC_SRC_F'] = []
v['CC_TGT_F'] = ['-c', '-o']
v.CC_SRC_F = []
v.CC_TGT_F = ['-c', '-o']


# linker
if not v['LINK_CC']: v['LINK_CC'] = v['CC']
v['CCLNK_SRC_F'] = []
v['CCLNK_TGT_F'] = ['-o']
v['CPPPATH_ST'] = '-I%s'
v['DEFINES_ST'] = '-D%s'
if not v.LINK_CC:
v.LINK_CC = v.CC


v['LIB_ST'] = '-l%s' # template for adding libs
v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
v['STLIB_ST'] = '-l%s'
v['STLIBPATH_ST'] = '-L%s'
v['RPATH_ST'] = '-Wl,-rpath,%s'
v.CCLNK_SRC_F = []
v.CCLNK_TGT_F = ['-o']
v.CPPPATH_ST = '-I%s'
v.DEFINES_ST = '-D%s'


v['SONAME_ST'] = '-Wl,-h,%s'
v['SHLIB_MARKER'] = '-Wl,-Bdynamic'
v['STLIB_MARKER'] = '-Wl,-Bstatic'
v.LIB_ST = '-l%s' # template for adding libs
v.LIBPATH_ST = '-L%s' # template for adding libpaths
v.STLIB_ST = '-l%s'
v.STLIBPATH_ST = '-L%s'
v.RPATH_ST = '-Wl,-rpath,%s'


# program
v['cprogram_PATTERN'] = '%s'
v.SONAME_ST = '-Wl,-h,%s'
v.SHLIB_MARKER = '-Wl,-Bdynamic'
v.STLIB_MARKER = '-Wl,-Bstatic'


# shared librar
v['CFLAGS_cshlib'] = ['-fPIC']
v['LINKFLAGS_cshlib'] = ['-shared']
v['cshlib_PATTERN'] = 'lib%s.so'
v.cprogram_PATTERN = '%s'


# static lib
v['LINKFLAGS_cstlib'] = ['-Wl,-Bstatic']
v['cstlib_PATTERN'] = 'lib%s.a'
v.CFLAGS_cshlib = ['-fPIC']
v.LINKFLAGS_cshlib = ['-shared']
v.cshlib_PATTERN = 'lib%s.so'


# osx stuff
v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
v['CFLAGS_MACBUNDLE'] = ['-fPIC']
v['macbundle_PATTERN'] = '%s.bundle'
v.LINKFLAGS_cstlib = ['-Wl,-Bstatic']
v.cstlib_PATTERN = 'lib%s.a'

v.LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup']
v.CFLAGS_MACBUNDLE = ['-fPIC']
v.macbundle_PATTERN = '%s.bundle'


@conf @conf
def gcc_modifier_win32(conf): def gcc_modifier_win32(conf):
"""Configuration flags for executing gcc on Windows""" """Configuration flags for executing gcc on Windows"""
v = conf.env v = conf.env
v['cprogram_PATTERN'] = '%s.exe'
v.cprogram_PATTERN = '%s.exe'


v['cshlib_PATTERN'] = '%s.dll'
v['implib_PATTERN'] = 'lib%s.dll.a'
v['IMPLIB_ST'] = '-Wl,--out-implib,%s'
v.cshlib_PATTERN = '%s.dll'
v.implib_PATTERN = '%s.dll.a'
v.IMPLIB_ST = '-Wl,--out-implib,%s'


v['CFLAGS_cshlib'] = []
v.CFLAGS_cshlib = []


# Auto-import is enabled by default even without this option, # Auto-import is enabled by default even without this option,
# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
@@ -86,42 +83,42 @@ def gcc_modifier_cygwin(conf):
"""Configuration flags for executing gcc on Cygwin""" """Configuration flags for executing gcc on Cygwin"""
gcc_modifier_win32(conf) gcc_modifier_win32(conf)
v = conf.env v = conf.env
v['cshlib_PATTERN'] = 'cyg%s.dll'
v.cshlib_PATTERN = 'cyg%s.dll'
v.append_value('LINKFLAGS_cshlib', ['-Wl,--enable-auto-image-base']) v.append_value('LINKFLAGS_cshlib', ['-Wl,--enable-auto-image-base'])
v['CFLAGS_cshlib'] = []
v.CFLAGS_cshlib = []


@conf @conf
def gcc_modifier_darwin(conf): def gcc_modifier_darwin(conf):
"""Configuration flags for executing gcc on MacOS""" """Configuration flags for executing gcc on MacOS"""
v = conf.env v = conf.env
v['CFLAGS_cshlib'] = ['-fPIC']
v['LINKFLAGS_cshlib'] = ['-dynamiclib']
v['cshlib_PATTERN'] = 'lib%s.dylib'
v['FRAMEWORKPATH_ST'] = '-F%s'
v['FRAMEWORK_ST'] = ['-framework']
v['ARCH_ST'] = ['-arch']
v.CFLAGS_cshlib = ['-fPIC']
v.LINKFLAGS_cshlib = ['-dynamiclib']
v.cshlib_PATTERN = 'lib%s.dylib'
v.FRAMEWORKPATH_ST = '-F%s'
v.FRAMEWORK_ST = ['-framework']
v.ARCH_ST = ['-arch']


v['LINKFLAGS_cstlib'] = []
v.LINKFLAGS_cstlib = []


v['SHLIB_MARKER'] = []
v['STLIB_MARKER'] = []
v['SONAME_ST'] = []
v.SHLIB_MARKER = []
v.STLIB_MARKER = []
v.SONAME_ST = []


@conf @conf
def gcc_modifier_aix(conf): def gcc_modifier_aix(conf):
"""Configuration flags for executing gcc on AIX""" """Configuration flags for executing gcc on AIX"""
v = conf.env v = conf.env
v['LINKFLAGS_cprogram'] = ['-Wl,-brtl']
v['LINKFLAGS_cshlib'] = ['-shared','-Wl,-brtl,-bexpfull']
v['SHLIB_MARKER'] = []
v.LINKFLAGS_cprogram = ['-Wl,-brtl']
v.LINKFLAGS_cshlib = ['-shared','-Wl,-brtl,-bexpfull']
v.SHLIB_MARKER = []


@conf @conf
def gcc_modifier_hpux(conf): def gcc_modifier_hpux(conf):
v = conf.env v = conf.env
v['SHLIB_MARKER'] = []
v['STLIB_MARKER'] = []
v['CFLAGS_cshlib'] = ['-fPIC','-DPIC']
v['cshlib_PATTERN'] = 'lib%s.sl'
v.SHLIB_MARKER = []
v.STLIB_MARKER = []
v.CFLAGS_cshlib = ['-fPIC','-DPIC']
v.cshlib_PATTERN = 'lib%s.sl'


@conf @conf
def gcc_modifier_openbsd(conf): def gcc_modifier_openbsd(conf):
@@ -130,9 +127,9 @@ def gcc_modifier_openbsd(conf):
@conf @conf
def gcc_modifier_osf1V(conf): def gcc_modifier_osf1V(conf):
v = conf.env v = conf.env
v['SHLIB_MARKER'] = []
v['STLIB_MARKER'] = []
v['SONAME_ST'] = []
v.SHLIB_MARKER = []
v.STLIB_MARKER = []
v.SONAME_ST = []


@conf @conf
def gcc_modifier_platform(conf): def gcc_modifier_platform(conf):
@@ -155,5 +152,5 @@ def configure(conf):
conf.cc_load_tools() conf.cc_load_tools()
conf.cc_add_flags() conf.cc_add_flags()
conf.link_add_flags() conf.link_add_flags()
conf.check_gcc_o_space()



+ 56
- 58
waflib/Tools/gxx.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh) # Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009 # Yinon Ehrlich, 2009


@@ -14,7 +14,7 @@ from waflib.Configure import conf
@conf @conf
def find_gxx(conf): def find_gxx(conf):
""" """
Find the program g++, and if present, try to detect its version number
Finds the program g++, and if present, try to detect its version number
""" """
cxx = conf.find_program(['g++', 'c++'], var='CXX') cxx = conf.find_program(['g++', 'c++'], var='CXX')
conf.get_cc_version(cxx, gcc=True) conf.get_cc_version(cxx, gcc=True)
@@ -27,54 +27,51 @@ def gxx_common_flags(conf):
""" """
v = conf.env v = conf.env


v['CXX_SRC_F'] = []
v['CXX_TGT_F'] = ['-c', '-o']
v.CXX_SRC_F = []
v.CXX_TGT_F = ['-c', '-o']


# linker
if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
v['CXXLNK_SRC_F'] = []
v['CXXLNK_TGT_F'] = ['-o']
v['CPPPATH_ST'] = '-I%s'
v['DEFINES_ST'] = '-D%s'
if not v.LINK_CXX:
v.LINK_CXX = v.CXX


v['LIB_ST'] = '-l%s' # template for adding libs
v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
v['STLIB_ST'] = '-l%s'
v['STLIBPATH_ST'] = '-L%s'
v['RPATH_ST'] = '-Wl,-rpath,%s'
v.CXXLNK_SRC_F = []
v.CXXLNK_TGT_F = ['-o']
v.CPPPATH_ST = '-I%s'
v.DEFINES_ST = '-D%s'


v['SONAME_ST'] = '-Wl,-h,%s'
v['SHLIB_MARKER'] = '-Wl,-Bdynamic'
v['STLIB_MARKER'] = '-Wl,-Bstatic'
v.LIB_ST = '-l%s' # template for adding libs
v.LIBPATH_ST = '-L%s' # template for adding libpaths
v.STLIB_ST = '-l%s'
v.STLIBPATH_ST = '-L%s'
v.RPATH_ST = '-Wl,-rpath,%s'


# program
v['cxxprogram_PATTERN'] = '%s'
v.SONAME_ST = '-Wl,-h,%s'
v.SHLIB_MARKER = '-Wl,-Bdynamic'
v.STLIB_MARKER = '-Wl,-Bstatic'


# shared library
v['CXXFLAGS_cxxshlib'] = ['-fPIC']
v['LINKFLAGS_cxxshlib'] = ['-shared']
v['cxxshlib_PATTERN'] = 'lib%s.so'
v.cxxprogram_PATTERN = '%s'


# static lib
v['LINKFLAGS_cxxstlib'] = ['-Wl,-Bstatic']
v['cxxstlib_PATTERN'] = 'lib%s.a'
v.CXXFLAGS_cxxshlib = ['-fPIC']
v.LINKFLAGS_cxxshlib = ['-shared']
v.cxxshlib_PATTERN = 'lib%s.so'


# osx stuff
v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
v['CXXFLAGS_MACBUNDLE'] = ['-fPIC']
v['macbundle_PATTERN'] = '%s.bundle'
v.LINKFLAGS_cxxstlib = ['-Wl,-Bstatic']
v.cxxstlib_PATTERN = 'lib%s.a'

v.LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup']
v.CXXFLAGS_MACBUNDLE = ['-fPIC']
v.macbundle_PATTERN = '%s.bundle'


@conf @conf
def gxx_modifier_win32(conf): def gxx_modifier_win32(conf):
"""Configuration flags for executing gcc on Windows""" """Configuration flags for executing gcc on Windows"""
v = conf.env v = conf.env
v['cxxprogram_PATTERN'] = '%s.exe'
v.cxxprogram_PATTERN = '%s.exe'


v['cxxshlib_PATTERN'] = '%s.dll'
v['implib_PATTERN'] = 'lib%s.dll.a'
v['IMPLIB_ST'] = '-Wl,--out-implib,%s'
v.cxxshlib_PATTERN = '%s.dll'
v.implib_PATTERN = '%s.dll.a'
v.IMPLIB_ST = '-Wl,--out-implib,%s'


v['CXXFLAGS_cxxshlib'] = []
v.CXXFLAGS_cxxshlib = []


# Auto-import is enabled by default even without this option, # Auto-import is enabled by default even without this option,
# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
@@ -86,43 +83,43 @@ def gxx_modifier_cygwin(conf):
"""Configuration flags for executing g++ on Cygwin""" """Configuration flags for executing g++ on Cygwin"""
gxx_modifier_win32(conf) gxx_modifier_win32(conf)
v = conf.env v = conf.env
v['cxxshlib_PATTERN'] = 'cyg%s.dll'
v.cxxshlib_PATTERN = 'cyg%s.dll'
v.append_value('LINKFLAGS_cxxshlib', ['-Wl,--enable-auto-image-base']) v.append_value('LINKFLAGS_cxxshlib', ['-Wl,--enable-auto-image-base'])
v['CXXFLAGS_cxxshlib'] = []
v.CXXFLAGS_cxxshlib = []


@conf @conf
def gxx_modifier_darwin(conf): def gxx_modifier_darwin(conf):
"""Configuration flags for executing g++ on MacOS""" """Configuration flags for executing g++ on MacOS"""
v = conf.env v = conf.env
v['CXXFLAGS_cxxshlib'] = ['-fPIC']
v['LINKFLAGS_cxxshlib'] = ['-dynamiclib']
v['cxxshlib_PATTERN'] = 'lib%s.dylib'
v['FRAMEWORKPATH_ST'] = '-F%s'
v['FRAMEWORK_ST'] = ['-framework']
v['ARCH_ST'] = ['-arch']
v.CXXFLAGS_cxxshlib = ['-fPIC']
v.LINKFLAGS_cxxshlib = ['-dynamiclib']
v.cxxshlib_PATTERN = 'lib%s.dylib'
v.FRAMEWORKPATH_ST = '-F%s'
v.FRAMEWORK_ST = ['-framework']
v.ARCH_ST = ['-arch']


v['LINKFLAGS_cxxstlib'] = []
v.LINKFLAGS_cxxstlib = []


v['SHLIB_MARKER'] = []
v['STLIB_MARKER'] = []
v['SONAME_ST'] = []
v.SHLIB_MARKER = []
v.STLIB_MARKER = []
v.SONAME_ST = []


@conf @conf
def gxx_modifier_aix(conf): def gxx_modifier_aix(conf):
"""Configuration flags for executing g++ on AIX""" """Configuration flags for executing g++ on AIX"""
v = conf.env v = conf.env
v['LINKFLAGS_cxxprogram']= ['-Wl,-brtl']
v.LINKFLAGS_cxxprogram= ['-Wl,-brtl']


v['LINKFLAGS_cxxshlib'] = ['-shared', '-Wl,-brtl,-bexpfull']
v['SHLIB_MARKER'] = []
v.LINKFLAGS_cxxshlib = ['-shared', '-Wl,-brtl,-bexpfull']
v.SHLIB_MARKER = []


@conf @conf
def gxx_modifier_hpux(conf): def gxx_modifier_hpux(conf):
v = conf.env v = conf.env
v['SHLIB_MARKER'] = []
v['STLIB_MARKER'] = []
v['CFLAGS_cxxshlib'] = ['-fPIC','-DPIC']
v['cxxshlib_PATTERN'] = 'lib%s.sl'
v.SHLIB_MARKER = []
v.STLIB_MARKER = []
v.CFLAGS_cxxshlib = ['-fPIC','-DPIC']
v.cxxshlib_PATTERN = 'lib%s.sl'


@conf @conf
def gxx_modifier_openbsd(conf): def gxx_modifier_openbsd(conf):
@@ -131,9 +128,9 @@ def gxx_modifier_openbsd(conf):
@conf @conf
def gcc_modifier_osf1V(conf): def gcc_modifier_osf1V(conf):
v = conf.env v = conf.env
v['SHLIB_MARKER'] = []
v['STLIB_MARKER'] = []
v['SONAME_ST'] = []
v.SHLIB_MARKER = []
v.STLIB_MARKER = []
v.SONAME_ST = []


@conf @conf
def gxx_modifier_platform(conf): def gxx_modifier_platform(conf):
@@ -156,4 +153,5 @@ def configure(conf):
conf.cxx_load_tools() conf.cxx_load_tools()
conf.cxx_add_flags() conf.cxx_add_flags()
conf.link_add_flags() conf.link_add_flags()
conf.check_gcc_o_space('cxx')



+ 3
- 6
waflib/Tools/icc.py View File

@@ -1,10 +1,10 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Stian Selnes 2008 # Stian Selnes 2008
# Thomas Nagy 2009-2010 (ita)
# Thomas Nagy 2009-2018 (ita)


""" """
Detect the Intel C compiler
Detects the Intel C compiler
""" """


import sys import sys
@@ -14,11 +14,8 @@ from waflib.Configure import conf
@conf @conf
def find_icc(conf): def find_icc(conf):
""" """
Find the program icc and execute it to ensure it really is icc
Finds the program icc and execute it to ensure it really is icc
""" """
if sys.platform == 'cygwin':
conf.fatal('The Intel compiler does not work on Cygwin')

cc = conf.find_program(['icc', 'ICL'], var='CC') cc = conf.find_program(['icc', 'ICL'], var='CC')
conf.get_cc_version(cc, icc=True) conf.get_cc_version(cc, icc=True)
conf.env.CC_NAME = 'icc' conf.env.CC_NAME = 'icc'


+ 3
- 6
waflib/Tools/icpc.py View File

@@ -1,9 +1,9 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy 2009-2010 (ita)
# Thomas Nagy 2009-2018 (ita)


""" """
Detect the Intel C++ compiler
Detects the Intel C++ compiler
""" """


import sys import sys
@@ -13,11 +13,8 @@ from waflib.Configure import conf
@conf @conf
def find_icpc(conf): def find_icpc(conf):
""" """
Find the program icpc, and execute it to ensure it really is icpc
Finds the program icpc, and execute it to ensure it really is icpc
""" """
if sys.platform == 'cygwin':
conf.fatal('The Intel compiler does not work on Cygwin')

cxx = conf.find_program('icpc', var='CXX') cxx = conf.find_program('icpc', var='CXX')
conf.get_cc_version(cxx, icc=True) conf.get_cc_version(cxx, icc=True)
conf.env.CXX_NAME = 'icc' conf.env.CXX_NAME = 'icc'


+ 30
- 24
waflib/Tools/irixcc.py View File

@@ -1,11 +1,12 @@
#! /usr/bin/env python #! /usr/bin/env python
# encoding: utf-8
# imported from samba # imported from samba


""" """
compiler definition for irix/MIPSpro cc compiler
based on suncc.py from waf
Compiler definition for irix/MIPSpro cc compiler
""" """


from waflib import Errors
from waflib.Tools import ccroot, ar from waflib.Tools import ccroot, ar
from waflib.Configure import conf from waflib.Configure import conf


@@ -13,41 +14,46 @@ from waflib.Configure import conf
def find_irixcc(conf): def find_irixcc(conf):
v = conf.env v = conf.env
cc = None cc = None
if v['CC']: cc = v['CC']
elif 'CC' in conf.environ: cc = conf.environ['CC']
if not cc: cc = conf.find_program('cc', var='CC')
if not cc: conf.fatal('irixcc was not found')
if v.CC:
cc = v.CC
elif 'CC' in conf.environ:
cc = conf.environ['CC']
if not cc:
cc = conf.find_program('cc', var='CC')
if not cc:
conf.fatal('irixcc was not found')


try: try:
conf.cmd_and_log(cc + ['-version']) conf.cmd_and_log(cc + ['-version'])
except Exception:
except Errors.WafError:
conf.fatal('%r -version could not be executed' % cc) conf.fatal('%r -version could not be executed' % cc)


v['CC'] = cc
v['CC_NAME'] = 'irix'
v.CC = cc
v.CC_NAME = 'irix'


@conf @conf
def irixcc_common_flags(conf): def irixcc_common_flags(conf):
v = conf.env v = conf.env


v['CC_SRC_F'] = ''
v['CC_TGT_F'] = ['-c', '-o']
v['CPPPATH_ST'] = '-I%s'
v['DEFINES_ST'] = '-D%s'
v.CC_SRC_F = ''
v.CC_TGT_F = ['-c', '-o']
v.CPPPATH_ST = '-I%s'
v.DEFINES_ST = '-D%s'


# linker
if not v['LINK_CC']: v['LINK_CC'] = v['CC']
v['CCLNK_SRC_F'] = ''
v['CCLNK_TGT_F'] = ['-o']
if not v.LINK_CC:
v.LINK_CC = v.CC


v['LIB_ST'] = '-l%s' # template for adding libs
v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
v['STLIB_ST'] = '-l%s'
v['STLIBPATH_ST'] = '-L%s'
v.CCLNK_SRC_F = ''
v.CCLNK_TGT_F = ['-o']


v['cprogram_PATTERN'] = '%s'
v['cshlib_PATTERN'] = 'lib%s.so'
v['cstlib_PATTERN'] = 'lib%s.a'
v.LIB_ST = '-l%s' # template for adding libs
v.LIBPATH_ST = '-L%s' # template for adding libpaths
v.STLIB_ST = '-l%s'
v.STLIBPATH_ST = '-L%s'

v.cprogram_PATTERN = '%s'
v.cshlib_PATTERN = 'lib%s.so'
v.cstlib_PATTERN = 'lib%s.a'


def configure(conf): def configure(conf):
conf.find_irixcc() conf.find_irixcc()


+ 357
- 512
waflib/Tools/msvc.py
File diff suppressed because it is too large
View File


+ 27
- 29
waflib/Tools/suncc.py View File

@@ -1,27 +1,26 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh) # Ralf Habacker, 2006 (rh)


from waflib import Errors
from waflib.Tools import ccroot, ar from waflib.Tools import ccroot, ar
from waflib.Configure import conf from waflib.Configure import conf


@conf @conf
def find_scc(conf): def find_scc(conf):
""" """
Detect the Sun C compiler
Detects the Sun C compiler
""" """
v = conf.env v = conf.env
cc = conf.find_program('cc', var='CC') cc = conf.find_program('cc', var='CC')

try: try:
conf.cmd_and_log(cc + ['-flags']) conf.cmd_and_log(cc + ['-flags'])
except Exception:
except Errors.WafError:
conf.fatal('%r is not a Sun compiler' % cc) conf.fatal('%r is not a Sun compiler' % cc)
v.CC_NAME = 'sun' v.CC_NAME = 'sun'
conf.get_suncc_version(cc) conf.get_suncc_version(cc)



@conf @conf
def scc_common_flags(conf): def scc_common_flags(conf):
""" """
@@ -29,36 +28,34 @@ def scc_common_flags(conf):
""" """
v = conf.env v = conf.env


v['CC_SRC_F'] = []
v['CC_TGT_F'] = ['-c', '-o']
v.CC_SRC_F = []
v.CC_TGT_F = ['-c', '-o', '']


# linker
if not v['LINK_CC']: v['LINK_CC'] = v['CC']
v['CCLNK_SRC_F'] = ''
v['CCLNK_TGT_F'] = ['-o']
v['CPPPATH_ST'] = '-I%s'
v['DEFINES_ST'] = '-D%s'
if not v.LINK_CC:
v.LINK_CC = v.CC


v['LIB_ST'] = '-l%s' # template for adding libs
v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
v['STLIB_ST'] = '-l%s'
v['STLIBPATH_ST'] = '-L%s'
v.CCLNK_SRC_F = ''
v.CCLNK_TGT_F = ['-o', '']
v.CPPPATH_ST = '-I%s'
v.DEFINES_ST = '-D%s'


v['SONAME_ST'] = '-Wl,-h,%s'
v['SHLIB_MARKER'] = '-Bdynamic'
v['STLIB_MARKER'] = '-Bstatic'
v.LIB_ST = '-l%s' # template for adding libs
v.LIBPATH_ST = '-L%s' # template for adding libpaths
v.STLIB_ST = '-l%s'
v.STLIBPATH_ST = '-L%s'


# program
v['cprogram_PATTERN'] = '%s'
v.SONAME_ST = '-Wl,-h,%s'
v.SHLIB_MARKER = '-Bdynamic'
v.STLIB_MARKER = '-Bstatic'


# shared library
v['CFLAGS_cshlib'] = ['-xcode=pic32', '-DPIC']
v['LINKFLAGS_cshlib'] = ['-G']
v['cshlib_PATTERN'] = 'lib%s.so'
v.cprogram_PATTERN = '%s'


# static lib
v['LINKFLAGS_cstlib'] = ['-Bstatic']
v['cstlib_PATTERN'] = 'lib%s.a'
v.CFLAGS_cshlib = ['-xcode=pic32', '-DPIC']
v.LINKFLAGS_cshlib = ['-G']
v.cshlib_PATTERN = 'lib%s.so'

v.LINKFLAGS_cstlib = ['-Bstatic']
v.cstlib_PATTERN = 'lib%s.a'


def configure(conf): def configure(conf):
conf.find_scc() conf.find_scc()
@@ -67,3 +64,4 @@ def configure(conf):
conf.cc_load_tools() conf.cc_load_tools()
conf.cc_add_flags() conf.cc_add_flags()
conf.link_add_flags() conf.link_add_flags()


+ 26
- 27
waflib/Tools/suncxx.py View File

@@ -1,21 +1,22 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh) # Ralf Habacker, 2006 (rh)


from waflib import Errors
from waflib.Tools import ccroot, ar from waflib.Tools import ccroot, ar
from waflib.Configure import conf from waflib.Configure import conf


@conf @conf
def find_sxx(conf): def find_sxx(conf):
""" """
Detect the sun C++ compiler
Detects the sun C++ compiler
""" """
v = conf.env v = conf.env
cc = conf.find_program(['CC', 'c++'], var='CXX') cc = conf.find_program(['CC', 'c++'], var='CXX')
try: try:
conf.cmd_and_log(cc + ['-flags']) conf.cmd_and_log(cc + ['-flags'])
except Exception:
except Errors.WafError:
conf.fatal('%r is not a Sun compiler' % cc) conf.fatal('%r is not a Sun compiler' % cc)
v.CXX_NAME = 'sun' v.CXX_NAME = 'sun'
conf.get_suncc_version(cc) conf.get_suncc_version(cc)
@@ -27,36 +28,34 @@ def sxx_common_flags(conf):
""" """
v = conf.env v = conf.env


v['CXX_SRC_F'] = []
v['CXX_TGT_F'] = ['-c', '-o']
v.CXX_SRC_F = []
v.CXX_TGT_F = ['-c', '-o', '']


# linker
if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
v['CXXLNK_SRC_F'] = []
v['CXXLNK_TGT_F'] = ['-o']
v['CPPPATH_ST'] = '-I%s'
v['DEFINES_ST'] = '-D%s'
if not v.LINK_CXX:
v.LINK_CXX = v.CXX


v['LIB_ST'] = '-l%s' # template for adding libs
v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
v['STLIB_ST'] = '-l%s'
v['STLIBPATH_ST'] = '-L%s'
v.CXXLNK_SRC_F = []
v.CXXLNK_TGT_F = ['-o', '']
v.CPPPATH_ST = '-I%s'
v.DEFINES_ST = '-D%s'


v['SONAME_ST'] = '-Wl,-h,%s'
v['SHLIB_MARKER'] = '-Bdynamic'
v['STLIB_MARKER'] = '-Bstatic'
v.LIB_ST = '-l%s' # template for adding libs
v.LIBPATH_ST = '-L%s' # template for adding libpaths
v.STLIB_ST = '-l%s'
v.STLIBPATH_ST = '-L%s'


# program
v['cxxprogram_PATTERN'] = '%s'
v.SONAME_ST = '-Wl,-h,%s'
v.SHLIB_MARKER = '-Bdynamic'
v.STLIB_MARKER = '-Bstatic'


# shared library
v['CXXFLAGS_cxxshlib'] = ['-xcode=pic32', '-DPIC']
v['LINKFLAGS_cxxshlib'] = ['-G']
v['cxxshlib_PATTERN'] = 'lib%s.so'
v.cxxprogram_PATTERN = '%s'


# static lib
v['LINKFLAGS_cxxstlib'] = ['-Bstatic']
v['cxxstlib_PATTERN'] = 'lib%s.a'
v.CXXFLAGS_cxxshlib = ['-xcode=pic32', '-DPIC']
v.LINKFLAGS_cxxshlib = ['-G']
v.cxxshlib_PATTERN = 'lib%s.so'

v.LINKFLAGS_cxxstlib = ['-Bstatic']
v.cxxstlib_PATTERN = 'lib%s.a'


def configure(conf): def configure(conf):
conf.find_sxx() conf.find_sxx()


+ 152
- 63
waflib/Tools/waf_unit_test.py View File

@@ -1,10 +1,10 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Carlos Rafael Giani, 2006 # Carlos Rafael Giani, 2006
# Thomas Nagy, 2010
# Thomas Nagy, 2010-2018 (ita)


""" """
Unit testing system for C/C++/D providing test execution:
Unit testing system for C/C++/D and interpreted languages providing test execution:


* in parallel, by using ``waf -j`` * in parallel, by using ``waf -j``
* partial (only the tests that have changed) or full (by using ``waf --alltests``) * partial (only the tests that have changed) or full (by using ``waf --alltests``)
@@ -31,31 +31,128 @@ the predefined callback::
bld(features='cxx cxxprogram test', source='main.c', target='app') bld(features='cxx cxxprogram test', source='main.c', target='app')
from waflib.Tools import waf_unit_test from waflib.Tools import waf_unit_test
bld.add_post_fun(waf_unit_test.summary) bld.add_post_fun(waf_unit_test.summary)

By passing --dump-test-scripts the build outputs corresponding python files
(with extension _run.py) that are useful for debugging purposes.
""" """


import os
import os, shlex, sys
from waflib.TaskGen import feature, after_method, taskgen_method from waflib.TaskGen import feature, after_method, taskgen_method
from waflib import Utils, Task, Logs, Options from waflib import Utils, Task, Logs, Options
from waflib.Tools import ccroot
testlock = Utils.threading.Lock() testlock = Utils.threading.Lock()


SCRIPT_TEMPLATE = """#! %(python)s
import subprocess, sys
cmd = %(cmd)r
# if you want to debug with gdb:
#cmd = ['gdb', '-args'] + cmd
env = %(env)r
status = subprocess.call(cmd, env=env, cwd=%(cwd)r, shell=isinstance(cmd, str))
sys.exit(status)
"""

@taskgen_method
def handle_ut_cwd(self, key):
"""
Task generator method, used internally to limit code duplication.
This method may disappear anytime.
"""
cwd = getattr(self, key, None)
if cwd:
if isinstance(cwd, str):
# we want a Node instance
if os.path.isabs(cwd):
self.ut_cwd = self.bld.root.make_node(cwd)
else:
self.ut_cwd = self.path.make_node(cwd)

@feature('test_scripts')
def make_interpreted_test(self):
"""Create interpreted unit tests."""
for x in ['test_scripts_source', 'test_scripts_template']:
if not hasattr(self, x):
Logs.warn('a test_scripts taskgen i missing %s' % x)
return

self.ut_run, lst = Task.compile_fun(self.test_scripts_template, shell=getattr(self, 'test_scripts_shell', False))

script_nodes = self.to_nodes(self.test_scripts_source)
for script_node in script_nodes:
tsk = self.create_task('utest', [script_node])
tsk.vars = lst + tsk.vars
tsk.env['SCRIPT'] = script_node.path_from(tsk.get_cwd())

self.handle_ut_cwd('test_scripts_cwd')

env = getattr(self, 'test_scripts_env', None)
if env:
self.ut_env = env
else:
self.ut_env = dict(os.environ)

paths = getattr(self, 'test_scripts_paths', {})
for (k,v) in paths.items():
p = self.ut_env.get(k, '').split(os.pathsep)
if isinstance(v, str):
v = v.split(os.pathsep)
self.ut_env[k] = os.pathsep.join(p + v)

@feature('test') @feature('test')
@after_method('apply_link')
@after_method('apply_link', 'process_use')
def make_test(self): def make_test(self):
"""Create the unit test task. There can be only one unit test task by task generator.""" """Create the unit test task. There can be only one unit test task by task generator."""
if getattr(self, 'link_task', None):
self.create_task('utest', self.link_task.outputs)

if not getattr(self, 'link_task', None):
return

tsk = self.create_task('utest', self.link_task.outputs)
if getattr(self, 'ut_str', None):
self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
tsk.vars = lst + tsk.vars

self.handle_ut_cwd('ut_cwd')

if not hasattr(self, 'ut_paths'):
paths = []
for x in self.tmp_use_sorted:
try:
y = self.bld.get_tgen_by_name(x).link_task
except AttributeError:
pass
else:
if not isinstance(y, ccroot.stlink_task):
paths.append(y.outputs[0].parent.abspath())
self.ut_paths = os.pathsep.join(paths) + os.pathsep

if not hasattr(self, 'ut_env'):
self.ut_env = dct = dict(os.environ)
def add_path(var):
dct[var] = self.ut_paths + dct.get(var,'')
if Utils.is_win32:
add_path('PATH')
elif Utils.unversioned_sys_platform() == 'darwin':
add_path('DYLD_LIBRARY_PATH')
add_path('LD_LIBRARY_PATH')
else:
add_path('LD_LIBRARY_PATH')

if not hasattr(self, 'ut_cmd'):
self.ut_cmd = getattr(Options.options, 'testcmd', False)


@taskgen_method @taskgen_method
def add_test_results(self, tup): def add_test_results(self, tup):
"""Override and return tup[1] to interrupt the build immediately if a test does not run""" """Override and return tup[1] to interrupt the build immediately if a test does not run"""
Logs.debug("ut: %r", tup) Logs.debug("ut: %r", tup)
self.utest_result = tup
try:
self.utest_results.append(tup)
except AttributeError:
self.utest_results = [tup]
try: try:
self.bld.utest_results.append(tup) self.bld.utest_results.append(tup)
except AttributeError: except AttributeError:
self.bld.utest_results = [tup] self.bld.utest_results = [tup]


@Task.deep_inputs
class utest(Task.Task): class utest(Task.Task):
""" """
Execute a unit test Execute a unit test
@@ -63,6 +160,7 @@ class utest(Task.Task):
color = 'PINK' color = 'PINK'
after = ['vnum', 'inst'] after = ['vnum', 'inst']
vars = [] vars = []

def runnable_status(self): def runnable_status(self):
""" """
Always execute the task if `waf --alltests` was used or no Always execute the task if `waf --alltests` was used or no
@@ -77,37 +175,17 @@ class utest(Task.Task):
return Task.RUN_ME return Task.RUN_ME
return ret return ret


def add_path(self, dct, path, var):
dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])

def get_test_env(self): def get_test_env(self):
""" """
In general, tests may require any library built anywhere in the project. In general, tests may require any library built anywhere in the project.
Override this method if fewer paths are needed Override this method if fewer paths are needed
""" """
try:
fu = getattr(self.generator.bld, 'all_test_paths')
except AttributeError:
# this operation may be performed by at most #maxjobs
fu = os.environ.copy()

lst = []
for g in self.generator.bld.groups:
for tg in g:
if getattr(tg, 'link_task', None):
s = tg.link_task.outputs[0].parent.abspath()
if s not in lst:
lst.append(s)

if Utils.is_win32:
self.add_path(fu, lst, 'PATH')
elif Utils.unversioned_sys_platform() == 'darwin':
self.add_path(fu, lst, 'DYLD_LIBRARY_PATH')
self.add_path(fu, lst, 'LD_LIBRARY_PATH')
else:
self.add_path(fu, lst, 'LD_LIBRARY_PATH')
self.generator.bld.all_test_paths = fu
return fu
return self.generator.ut_env

def post_run(self):
super(utest, self).post_run()
if getattr(Options.options, 'clear_failed_tests', False) and self.waf_unit_test_results[1]:
self.generator.bld.task_sigs[self.uid()] = None


def run(self): def run(self):
""" """
@@ -116,33 +194,43 @@ class utest(Task.Task):


Override ``add_test_results`` to interrupt the build Override ``add_test_results`` to interrupt the build
""" """

filename = self.inputs[0].abspath()
self.ut_exec = getattr(self.generator, 'ut_exec', [filename])
if getattr(self.generator, 'ut_fun', None):
self.generator.ut_fun(self)


cwd = getattr(self.generator, 'ut_cwd', '') or self.inputs[0].parent.abspath()

testcmd = getattr(self.generator, 'ut_cmd', False) or getattr(Options.options, 'testcmd', False)
if testcmd:
self.ut_exec = (testcmd % " ".join(self.ut_exec)).split(' ')

proc = Utils.subprocess.Popen(self.ut_exec, cwd=cwd, env=self.get_test_env(), stderr=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE)
if hasattr(self.generator, 'ut_run'):
return self.generator.ut_run(self)

self.ut_exec = getattr(self.generator, 'ut_exec', [self.inputs[0].abspath()])
ut_cmd = getattr(self.generator, 'ut_cmd', False)
if ut_cmd:
self.ut_exec = shlex.split(ut_cmd % ' '.join(self.ut_exec))

return self.exec_command(self.ut_exec)

def exec_command(self, cmd, **kw):
Logs.debug('runner: %r', cmd)
if getattr(Options.options, 'dump_test_scripts', False):
script_code = SCRIPT_TEMPLATE % {
'python': sys.executable,
'env': self.get_test_env(),
'cwd': self.get_cwd().abspath(),
'cmd': cmd
}
script_file = self.inputs[0].abspath() + '_run.py'
Utils.writef(script_file, script_code)
os.chmod(script_file, Utils.O755)
if Logs.verbose > 1:
Logs.info('Test debug file written as %r' % script_file)

proc = Utils.subprocess.Popen(cmd, cwd=self.get_cwd().abspath(), env=self.get_test_env(),
stderr=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE, shell=isinstance(cmd,str))
(stdout, stderr) = proc.communicate() (stdout, stderr) = proc.communicate()

self.waf_unit_test_results = tup = (filename, proc.returncode, stdout, stderr)
self.waf_unit_test_results = tup = (self.inputs[0].abspath(), proc.returncode, stdout, stderr)
testlock.acquire() testlock.acquire()
try: try:
return self.generator.add_test_results(tup) return self.generator.add_test_results(tup)
finally: finally:
testlock.release() testlock.release()


def post_run(self):
super(utest, self).post_run()
if getattr(Options.options, 'clear_failed_tests', False) and self.waf_unit_test_results[1]:
self.generator.bld.task_sigs[self.uid()] = None
def get_cwd(self):
return getattr(self.generator, 'ut_cwd', self.inputs[0].parent)


def summary(bld): def summary(bld):
""" """
@@ -160,15 +248,15 @@ def summary(bld):
total = len(lst) total = len(lst)
tfail = len([x for x in lst if x[1]]) tfail = len([x for x in lst if x[1]])


Logs.pprint('CYAN', ' tests that pass %d/%d' % (total-tfail, total))
Logs.pprint('GREEN', ' tests that pass %d/%d' % (total-tfail, total))
for (f, code, out, err) in lst: for (f, code, out, err) in lst:
if not code: if not code:
Logs.pprint('CYAN', ' %s' % f)
Logs.pprint('GREEN', ' %s' % f)


Logs.pprint('CYAN', ' tests that fail %d/%d' % (tfail, total))
Logs.pprint('GREEN' if tfail == 0 else 'RED', ' tests that fail %d/%d' % (tfail, total))
for (f, code, out, err) in lst: for (f, code, out, err) in lst:
if code: if code:
Logs.pprint('CYAN', ' %s' % f)
Logs.pprint('RED', ' %s' % f)


def set_exit_code(bld): def set_exit_code(bld):
""" """
@@ -199,9 +287,10 @@ def options(opt):
""" """
opt.add_option('--notests', action='store_true', default=False, help='Exec no unit tests', dest='no_tests') opt.add_option('--notests', action='store_true', default=False, help='Exec no unit tests', dest='no_tests')
opt.add_option('--alltests', action='store_true', default=False, help='Exec all unit tests', dest='all_tests') opt.add_option('--alltests', action='store_true', default=False, help='Exec all unit tests', dest='all_tests')
opt.add_option('--clear-failed', action='store_true', default=False, help='Force failed unit tests to run again next time', dest='clear_failed_tests')
opt.add_option('--testcmd', action='store', default=False,
help = 'Run the unit tests using the test-cmd string'
' example "--test-cmd="valgrind --error-exitcode=1'
' %s" to run under valgrind', dest='testcmd')
opt.add_option('--clear-failed', action='store_true', default=False,
help='Force failed unit tests to run again next time', dest='clear_failed_tests')
opt.add_option('--testcmd', action='store', default=False, dest='testcmd',
help='Run the unit tests using the test-cmd string example "--testcmd="valgrind --error-exitcode=1 %s" to run under valgrind')
opt.add_option('--dump-test-scripts', action='store_true', default=False,
help='Create python scripts to help debug tests', dest='dump_test_scripts')



+ 26
- 28
waflib/Tools/xlc.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh) # Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009 # Yinon Ehrlich, 2009
# Michael Kuhn, 2009 # Michael Kuhn, 2009
@@ -11,7 +11,7 @@ from waflib.Configure import conf
@conf @conf
def find_xlc(conf): def find_xlc(conf):
""" """
Detect the Aix C compiler
Detects the Aix C compiler
""" """
cc = conf.find_program(['xlc_r', 'xlc'], var='CC') cc = conf.find_program(['xlc_r', 'xlc'], var='CC')
conf.get_xlc_version(cc) conf.get_xlc_version(cc)
@@ -24,38 +24,36 @@ def xlc_common_flags(conf):
""" """
v = conf.env v = conf.env


v['CC_SRC_F'] = []
v['CC_TGT_F'] = ['-c', '-o']
v.CC_SRC_F = []
v.CC_TGT_F = ['-c', '-o']


# linker
if not v['LINK_CC']: v['LINK_CC'] = v['CC']
v['CCLNK_SRC_F'] = []
v['CCLNK_TGT_F'] = ['-o']
v['CPPPATH_ST'] = '-I%s'
v['DEFINES_ST'] = '-D%s'
if not v.LINK_CC:
v.LINK_CC = v.CC


v['LIB_ST'] = '-l%s' # template for adding libs
v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
v['STLIB_ST'] = '-l%s'
v['STLIBPATH_ST'] = '-L%s'
v['RPATH_ST'] = '-Wl,-rpath,%s'
v.CCLNK_SRC_F = []
v.CCLNK_TGT_F = ['-o']
v.CPPPATH_ST = '-I%s'
v.DEFINES_ST = '-D%s'


v['SONAME_ST'] = []
v['SHLIB_MARKER'] = []
v['STLIB_MARKER'] = []
v.LIB_ST = '-l%s' # template for adding libs
v.LIBPATH_ST = '-L%s' # template for adding libpaths
v.STLIB_ST = '-l%s'
v.STLIBPATH_ST = '-L%s'
v.RPATH_ST = '-Wl,-rpath,%s'


# program
v['LINKFLAGS_cprogram'] = ['-Wl,-brtl']
v['cprogram_PATTERN'] = '%s'
v.SONAME_ST = []
v.SHLIB_MARKER = []
v.STLIB_MARKER = []


# shared library
v['CFLAGS_cshlib'] = ['-fPIC']
v['LINKFLAGS_cshlib'] = ['-G', '-Wl,-brtl,-bexpfull']
v['cshlib_PATTERN'] = 'lib%s.so'
v.LINKFLAGS_cprogram = ['-Wl,-brtl']
v.cprogram_PATTERN = '%s'


# static lib
v['LINKFLAGS_cstlib'] = []
v['cstlib_PATTERN'] = 'lib%s.a'
v.CFLAGS_cshlib = ['-fPIC']
v.LINKFLAGS_cshlib = ['-G', '-Wl,-brtl,-bexpfull']
v.cshlib_PATTERN = 'lib%s.so'

v.LINKFLAGS_cstlib = []
v.cstlib_PATTERN = 'lib%s.a'


def configure(conf): def configure(conf):
conf.find_xlc() conf.find_xlc()


+ 26
- 28
waflib/Tools/xlcxx.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh) # Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009 # Yinon Ehrlich, 2009
# Michael Kuhn, 2009 # Michael Kuhn, 2009
@@ -11,7 +11,7 @@ from waflib.Configure import conf
@conf @conf
def find_xlcxx(conf): def find_xlcxx(conf):
""" """
Detect the Aix C++ compiler
Detects the Aix C++ compiler
""" """
cxx = conf.find_program(['xlc++_r', 'xlc++'], var='CXX') cxx = conf.find_program(['xlc++_r', 'xlc++'], var='CXX')
conf.get_xlc_version(cxx) conf.get_xlc_version(cxx)
@@ -24,38 +24,36 @@ def xlcxx_common_flags(conf):
""" """
v = conf.env v = conf.env


v['CXX_SRC_F'] = []
v['CXX_TGT_F'] = ['-c', '-o']
v.CXX_SRC_F = []
v.CXX_TGT_F = ['-c', '-o']


# linker
if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
v['CXXLNK_SRC_F'] = []
v['CXXLNK_TGT_F'] = ['-o']
v['CPPPATH_ST'] = '-I%s'
v['DEFINES_ST'] = '-D%s'
if not v.LINK_CXX:
v.LINK_CXX = v.CXX


v['LIB_ST'] = '-l%s' # template for adding libs
v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
v['STLIB_ST'] = '-l%s'
v['STLIBPATH_ST'] = '-L%s'
v['RPATH_ST'] = '-Wl,-rpath,%s'
v.CXXLNK_SRC_F = []
v.CXXLNK_TGT_F = ['-o']
v.CPPPATH_ST = '-I%s'
v.DEFINES_ST = '-D%s'


v['SONAME_ST'] = []
v['SHLIB_MARKER'] = []
v['STLIB_MARKER'] = []
v.LIB_ST = '-l%s' # template for adding libs
v.LIBPATH_ST = '-L%s' # template for adding libpaths
v.STLIB_ST = '-l%s'
v.STLIBPATH_ST = '-L%s'
v.RPATH_ST = '-Wl,-rpath,%s'


# program
v['LINKFLAGS_cxxprogram']= ['-Wl,-brtl']
v['cxxprogram_PATTERN'] = '%s'
v.SONAME_ST = []
v.SHLIB_MARKER = []
v.STLIB_MARKER = []


# shared library
v['CXXFLAGS_cxxshlib'] = ['-fPIC']
v['LINKFLAGS_cxxshlib'] = ['-G', '-Wl,-brtl,-bexpfull']
v['cxxshlib_PATTERN'] = 'lib%s.so'
v.LINKFLAGS_cxxprogram= ['-Wl,-brtl']
v.cxxprogram_PATTERN = '%s'


# static lib
v['LINKFLAGS_cxxstlib'] = []
v['cxxstlib_PATTERN'] = 'lib%s.a'
v.CXXFLAGS_cxxshlib = ['-fPIC']
v.LINKFLAGS_cxxshlib = ['-G', '-Wl,-brtl,-bexpfull']
v.cxxshlib_PATTERN = 'lib%s.so'

v.LINKFLAGS_cxxstlib = []
v.cxxstlib_PATTERN = 'lib%s.a'


def configure(conf): def configure(conf):
conf.find_xlcxx() conf.find_xlcxx()


+ 448
- 204
waflib/Utils.py
File diff suppressed because it is too large
View File


+ 1
- 1
waflib/__init__.py View File

@@ -1,3 +1,3 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
# Thomas Nagy, 2005-2018 (ita)

+ 3
- 3
waflib/ansiterm.py View File

@@ -120,7 +120,7 @@ else:
def clear_line(self, param): def clear_line(self, param):
mode = param and int(param) or 0 mode = param and int(param) or 0
sbinfo = self.screen_buffer_info() sbinfo = self.screen_buffer_info()
if mode == 1: # Clear from begining of line to cursor position
if mode == 1: # Clear from beginning of line to cursor position
line_start = COORD(0, sbinfo.CursorPosition.Y) line_start = COORD(0, sbinfo.CursorPosition.Y)
line_length = sbinfo.Size.X line_length = sbinfo.Size.X
elif mode == 2: # Clear entire line elif mode == 2: # Clear entire line
@@ -136,7 +136,7 @@ else:
def clear_screen(self, param): def clear_screen(self, param):
mode = to_int(param, 0) mode = to_int(param, 0)
sbinfo = self.screen_buffer_info() sbinfo = self.screen_buffer_info()
if mode == 1: # Clear from begining of screen to cursor position
if mode == 1: # Clear from beginning of screen to cursor position
clear_start = COORD(0, 0) clear_start = COORD(0, 0)
clear_length = sbinfo.CursorPosition.X * sbinfo.CursorPosition.Y clear_length = sbinfo.CursorPosition.X * sbinfo.CursorPosition.Y
elif mode == 2: # Clear entire screen and return cursor to home elif mode == 2: # Clear entire screen and return cursor to home
@@ -320,7 +320,7 @@ else:
sbinfo = CONSOLE_SCREEN_BUFFER_INFO() sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
def get_term_cols(): def get_term_cols():
windll.kernel32.GetConsoleScreenBufferInfo(console, byref(sbinfo)) windll.kernel32.GetConsoleScreenBufferInfo(console, byref(sbinfo))
# TODO Issue 1401
# Issue 1401 - the progress bar cannot reach the last character
return sbinfo.Size.X - 1 return sbinfo.Size.X - 1


# just try and see # just try and see


+ 18
- 14
waflib/extras/batched_cc.py View File

@@ -3,21 +3,22 @@
# Thomas Nagy, 2006-2015 (ita) # Thomas Nagy, 2006-2015 (ita)


""" """
Build as batches.

Instead of compiling object files one by one, c/c++ compilers are often able to compile at once: Instead of compiling object files one by one, c/c++ compilers are often able to compile at once:
cc -c ../file1.c ../file2.c ../file3.c cc -c ../file1.c ../file2.c ../file3.c


Files are output on the directory where the compiler is called, and dependencies are more difficult Files are output on the directory where the compiler is called, and dependencies are more difficult
to track (do not run the command on all source files if only one file changes) to track (do not run the command on all source files if only one file changes)

As such, we do as if the files were compiled one by one, but no command is actually run: As such, we do as if the files were compiled one by one, but no command is actually run:
replace each cc/cpp Task by a TaskSlave. A new task called TaskMaster collects the replace each cc/cpp Task by a TaskSlave. A new task called TaskMaster collects the
signatures from each slave and finds out the command-line to run. signatures from each slave and finds out the command-line to run.


Just import this module in the configuration (no other change required).
This is provided as an example, for performance unity builds are recommended (fewer tasks and
fewer jobs to execute). See waflib/extras/unity.py.
Just import this module to start using it:
def build(bld):
bld.load('batched_cc')

Note that this is provided as an example; unity builds are recommended
for best performance results (fewer tasks and fewer jobs to execute).
See waflib/extras/unity.py.
""" """


from waflib import Task, Utils from waflib import Task, Utils
@@ -26,24 +27,21 @@ from waflib.Tools import c, cxx


MAX_BATCH = 50 MAX_BATCH = 50


c_str = '${CC} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED}'
c_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${tsk.batch_incpaths()} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED} ${CPPFLAGS}'
c_fun, _ = Task.compile_fun_noshell(c_str) c_fun, _ = Task.compile_fun_noshell(c_str)


cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED}'
cxx_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${tsk.batch_incpaths()} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED} ${CPPFLAGS}'
cxx_fun, _ = Task.compile_fun_noshell(cxx_str) cxx_fun, _ = Task.compile_fun_noshell(cxx_str)


count = 70000 count = 70000
class batch_task(Task.Task):
class batch(Task.Task):
color = 'PINK' color = 'PINK'


after = ['c', 'cxx'] after = ['c', 'cxx']
before = ['cprogram', 'cshlib', 'cstlib', 'cxxprogram', 'cxxshlib', 'cxxstlib'] before = ['cprogram', 'cshlib', 'cstlib', 'cxxprogram', 'cxxshlib', 'cxxstlib']


def uid(self): def uid(self):
m = Utils.md5()
m.update(Task.Task.uid(self))
m.update(str(self.generator.idx).encode())
return m.digest()
return Utils.h_list([Task.Task.uid(self), self.generator.idx, self.generator.path.abspath(), self.generator.target])


def __str__(self): def __str__(self):
return 'Batch compilation for %d slaves' % len(self.slaves) return 'Batch compilation for %d slaves' % len(self.slaves)
@@ -74,6 +72,13 @@ class batch_task(Task.Task):


return Task.SKIP_ME return Task.SKIP_ME


def get_cwd(self):
return self.slaves[0].outputs[0].parent

def batch_incpaths(self):
st = self.env.CPPPATH_ST
return [st % node.abspath() for node in self.generator.includes_nodes]

def run(self): def run(self):
self.outputs = [] self.outputs = []


@@ -85,7 +90,6 @@ class batch_task(Task.Task):
srclst.append(t.inputs[0].abspath()) srclst.append(t.inputs[0].abspath())


self.env.SRCLST = srclst self.env.SRCLST = srclst
self.cwd = slaves[0].outputs[0].parent.abspath()


if self.slaves[0].__class__.__name__ == 'c': if self.slaves[0].__class__.__name__ == 'c':
ret = c_fun(self) ret = c_fun(self)


+ 7
- 11
waflib/extras/build_file_tracker.py View File

@@ -8,25 +8,21 @@ want to use this to force partial rebuilds, see playground/track_output_files/ f


Note that there is a variety of ways to implement this, one may want use timestamps on source files too for example, Note that there is a variety of ways to implement this, one may want use timestamps on source files too for example,
or one may want to hash the files in the source directory only under certain conditions (md5_tstamp tool) or one may want to hash the files in the source directory only under certain conditions (md5_tstamp tool)
or to hash the file in the build directory with its timestamp (similar to 'update_outputs')
or to hash the file in the build directory with its timestamp
""" """


import os import os
from waflib import Node, Utils from waflib import Node, Utils


def get_bld_sig(self): def get_bld_sig(self):
if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
return Utils.h_file(self.abspath())

try: try:
return self.cache_sig
# add the creation time to the signature
return self.sig + str(os.stat(self.abspath()).st_mtime)
except AttributeError: except AttributeError:
pass

if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
self.sig = Utils.h_file(self.abspath())
self.cache_sig = ret = self.sig
else:
# add the
self.cache_sig = ret = self.sig + str(os.stat(self.abspath()).st_mtime)
return ret
return None


Node.Node.get_bld_sig = get_bld_sig Node.Node.get_bld_sig = get_bld_sig



+ 3
- 4
waflib/extras/build_logs.py View File

@@ -17,7 +17,7 @@ try:
up = os.path.dirname(Context.g_module.__file__) up = os.path.dirname(Context.g_module.__file__)
except AttributeError: except AttributeError:
up = '.' up = '.'
LOGFILE = os.path.join(up, 'logs', '%s.log' % time.strftime('%Y_%m_%d_%H_%M'))
LOGFILE = os.path.join(up, 'logs', time.strftime('%Y_%m_%d_%H_%M.log'))


wlock = threading.Lock() wlock = threading.Lock()
class log_to_file(object): class log_to_file(object):
@@ -28,7 +28,7 @@ class log_to_file(object):
self.filename = filename self.filename = filename
self.is_valid = True self.is_valid = True
def replace_colors(self, data): def replace_colors(self, data):
for x in Logs.colors_lst.values():
for x in Logs.colors_lst.values():
if isinstance(x, str): if isinstance(x, str):
data = data.replace(x, '') data = data.replace(x, '')
return data return data
@@ -96,7 +96,7 @@ def exit_cleanup():
fileobj.close() fileobj.close()
filename = sys.stdout.filename filename = sys.stdout.filename


Logs.info('Output logged to %r' % filename)
Logs.info('Output logged to %r', filename)


# then copy the log file to "latest.log" if possible # then copy the log file to "latest.log" if possible
up = os.path.dirname(os.path.abspath(filename)) up = os.path.dirname(os.path.abspath(filename))
@@ -104,7 +104,6 @@ def exit_cleanup():
shutil.copy(filename, os.path.join(up, 'latest.log')) shutil.copy(filename, os.path.join(up, 'latest.log'))
except OSError: except OSError:
# this may fail on windows due to processes spawned # this may fail on windows due to processes spawned
#
pass pass


atexit.register(exit_cleanup) atexit.register(exit_cleanup)


+ 36
- 33
waflib/extras/c_nec.py View File

@@ -24,43 +24,46 @@ def find_sxc(conf):


@conf @conf
def get_sxc_version(conf, fc): def get_sxc_version(conf, fc):
version_re = re.compile(r"C\+\+/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
cmd = fc + ['-V']
p = Utils.subprocess.Popen(cmd, stdin=False, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=None)
out, err = p.communicate()
version_re = re.compile(r"C\+\+/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
cmd = fc + ['-V']
p = Utils.subprocess.Popen(cmd, stdin=False, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=None)
out, err = p.communicate()


if out: match = version_re(out)
else: match = version_re(err)
if not match:
conf.fatal('Could not determine the NEC C compiler version.')
k = match.groupdict()
conf.env['C_VERSION'] = (k['major'], k['minor'])
if out:
match = version_re(out)
else:
match = version_re(err)
if not match:
conf.fatal('Could not determine the NEC C compiler version.')
k = match.groupdict()
conf.env['C_VERSION'] = (k['major'], k['minor'])


@conf @conf
def sxc_common_flags(conf): def sxc_common_flags(conf):
v=conf.env
v['CC_SRC_F']=[]
v['CC_TGT_F']=['-c','-o']
if not v['LINK_CC']:v['LINK_CC']=v['CC']
v['CCLNK_SRC_F']=[]
v['CCLNK_TGT_F']=['-o']
v['CPPPATH_ST']='-I%s'
v['DEFINES_ST']='-D%s'
v['LIB_ST']='-l%s'
v['LIBPATH_ST']='-L%s'
v['STLIB_ST']='-l%s'
v['STLIBPATH_ST']='-L%s'
v['RPATH_ST']=''
v['SONAME_ST']=[]
v['SHLIB_MARKER']=[]
v['STLIB_MARKER']=[]
v['LINKFLAGS_cprogram']=['']
v['cprogram_PATTERN']='%s'
v['CFLAGS_cshlib']=['-fPIC']
v['LINKFLAGS_cshlib']=['']
v['cshlib_PATTERN']='lib%s.so'
v['LINKFLAGS_cstlib']=[]
v['cstlib_PATTERN']='lib%s.a'
v=conf.env
v['CC_SRC_F']=[]
v['CC_TGT_F']=['-c','-o']
if not v['LINK_CC']:
v['LINK_CC']=v['CC']
v['CCLNK_SRC_F']=[]
v['CCLNK_TGT_F']=['-o']
v['CPPPATH_ST']='-I%s'
v['DEFINES_ST']='-D%s'
v['LIB_ST']='-l%s'
v['LIBPATH_ST']='-L%s'
v['STLIB_ST']='-l%s'
v['STLIBPATH_ST']='-L%s'
v['RPATH_ST']=''
v['SONAME_ST']=[]
v['SHLIB_MARKER']=[]
v['STLIB_MARKER']=[]
v['LINKFLAGS_cprogram']=['']
v['cprogram_PATTERN']='%s'
v['CFLAGS_cshlib']=['-fPIC']
v['LINKFLAGS_cshlib']=['']
v['cshlib_PATTERN']='lib%s.so'
v['LINKFLAGS_cstlib']=[]
v['cstlib_PATTERN']='lib%s.a'


def configure(conf): def configure(conf):
conf.find_sxc() conf.find_sxc()


+ 0
- 312
waflib/extras/xcode.py View File

@@ -1,312 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# XCode 3/XCode 4 generator for Waf
# Nicolas Mercier 2011

"""
Usage:

def options(opt):
opt.load('xcode')

$ waf configure xcode
"""

# TODO: support iOS projects

from waflib import Context, TaskGen, Build, Utils
import os, sys

# Glob pattern used to pick up header files below the include directories
HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'

# Map file extensions to the XCode "lastKnownFileType" identifiers stored
# in PBXFileReference entries; unknown extensions fall back to 'text'.
MAP_EXT = {
	# C/C++ headers
	'.h':   "sourcecode.c.h",
	'.hh':  "sourcecode.cpp.h",
	'.inl': "sourcecode.cpp.h",
	'.hpp': "sourcecode.cpp.h",
	# C and Objective-C sources
	'.c':   "sourcecode.c.c",
	'.m':   "sourcecode.c.objc",
	'.mm':  "sourcecode.cpp.objcpp",
	# C++ sources
	'.cc':  "sourcecode.cpp.cpp",
	'.cpp': "sourcecode.cpp.cpp",
	'.C':   "sourcecode.cpp.cpp",
	'.cxx': "sourcecode.cpp.cpp",
	'.c++': "sourcecode.cpp.cpp",
	# lexer/parser sources
	'.l':   "sourcecode.lex", # luthor
	'.ll':  "sourcecode.lex",
	'.y':   "sourcecode.yacc",
	'.yy':  "sourcecode.yacc",
	# resources
	'.plist': "text.plist.xml",
	'.nib': "wrapper.nib",
	'.xib': "text.xib",
}


# Counter state for generating unique XCode object identifiers.  The three
# leading 16-bit hex fields are currently fixed; only the trailing decimal
# counter advances between calls.
part1 = 0
part2 = 10000
part3 = 0
id = 562000999
def newid():
	"""Return a fresh 24-character unique identifier for a plist object."""
	global id
	id += 1
	return "%04X%04X%04X%012d" % (0, 10000, 0, id)

class XCodeNode:
	"""
	Base class for entries in the XCode project plist file.

	Public attributes are serialized by :py:meth:`write`; attributes whose
	name starts with an underscore are internal and are skipped.
	"""
	def __init__(self):
		# unique plist object identifier used to reference this node
		self._id = newid()

	def tostring(self, value):
		"""Serialize *value* into the old-style plist text representation."""
		if isinstance(value, dict):
			body = "".join("\t\t\t%s = %s;\n" % (key, self.tostring(val)) for key, val in value.items())
			return "{\n" + body + "\t\t}"
		if isinstance(value, str):
			return "\"%s\"" % value
		if isinstance(value, list):
			body = "".join("\t\t\t%s,\n" % self.tostring(item) for item in value)
			return "(\n" + body + "\t\t)"
		if isinstance(value, XCodeNode):
			# nodes are referenced by their unique identifier, not inlined
			return value._id
		return str(value)

	def write_recursive(self, value, file):
		"""Write out every XCodeNode reachable through *value*."""
		if isinstance(value, dict):
			for child in value.values():
				self.write_recursive(child, file)
		elif isinstance(value, list):
			for child in value:
				self.write_recursive(child, file)
		elif isinstance(value, XCodeNode):
			value.write(file)

	def write(self, file):
		"""Write this node (and, first, all nodes it references) to *file*."""
		public = [(name, val) for name, val in self.__dict__.items() if name[0] != '_']
		for _, val in public:
			self.write_recursive(val, file)

		w = file.write
		w("\t%s = {\n" % self._id)
		# the entry type is derived from the concrete class name
		w("\t\tisa = %s;\n" % self.__class__.__name__)
		for name, val in public:
			w("\t\t%s = %s;\n" % (name, self.tostring(val)))
		w("\t};\n\n")



# Configurations
# Configurations
class XCBuildConfiguration(XCodeNode):
	"""
	A named build configuration (e.g. Debug/Release) and its build settings.

	:param name: configuration name shown in the XCode UI
	:param settings: build-settings dict stored on the node (a fresh dict is
		created when omitted)
	:param env: optional waf ConfigSet; when it defines ARCH, the ARCHS
		build setting is derived from it
	"""
	def __init__(self, name, settings=None, env=None):
		XCodeNode.__init__(self)
		# bug fix: the original used a mutable default argument (settings={})
		# and mutated it below, so every instance created without an explicit
		# settings dict shared (and polluted) the same object
		if settings is None:
			settings = {}
		self.baseConfigurationReference = ""
		self.buildSettings = settings
		self.name = name
		if env and env.ARCH:
			settings['ARCHS'] = " ".join(env.ARCH)


class XCConfigurationList(XCodeNode):
	"""Holds the list of build configurations available to a target or project."""
	def __init__(self, settings):
		XCodeNode.__init__(self)
		self.buildConfigurations = settings
		self.defaultConfigurationIsVisible = 0
		# default to the first configuration's name, empty when there is none
		self.defaultConfigurationName = settings[0].name if settings and settings[0].name else ""

# Group/Files
# Group/Files
class PBXFileReference(XCodeNode):
	"""Reference to a single file on disk, typed for the XCode UI."""
	def __init__(self, name, path, filetype = '', sourcetree = "SOURCE_ROOT"):
		XCodeNode.__init__(self)
		self.fileEncoding = 4
		if not filetype:
			# derive the XCode file type from the file extension
			ext = os.path.splitext(name)[1]
			filetype = MAP_EXT.get(ext, 'text')
		self.lastKnownFileType = filetype
		self.name = name
		self.path = path
		self.sourceTree = sourcetree

class PBXGroup(XCodeNode):
	"""A folder in the XCode project tree; children are sub-groups or file references."""
	def __init__(self, name, sourcetree = "<group>"):
		XCodeNode.__init__(self)
		self.children = []
		self.name = name
		self.sourceTree = sourcetree

	def add(self, root, sources):
		"""
		Add *sources* (waf Nodes) below this group, mirroring the directory
		layout under *root*; nodes outside *root* are attached directly here.
		"""
		groups = {}
		def group_for(node):
			# return (creating on demand) the PBXGroup mirroring directory *node*
			if not node.is_child_of(root):
				return self
			try:
				return groups[node]
			except KeyError:
				grp = PBXGroup(node.name)
				parent = group_for(node.parent)
				groups[node] = grp
				parent.children.append(grp)
				return grp
		for src in sources:
			group_for(src.parent).children.append(PBXFileReference(src.name, src.abspath()))


# Targets
# Targets
class PBXLegacyTarget(XCodeNode):
	"""An XCode target that shells out to the waf script to perform *action*."""
	def __init__(self, action, target=''):
		XCodeNode.__init__(self)
		self.buildConfigurationList = XCConfigurationList([XCBuildConfiguration('waf', {})])
		if target:
			self.buildArgumentsString = "%s %s --targets=%s" % (sys.argv[0], action, target)
		else:
			self.buildArgumentsString = "%s %s" % (sys.argv[0], action)
		self.buildPhases = []
		# run the waf script through the current python interpreter
		self.buildToolPath = sys.executable
		self.buildWorkingDirectory = ""
		self.dependencies = []
		label = target or action
		self.name = label
		self.productName = label
		self.passBuildSettingsInEnvironment = 0

class PBXShellScriptBuildPhase(XCodeNode):
	"""Build phase that runs a shell script invoking waf on a single target."""
	def __init__(self, action, target):
		XCodeNode.__init__(self)
		# NOTE: attribute assignment order is preserved because XCodeNode.write
		# serializes attributes in insertion order
		self.buildActionMask = 2147483647
		self.files = []
		self.inputPaths = []
		self.outputPaths = []
		self.runOnlyForDeploymentPostProcessing = 0
		self.shellPath = "/bin/sh"
		self.shellScript = "%s %s %s --targets=%s" % (sys.executable, sys.argv[0], action, target)

class PBXNativeTarget(XCodeNode):
	"""A native application target whose build is delegated to waf via a script phase."""
	def __init__(self, action, target, node, env):
		XCodeNode.__init__(self)
		# the product is placed next to the node produced by waf
		cfg = XCBuildConfiguration('waf', {'PRODUCT_NAME':target, 'CONFIGURATION_BUILD_DIR':node.parent.abspath()}, env)
		self.buildConfigurationList = XCConfigurationList([cfg])
		self.buildPhases = [PBXShellScriptBuildPhase(action, target)]
		self.buildRules = []
		self.dependencies = []
		self.name = target
		self.productName = target
		self.productType = "com.apple.product-type.application"
		self.productReference = PBXFileReference(target, node.abspath(), 'wrapper.application', 'BUILT_PRODUCTS_DIR')

# Root project object
# Root project object
class PBXProject(XCodeNode):
	"""The root plist object; owns the group tree and the list of targets."""
	def __init__(self, name, version):
		XCodeNode.__init__(self)
		self.buildConfigurationList = XCConfigurationList([XCBuildConfiguration('waf', {})])
		self.compatibilityVersion = version[0]
		self.hasScannedForEncodings = 1
		self.mainGroup = PBXGroup(name)
		self.projectRoot = ""
		self.projectDirPath = ""
		self.targets = []
		# underscore attributes are not serialized by XCodeNode.write
		self._objectVersion = version[1]
		self._output = PBXGroup('out')
		self.mainGroup.children.append(self._output)

	def write(self, file):
		"""Write the complete project.pbxproj contents to *file*."""
		w = file.write
		# fixed plist preamble
		for line in ("// !$*UTF8*$!\n", "{\n", "\tarchiveVersion = 1;\n", "\tclasses = {\n", "\t};\n"):
			w(line)
		w("\tobjectVersion = %d;\n" % self._objectVersion)
		w("\tobjects = {\n\n")

		XCodeNode.write(self, file)

		w("\t};\n")
		w("\trootObject = %s;\n" % self._id)
		w("}\n")

	def add_task_gen(self, tg):
		"""Register a target for task generator *tg*; mac_app generators become native targets."""
		if getattr(tg, 'mac_app', False):
			target = PBXNativeTarget('build', tg.name, tg.link_task.outputs[0].change_ext('.app'), tg.env)
			self.targets.append(target)
			self._output.children.append(target.productReference)
		else:
			self.targets.append(PBXLegacyTarget('build', tg.name))

class xcode(Build.BuildContext):
	"""Generates an XCode 3.2 project file (``waf configure xcode``)."""
	cmd = 'xcode'
	fun = 'build'

	def collect_source(self, tg):
		"""
		Return the sorted, de-duplicated list of source, plist, resource and
		header nodes belonging to task generator *tg*.
		"""
		source_files = tg.to_nodes(getattr(tg, 'source', []))
		plist_files = tg.to_nodes(getattr(tg, 'mac_plist', []))
		resource_files = [tg.path.find_node(i) for i in Utils.to_list(getattr(tg, 'mac_resources', []))]
		include_dirs = Utils.to_list(getattr(tg, 'includes', [])) + Utils.to_list(getattr(tg, 'export_dirs', []))
		include_files = []
		for x in include_dirs:
			if not isinstance(x, str):
				# already a Node object
				include_files.append(x)
				continue
			d = tg.path.find_node(x)
			if d:
				include_files.extend(d.ant_glob(HEADERS_GLOB, flat=False))

		# remove duplicates, then sort for a stable project file
		source = list(set(source_files + plist_files + resource_files + include_files))
		source.sort(key=lambda x: x.abspath())
		return source

	def execute(self):
		"""
		Entry point: build the PBXProject tree from the task generators and
		write <appname>.xcodeproj/project.pbxproj into the source directory.
		"""
		self.restore()
		if not self.all_envs:
			self.load_envs()
		self.recurse([self.run_dir])

		appname = getattr(Context.g_module, Context.APPNAME, os.path.basename(self.srcnode.abspath()))
		p = PBXProject(appname, ('Xcode 3.2', 46))

		for g in self.groups:
			for tg in g:
				if not isinstance(tg, TaskGen.task_gen):
					continue

				tg.post()

				features = Utils.to_list(getattr(tg, 'features', ''))

				group = PBXGroup(tg.name)
				group.add(tg.path, self.collect_source(tg))
				p.mainGroup.children.append(group)

				# bug fix: the original tested `'cprogram' or 'cxxprogram' in features`,
				# which is always true ('cprogram' is a non-empty string); test the
				# membership of each feature instead so only programs become targets
				if 'cprogram' in features or 'cxxprogram' in features:
					p.add_task_gen(tg)

		# targets that don't produce the executable but that you might want to run
		p.targets.append(PBXLegacyTarget('configure'))
		p.targets.append(PBXLegacyTarget('dist'))
		p.targets.append(PBXLegacyTarget('install'))
		p.targets.append(PBXLegacyTarget('check'))
		node = self.srcnode.make_node('%s.xcodeproj' % appname)
		node.mkdir()
		node = node.make_node('project.pbxproj')
		# bug fix: the original passed an anonymous open() handle that was never
		# closed; use a context manager so the project file is flushed and closed
		with open(node.abspath(), 'w') as f:
			p.write(f)



+ 268
- 197
waflib/extras/xcode6.py View File

@@ -1,61 +1,20 @@
#! /usr/bin/env python #! /usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# XCode 3/XCode 4 generator for Waf
# XCode 3/XCode 4/XCode 6/Xcode 7 generator for Waf
# Based on work by Nicolas Mercier 2011 # Based on work by Nicolas Mercier 2011
# Extended by Simon Warg 2015, https://github.com/mimon # Extended by Simon Warg 2015, https://github.com/mimon
# XCode project file format based on http://www.monobjc.net/xcode-project-file-format.html # XCode project file format based on http://www.monobjc.net/xcode-project-file-format.html


""" """
Usage:
See playground/xcode6/ for usage examples.


See also demos/xcode6/ folder

def options(opt):
opt.load('xcode6')

def configure(cnf):
# <do your stuff>

# For example
cnf.env.SDKROOT = 'macosx10.9'

# Use cnf.PROJ_CONFIGURATION to completely set/override
# global project settings
# cnf.env.PROJ_CONFIGURATION = {
# 'Debug': {
# 'SDKROOT': 'macosx10.9'
# }
# 'MyCustomReleaseConfig': {
# 'SDKROOT': 'macosx10.10'
# }
# }

# In the end of configure() do
cnf.load('xcode6')

def build(bld):

# Make a Framework target
bld.framework(
source_files={
'Include': bld.path.ant_glob('include/MyLib/*.h'),
'Source': bld.path.ant_glob('src/MyLib/*.cpp')
},
includes='include',
export_headers=bld.path.ant_glob('include/MyLib/*.h'),
target='MyLib',
)

# You can also make bld.dylib, bld.app, bld.stlib ...

$ waf configure xcode6
""" """


# TODO: support iOS projects
from waflib import Context, TaskGen, Build, Utils, Errors, Logs
import os, sys


from waflib import Context, TaskGen, Build, Utils, ConfigSet, Configure, Errors
from waflib.Build import BuildContext
import os, sys, random, time
# FIXME too few extensions
XCODE_EXTS = ['.c', '.cpp', '.m', '.mm']


HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)' HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'


@@ -123,6 +82,13 @@ TARGET_TYPES = {
'exe' :TARGET_TYPE_EXECUTABLE, 'exe' :TARGET_TYPE_EXECUTABLE,
} }


def delete_invalid_values(dct):
""" Deletes entries that are dictionaries or sets """
for k, v in list(dct.items()):
if isinstance(v, dict) or isinstance(v, set):
del dct[k]
return dct

""" """
Configuration of the global project settings. Sets an environment variable 'PROJ_CONFIGURATION' Configuration of the global project settings. Sets an environment variable 'PROJ_CONFIGURATION'
which is a dictionary of configuration name and buildsettings pair. which is a dictionary of configuration name and buildsettings pair.
@@ -150,15 +116,16 @@ def configure(self):


# Create default project configuration? # Create default project configuration?
if 'PROJ_CONFIGURATION' not in self.env: if 'PROJ_CONFIGURATION' not in self.env:
defaults = delete_invalid_values(self.env.get_merged_dict())
self.env.PROJ_CONFIGURATION = { self.env.PROJ_CONFIGURATION = {
"Debug": self.env.get_merged_dict(),
"Release": self.env.get_merged_dict(),
"Debug": defaults,
"Release": defaults,
} }


# Some build settings are required to be present by XCode. We will supply default values # Some build settings are required to be present by XCode. We will supply default values
# if user hasn't defined any. # if user hasn't defined any.
defaults_required = [('PRODUCT_NAME', '$(TARGET_NAME)')] defaults_required = [('PRODUCT_NAME', '$(TARGET_NAME)')]
for cfgname,settings in self.env.PROJ_CONFIGURATION.iteritems():
for cfgname,settings in self.env.PROJ_CONFIGURATION.items():
for default_var, default_val in defaults_required: for default_var, default_val in defaults_required:
if default_var not in settings: if default_var not in settings:
settings[default_var] = default_val settings[default_var] = default_val
@@ -173,10 +140,17 @@ part3 = 0
id = 562000999 id = 562000999
def newid(): def newid():
global id global id
id = id + 1
id += 1
return "%04X%04X%04X%012d" % (0, 10000, 0, id) return "%04X%04X%04X%012d" % (0, 10000, 0, id)


class XCodeNode:
"""
Represents a tree node in the XCode project plist file format.
When written to a file, all attributes of XCodeNode are stringified together with
its value. However, attributes starting with an underscore _ are ignored
during that process and allows you to store arbitrary values that are not supposed
to be written out.
"""
class XCodeNode(object):
def __init__(self): def __init__(self):
self._id = newid() self._id = newid()
self._been_written = False self._been_written = False
@@ -247,12 +221,14 @@ class XCConfigurationList(XCodeNode):
# Group/Files # Group/Files
class PBXFileReference(XCodeNode): class PBXFileReference(XCodeNode):
def __init__(self, name, path, filetype = '', sourcetree = "SOURCE_ROOT"): def __init__(self, name, path, filetype = '', sourcetree = "SOURCE_ROOT"):

XCodeNode.__init__(self) XCodeNode.__init__(self)
self.fileEncoding = 4 self.fileEncoding = 4
if not filetype: if not filetype:
_, ext = os.path.splitext(name) _, ext = os.path.splitext(name)
filetype = MAP_EXT.get(ext, 'text') filetype = MAP_EXT.get(ext, 'text')
self.lastKnownFileType = filetype self.lastKnownFileType = filetype
self.explicitFileType = filetype
self.name = name self.name = name
self.path = path self.path = path
self.sourceTree = sourcetree self.sourceTree = sourcetree
@@ -267,11 +243,11 @@ class PBXBuildFile(XCodeNode):
""" This element indicate a file reference that is used in a PBXBuildPhase (either as an include or resource). """ """ This element indicate a file reference that is used in a PBXBuildPhase (either as an include or resource). """
def __init__(self, fileRef, settings={}): def __init__(self, fileRef, settings={}):
XCodeNode.__init__(self) XCodeNode.__init__(self)
# fileRef is a reference to a PBXFileReference object # fileRef is a reference to a PBXFileReference object
self.fileRef = fileRef self.fileRef = fileRef


# A map of key/value pairs for additionnal settings.
# A map of key/value pairs for additional settings.
self.settings = settings self.settings = settings


def __hash__(self): def __hash__(self):
@@ -281,16 +257,49 @@ class PBXBuildFile(XCodeNode):
return self.fileRef == other.fileRef return self.fileRef == other.fileRef


class PBXGroup(XCodeNode): class PBXGroup(XCodeNode):
def __init__(self, name, sourcetree = "<group>"):
def __init__(self, name, sourcetree = 'SOURCE_TREE'):
XCodeNode.__init__(self) XCodeNode.__init__(self)
self.children = [] self.children = []
self.name = name self.name = name
self.sourceTree = sourcetree self.sourceTree = sourcetree


# Maintain a lookup table for all PBXFileReferences
# that are contained in this group.
self._filerefs = {}

def add(self, sources): def add(self, sources):
""" sources param should be a list of PBXFileReference objects """
"""
Add a list of PBXFileReferences to this group

:param sources: list of PBXFileReferences objects
"""
self._filerefs.update(dict(zip(sources, sources)))
self.children.extend(sources) self.children.extend(sources)


def get_sub_groups(self):
"""
Returns all child PBXGroup objects contained in this group
"""
return list(filter(lambda x: isinstance(x, PBXGroup), self.children))

def find_fileref(self, fileref):
"""
Recursively search this group for an existing PBXFileReference. Returns None
if none were found.

The reason you'd want to reuse existing PBXFileReferences from a PBXGroup is that XCode doesn't like PBXFileReferences that aren't part of a PBXGroup hierarchy.
If a PBXFileReference is not part of such a hierarchy, certain UI features like 'Reveal in Finder'
stop working.
"""
if fileref in self._filerefs:
return self._filerefs[fileref]
elif self.children:
for childgroup in self.get_sub_groups():
f = childgroup.find_fileref(fileref)
if f:
return f
return None

class PBXContainerItemProxy(XCodeNode): class PBXContainerItemProxy(XCodeNode):
""" This is the element for to decorate a target item. """ """ This is the element for to decorate a target item. """
def __init__(self, containerPortal, remoteGlobalIDString, remoteInfo='', proxyType=1): def __init__(self, containerPortal, remoteGlobalIDString, remoteInfo='', proxyType=1):
@@ -299,7 +308,6 @@ class PBXContainerItemProxy(XCodeNode):
self.remoteGlobalIDString = remoteGlobalIDString # PBXNativeTarget self.remoteGlobalIDString = remoteGlobalIDString # PBXNativeTarget
self.remoteInfo = remoteInfo # Target name self.remoteInfo = remoteInfo # Target name
self.proxyType = proxyType self.proxyType = proxyType


class PBXTargetDependency(XCodeNode): class PBXTargetDependency(XCodeNode):
""" This is the element for referencing other target through content proxies. """ """ This is the element for referencing other target through content proxies. """
@@ -307,7 +315,7 @@ class PBXTargetDependency(XCodeNode):
XCodeNode.__init__(self) XCodeNode.__init__(self)
self.target = native_target self.target = native_target
self.targetProxy = proxy self.targetProxy = proxy
class PBXFrameworksBuildPhase(XCodeNode): class PBXFrameworksBuildPhase(XCodeNode):
""" This is the element for the framework link build phase, i.e. linking to frameworks """ """ This is the element for the framework link build phase, i.e. linking to frameworks """
def __init__(self, pbxbuildfiles): def __init__(self, pbxbuildfiles):
@@ -409,7 +417,7 @@ class PBXProject(XCodeNode):
if not isinstance(env.PROJ_CONFIGURATION, dict): if not isinstance(env.PROJ_CONFIGURATION, dict):
raise Errors.WafError("Error: env.PROJ_CONFIGURATION must be a dictionary. This is done for you if you do not define one yourself. However, did you load the xcode module at the end of your wscript configure() ?") raise Errors.WafError("Error: env.PROJ_CONFIGURATION must be a dictionary. This is done for you if you do not define one yourself. However, did you load the xcode module at the end of your wscript configure() ?")


# Retreive project configuration
# Retrieve project configuration
configurations = [] configurations = []
for config_name, settings in env.PROJ_CONFIGURATION.items(): for config_name, settings in env.PROJ_CONFIGURATION.items():
cf = XCBuildConfiguration(config_name, settings) cf = XCBuildConfiguration(config_name, settings)
@@ -427,8 +435,8 @@ class PBXProject(XCodeNode):
def create_target_dependency(self, target, name): def create_target_dependency(self, target, name):
""" : param target : PXBNativeTarget """ """ : param target : PXBNativeTarget """
proxy = PBXContainerItemProxy(self, target, name) proxy = PBXContainerItemProxy(self, target, name)
dependecy = PBXTargetDependency(target, proxy)
return dependecy
dependency = PBXTargetDependency(target, proxy)
return dependency


def write(self, file): def write(self, file):


@@ -461,13 +469,145 @@ class PBXProject(XCodeNode):
return t return t
return None return None


@TaskGen.feature('c', 'cxx')
@TaskGen.after('propagate_uselib_vars', 'apply_incpaths')
def process_xcode(self):
	"""
	Create a PBXNativeTarget for this task generator and register it on the
	XCode project being generated.  Runs only when an xcode6 build context is
	active (``bld.project`` exists) and the generator defines ``target_type``.
	"""
	bld = self.bld
	try:
		p = bld.project
	except AttributeError:
		# not generating an xcode project: nothing to do
		return

	if not hasattr(self, 'target_type'):
		return

	products_group = bld.products_group

	target_group = PBXGroup(self.name)
	p.mainGroup.children.append(target_group)

	# Determine what type to build - framework, app bundle etc.
	target_type = getattr(self, 'target_type', 'app')
	if target_type not in TARGET_TYPES:
		raise Errors.WafError("Target type '%s' does not exists. Available options are '%s'. In target '%s'" % (target_type, "', '".join(TARGET_TYPES.keys()), self.name))
	else:
		target_type = TARGET_TYPES[target_type]
	file_ext = target_type[2]

	# Create the output node
	target_node = self.path.find_or_declare(self.name+file_ext)
	target = PBXNativeTarget(self.name, target_node, target_type, [], [])

	products_group.children.append(target.productReference)

	# Pull source files from the 'source' attribute and assign them to a UI group.
	# Use a default UI group named 'Source' unless the user
	# provides a 'group_files' dictionary to customize the UI grouping.
	sources = getattr(self, 'source', [])
	if hasattr(self, 'group_files'):
		group_files = getattr(self, 'group_files', [])
		for grpname,files in group_files.items():
			group = bld.create_group(grpname, files)
			target_group.children.append(group)
	else:
		group = bld.create_group('Source', sources)
		target_group.children.append(group)

	# Create a PBXFileReference for each source file.
	# If the source file already exists as a PBXFileReference in any of the UI groups, then
	# reuse that PBXFileReference object (XCode does not like it if we don't reuse)
	for idx, path in enumerate(sources):
		fileref = PBXFileReference(path.name, path.abspath())
		existing_fileref = target_group.find_fileref(fileref)
		if existing_fileref:
			sources[idx] = existing_fileref
		else:
			sources[idx] = fileref

	# If the 'source' attribute contains any file extension that XCode can't work with,
	# then remove it. The allowed file extensions are defined in XCODE_EXTS.
	is_valid_file_extension = lambda file: os.path.splitext(file.path)[1] in XCODE_EXTS
	sources = list(filter(is_valid_file_extension, sources))

	buildfiles = [bld.unique_buildfile(PBXBuildFile(x)) for x in sources]
	target.add_build_phase(PBXSourcesBuildPhase(buildfiles))

	# Check if any framework to link against is some other target we've made
	libs = getattr(self, 'tmp_use_seen', [])
	for lib in libs:
		use_target = p.get_target(lib)
		if use_target:
			# Create an XCode dependency so that XCode knows to build the other target before this target
			dependency = p.create_target_dependency(use_target, use_target.name)
			target.add_dependency(dependency)

			buildphase = PBXFrameworksBuildPhase([PBXBuildFile(use_target.productReference)])
			target.add_build_phase(buildphase)
			if lib in self.env.LIB:
				self.env.LIB = list(filter(lambda x: x != lib, self.env.LIB))

	# If 'export_headers' is present, add files to the Headers build phase in xcode.
	# These are files that'll get packed into the Framework for instance.
	exp_hdrs = getattr(self, 'export_headers', [])
	hdrs = bld.as_nodes(Utils.to_list(exp_hdrs))
	files = [p.mainGroup.find_fileref(PBXFileReference(n.name, n.abspath())) for n in hdrs]
	files = [PBXBuildFile(f, {'ATTRIBUTES': ('Public',)}) for f in files]
	buildphase = PBXHeadersBuildPhase(files)
	target.add_build_phase(buildphase)

	# Merge frameworks and libs into one list, and prefix the frameworks
	frameworks = Utils.to_list(self.env.FRAMEWORK)
	frameworks = ' '.join(['-framework %s' % (f.split('.framework')[0]) for f in frameworks])

	libs = Utils.to_list(self.env.STLIB) + Utils.to_list(self.env.LIB)
	libs = ' '.join(bld.env['STLIB_ST'] % t for t in libs)

	# Override target specific build settings
	bldsettings = {
		'HEADER_SEARCH_PATHS': ['$(inherited)'] + self.env['INCPATHS'],
		'LIBRARY_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.LIBPATH) + Utils.to_list(self.env.STLIBPATH) + Utils.to_list(self.env.LIBDIR) ,
		'FRAMEWORK_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.FRAMEWORKPATH),
		'OTHER_LDFLAGS': libs + ' ' + frameworks,
		'OTHER_LIBTOOLFLAGS': bld.env['LINKFLAGS'],
		'OTHER_CPLUSPLUSFLAGS': Utils.to_list(self.env['CXXFLAGS']),
		'OTHER_CFLAGS': Utils.to_list(self.env['CFLAGS']),
		'INSTALL_PATH': []
	}

	# Install path
	installpaths = Utils.to_list(getattr(self, 'install', []))
	prodbuildfile = PBXBuildFile(target.productReference)
	for instpath in installpaths:
		bldsettings['INSTALL_PATH'].append(instpath)
		target.add_build_phase(PBXCopyFilesBuildPhase([prodbuildfile], instpath))

	if not bldsettings['INSTALL_PATH']:
		del bldsettings['INSTALL_PATH']

	# Create build settings which can override the project settings. Defaults to none if user
	# did not pass argument. This will be filled up with target specific
	# search paths, libs to link etc.
	settings = getattr(self, 'settings', {})

	# The keys represents different build configuration, e.g. Debug, Release and so on..
	# Insert our generated build settings to all configuration names
	# bug fix: on Python 3, dict.keys() returns a view object that does not
	# support '+'; build the union of the two key sets instead
	keys = set(settings.keys()) | set(bld.env.PROJ_CONFIGURATION.keys())
	for k in keys:
		if k in settings:
			settings[k].update(bldsettings)
		else:
			# NOTE(review): every new configuration shares the same bldsettings
			# dict; mutating one later would affect the others — confirm intended
			settings[k] = bldsettings

	for k,v in settings.items():
		target.add_configuration(XCBuildConfiguration(k, v))

	p.add_target(target)


class xcode(Build.BuildContext): class xcode(Build.BuildContext):
cmd = 'xcode6' cmd = 'xcode6'
fun = 'build' fun = 'build'


file_refs = dict()
build_files = dict()

def as_nodes(self, files): def as_nodes(self, files):
""" Returns a list of waflib.Nodes from a list of string of file paths """ """ Returns a list of waflib.Nodes from a list of string of file paths """
nodes = [] nodes = []
@@ -476,42 +616,42 @@ class xcode(Build.BuildContext):
d = x d = x
else: else:
d = self.srcnode.find_node(x) d = self.srcnode.find_node(x)
if not d:
raise Errors.WafError('File \'%s\' was not found' % x)
nodes.append(d) nodes.append(d)
return nodes return nodes


def create_group(self, name, files): def create_group(self, name, files):
""" """
Returns a new PBXGroup containing the files (paths) passed in the files arg
:type files: string
Returns a new PBXGroup containing the files (paths) passed in the files arg
:type files: string
""" """
group = PBXGroup(name) group = PBXGroup(name)
""" """
Do not use unique file reference here, since XCode seem to allow only one file reference Do not use unique file reference here, since XCode seem to allow only one file reference
to be referenced by a group. to be referenced by a group.
""" """
files = [(PBXFileReference(d.name, d.abspath())) for d in self.as_nodes(files)]
group.add(files)
files_ = []
for d in self.as_nodes(Utils.to_list(files)):
fileref = PBXFileReference(d.name, d.abspath())
files_.append(fileref)
group.add(files_)
return group return group


def unique_filereference(self, fileref):
"""
Returns a unique fileref, possibly an existing one if the paths are the same.
Use this after you've constructed a PBXFileReference to make sure there is
only one PBXFileReference for the same file in the same project.
"""
if fileref not in self.file_refs:
self.file_refs[fileref] = fileref
return self.file_refs[fileref]

def unique_buildfile(self, buildfile): def unique_buildfile(self, buildfile):
""" """
Returns a unique buildfile, possibly an existing one. Returns a unique buildfile, possibly an existing one.
Use this after you've constructed a PBXBuildFile to make sure there is Use this after you've constructed a PBXBuildFile to make sure there is
only one PBXBuildFile for the same file in the same project. only one PBXBuildFile for the same file in the same project.
""" """
if buildfile not in self.build_files:
self.build_files[buildfile] = buildfile
return self.build_files[buildfile]
try:
build_files = self.build_files
except AttributeError:
build_files = self.build_files = {}

if buildfile not in build_files:
build_files[buildfile] = buildfile
return build_files[buildfile]


def execute(self): def execute(self):
""" """
@@ -525,7 +665,7 @@ class xcode(Build.BuildContext):
appname = getattr(Context.g_module, Context.APPNAME, os.path.basename(self.srcnode.abspath())) appname = getattr(Context.g_module, Context.APPNAME, os.path.basename(self.srcnode.abspath()))


p = PBXProject(appname, ('Xcode 3.2', 46), self.env) p = PBXProject(appname, ('Xcode 3.2', 46), self.env)
# If we don't create a Products group, then # If we don't create a Products group, then
# XCode will create one, which entails that # XCode will create one, which entails that
# we'll start to see duplicate files in the UI # we'll start to see duplicate files in the UI
@@ -533,124 +673,55 @@ class xcode(Build.BuildContext):
products_group = PBXGroup('Products') products_group = PBXGroup('Products')
p.mainGroup.children.append(products_group) p.mainGroup.children.append(products_group)


for g in self.groups:
for tg in g:
if not isinstance(tg, TaskGen.task_gen):
continue

tg.post()

target_group = PBXGroup(tg.name)
p.mainGroup.children.append(target_group)

# Determine what type to build - framework, app bundle etc.
target_type = getattr(tg, 'target_type', 'app')
if target_type not in TARGET_TYPES:
raise Errors.WafError("Target type '%s' does not exists. Available options are '%s'. In target '%s'" % (target_type, "', '".join(TARGET_TYPES.keys()), tg.name))
else:
target_type = TARGET_TYPES[target_type]
file_ext = target_type[2]

# Create the output node
target_node = tg.path.find_or_declare(tg.name+file_ext)
target = PBXNativeTarget(tg.name, target_node, target_type, [], [])

products_group.children.append(target.productReference)

if hasattr(tg, 'source_files'):
# Create list of PBXFileReferences
sources = []
if isinstance(tg.source_files, dict):
for grpname,files in tg.source_files.items():
group = self.create_group(grpname, files)
target_group.children.append(group)
sources.extend(group.children)
elif isinstance(tg.source_files, list):
group = self.create_group("Source", tg.source_files)
target_group.children.append(group)
sources.extend(group.children)
else:
self.to_log("Argument 'source_files' passed to target '%s' was not a dictionary. Hence, some source files may not be included. Please provide a dictionary of source files, with group name as key and list of source files as value.\n" % tg.name)

supported_extensions = ['.c', '.cpp', '.m', '.mm']
sources = filter(lambda fileref: os.path.splitext(fileref.path)[1] in supported_extensions, sources)
buildfiles = [self.unique_buildfile(PBXBuildFile(fileref)) for fileref in sources]
target.add_build_phase(PBXSourcesBuildPhase(buildfiles))

# Create build settings which can override the project settings. Defaults to none if user
# did not pass argument. However, this will be filled up further below with target specfic
# search paths, libs to link etc.
settings = getattr(tg, 'settings', {})

# Check if any framework to link against is some other target we've made
libs = getattr(tg, 'tmp_use_seen', [])
for lib in libs:
use_target = p.get_target(lib)
if use_target:
# Create an XCode dependency so that XCode knows to build the other target before this target
target.add_dependency(p.create_target_dependency(use_target, use_target.name))
target.add_build_phase(PBXFrameworksBuildPhase([PBXBuildFile(use_target.productReference)]))
if lib in tg.env.LIB:
tg.env.LIB = list(filter(lambda x: x != lib, tg.env.LIB))

# If 'export_headers' is present, add files to the Headers build phase in xcode.
# These are files that'll get packed into the Framework for instance.
exp_hdrs = getattr(tg, 'export_headers', [])
hdrs = self.as_nodes(Utils.to_list(exp_hdrs))
files = [self.unique_filereference(PBXFileReference(n.name, n.abspath())) for n in hdrs]
target.add_build_phase(PBXHeadersBuildPhase([PBXBuildFile(f, {'ATTRIBUTES': ('Public',)}) for f in files]))

# Install path
installpaths = Utils.to_list(getattr(tg, 'install', []))
prodbuildfile = PBXBuildFile(target.productReference)
for instpath in installpaths:
target.add_build_phase(PBXCopyFilesBuildPhase([prodbuildfile], instpath))

# Merge frameworks and libs into one list, and prefix the frameworks
ld_flags = ['-framework %s' % lib.split('.framework')[0] for lib in Utils.to_list(tg.env.FRAMEWORK)]
ld_flags.extend(Utils.to_list(tg.env.STLIB) + Utils.to_list(tg.env.LIB))

# Override target specfic build settings
bldsettings = {
'HEADER_SEARCH_PATHS': ['$(inherited)'] + tg.env['INCPATHS'],
'LIBRARY_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(tg.env.LIBPATH) + Utils.to_list(tg.env.STLIBPATH),
'FRAMEWORK_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(tg.env.FRAMEWORKPATH),
'OTHER_LDFLAGS': r'\n'.join(ld_flags)
}

# The keys represents different build configuration, e.g. Debug, Release and so on..
# Insert our generated build settings to all configuration names
keys = set(settings.keys() + self.env.PROJ_CONFIGURATION.keys())
for k in keys:
if k in settings:
settings[k].update(bldsettings)
else:
settings[k] = bldsettings

for k,v in settings.items():
target.add_configuration(XCBuildConfiguration(k, v))

p.add_target(target)
self.project = p
self.products_group = products_group

# post all task generators
# the process_xcode method above will be called for each target
if self.targets and self.targets != '*':
(self._min_grp, self._exact_tg) = self.get_targets()

self.current_group = 0
while self.current_group < len(self.groups):
self.post_group()
self.current_group += 1

node = self.bldnode.make_node('%s.xcodeproj' % appname) node = self.bldnode.make_node('%s.xcodeproj' % appname)
node.mkdir() node.mkdir()
node = node.make_node('project.pbxproj') node = node.make_node('project.pbxproj')
p.write(open(node.abspath(), 'w'))
def build_target(self, tgtype, *k, **kw):
"""
Provide user-friendly methods to build different target types
E.g. bld.framework(source='..', ...) to build a Framework target.
E.g. bld.dylib(source='..', ...) to build a Dynamic library target. etc...
"""
self.load('ccroot')
kw['features'] = 'cxx cxxprogram'
with open(node.abspath(), 'w') as f:
p.write(f)
Logs.pprint('GREEN', 'Wrote %r' % node.abspath())

def bind_fun(tgtype):
def fun(self, *k, **kw):
tgtype = fun.__name__
if tgtype == 'shlib' or tgtype == 'dylib':
features = 'cxx cxxshlib'
tgtype = 'dylib'
elif tgtype == 'framework':
features = 'cxx cxxshlib'
tgtype = 'framework'
elif tgtype == 'program':
features = 'cxx cxxprogram'
tgtype = 'exe'
elif tgtype == 'app':
features = 'cxx cxxprogram'
tgtype = 'app'
elif tgtype == 'stlib':
features = 'cxx cxxstlib'
tgtype = 'stlib'
lst = kw['features'] = Utils.to_list(kw.get('features', []))
for x in features.split():
if not x in kw['features']:
lst.append(x)

kw['target_type'] = tgtype kw['target_type'] = tgtype
return self(*k, **kw) return self(*k, **kw)
fun.__name__ = tgtype
setattr(Build.BuildContext, tgtype, fun)
return fun

for xx in 'app framework dylib shlib stlib program'.split():
bind_fun(xx)


def app(self, *k, **kw): return self.build_target('app', *k, **kw)
def framework(self, *k, **kw): return self.build_target('framework', *k, **kw)
def dylib(self, *k, **kw): return self.build_target('dylib', *k, **kw)
def stlib(self, *k, **kw): return self.build_target('stlib', *k, **kw)
def exe(self, *k, **kw): return self.build_target('exe', *k, **kw)

+ 13
- 21
waflib/fixpy2.py View File

@@ -1,24 +1,21 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Thomas Nagy, 2010-2015 (ita)
# Thomas Nagy, 2010-2018 (ita)


"""
burn a book, save a tree
"""
from __future__ import with_statement


import os import os


all_modifs = {} all_modifs = {}


def fixdir(dir): def fixdir(dir):
"""call all the substitution functions on the waf folders"""
global all_modifs
"""Call all substitution functions on Waf folders"""
for k in all_modifs: for k in all_modifs:
for v in all_modifs[k]: for v in all_modifs[k]:
modif(os.path.join(dir, 'waflib'), k, v) modif(os.path.join(dir, 'waflib'), k, v)


def modif(dir, name, fun): def modif(dir, name, fun):
"""execute a substitution function"""
"""Call a substitution function"""
if name == '*': if name == '*':
lst = [] lst = []
for y in '. Tools extras'.split(): for y in '. Tools extras'.split():
@@ -30,24 +27,17 @@ def modif(dir, name, fun):
return return


filename = os.path.join(dir, name) filename = os.path.join(dir, name)
f = open(filename, 'r')
try:
with open(filename, 'r') as f:
txt = f.read() txt = f.read()
finally:
f.close()


txt = fun(txt) txt = fun(txt)


f = open(filename, 'w')
try:
with open(filename, 'w') as f:
f.write(txt) f.write(txt)
finally:
f.close()


def subst(*k): def subst(*k):
"""register a substitution function""" """register a substitution function"""
def do_subst(fun): def do_subst(fun):
global all_modifs
for x in k: for x in k:
try: try:
all_modifs[x].append(fun) all_modifs[x].append(fun)
@@ -60,13 +50,15 @@ def subst(*k):
def r1(code): def r1(code):
"utf-8 fixes for python < 2.6" "utf-8 fixes for python < 2.6"
code = code.replace('as e:', ',e:') code = code.replace('as e:', ',e:')
code = code.replace(".decode(sys.stdout.encoding or 'iso8859-1')", '')
code = code.replace('.encode()', '')
return code
code = code.replace(".decode(sys.stdout.encoding or'latin-1',errors='replace')", '')
return code.replace('.encode()', '')


@subst('Runner.py') @subst('Runner.py')
def r4(code): def r4(code):
"generator syntax" "generator syntax"
code = code.replace('next(self.biter)', 'self.biter.next()')
return code
return code.replace('next(self.biter)', 'self.biter.next()')

@subst('Context.py')
def r5(code):
return code.replace("('Execution failure: %s'%str(e),ex=e)", "('Execution failure: %s'%str(e),ex=e),None,sys.exc_info()[2]")



+ 64
- 0
waflib/processor.py View File

@@ -0,0 +1,64 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2016-2018 (ita)

import os, sys, traceback, base64, signal
try:
import cPickle
except ImportError:
import pickle as cPickle

try:
import subprocess32 as subprocess
except ImportError:
import subprocess

# Compatibility shim: subprocess.TimeoutExpired exists only on Python 3.3+
# (and in the subprocess32 backport). On older interpreters we substitute a
# placeholder class so that the `except TimeoutExpired:` clause in run() is
# still syntactically valid; it will simply never be raised there because
# old communicate() does not support a timeout.
try:
	TimeoutExpired = subprocess.TimeoutExpired
except AttributeError:
	class TimeoutExpired(Exception):
		pass

def run():
	"""
	Serve one job for the parent Waf process.

	Protocol (one request/response round trip):
	reads a single line from stdin containing a base64-encoded pickle of
	``[cmd, kwargs, cargs]``, runs ``cmd`` through subprocess.Popen(**kwargs),
	then writes back to stdout one base64-encoded pickle of
	``[ret, out, err, ex, trace]`` followed by a newline.

	NOTE(review): the input is unpickled without validation — safe only
	because the sender is the trusted parent Waf process, never untrusted data.
	"""
	txt = sys.stdin.readline().strip()
	if not txt:
		# parent process probably ended
		sys.exit(1)
	[cmd, kwargs, cargs] = cPickle.loads(base64.b64decode(txt))
	# cargs holds the communicate() keyword args (e.g. timeout); may be pickled as None
	cargs = cargs or {}

	# Defaults reported to the parent if Popen/communicate raises before completion
	ret = 1
	out, err, ex, trace = (None, None, None, None)
	try:
		proc = subprocess.Popen(cmd, **kwargs)
		try:
			out, err = proc.communicate(**cargs)
		except TimeoutExpired:
			# Kill the whole process group when one was created, else just the child
			if kwargs.get('start_new_session') and hasattr(os, 'killpg'):
				os.killpg(proc.pid, signal.SIGKILL)
			else:
				proc.kill()
			# Collect whatever output the child produced before being killed
			out, err = proc.communicate()
			# Re-raise a fresh TimeoutExpired carrying the captured output so the
			# generic handler below records it like any other failure
			exc = TimeoutExpired(proc.args, timeout=cargs['timeout'], output=out)
			exc.stderr = err
			raise exc
		ret = proc.returncode
	except Exception as e:
		# Report the failure to the parent as (exception class name, formatted traceback)
		exc_type, exc_value, tb = sys.exc_info()
		exc_lines = traceback.format_exception(exc_type, exc_value, tb)
		trace = str(cmd) + '\n' + ''.join(exc_lines)
		ex = e.__class__.__name__

	# it is just text so maybe we do not need to pickle()
	tmp = [ret, out, err, ex, trace]
	obj = base64.b64encode(cPickle.dumps(tmp))
	sys.stdout.write(obj.decode())
	sys.stdout.write('\n')
	# flush so the parent, which reads line-by-line, sees the reply immediately
	sys.stdout.flush()

# Worker main loop: keep serving jobs for the parent process.
# run() calls sys.exit(1) on EOF (parent gone); Ctrl-C stops the loop cleanly.
while True:
	try:
		run()
	except KeyboardInterrupt:
		break

+ 23
- 20
wscript View File

@@ -34,7 +34,9 @@ def check_for_celt(conf):
define = 'HAVE_CELT_API_0_' + version define = 'HAVE_CELT_API_0_' + version
if not found: if not found:
try: try:
conf.check_cfg(package='celt', atleast_version='0.' + version + '.0', args='--cflags --libs')
conf.check_cfg(
package='celt >= 0.%s.0' % version,
args='--cflags --libs')
found = True found = True
conf.define(define, 1) conf.define(define, 1)
continue continue
@@ -75,7 +77,6 @@ def options(opt):
opt.load('compiler_c') opt.load('compiler_c')
opt.load('autooptions'); opt.load('autooptions');


opt.load('xcode')
opt.load('xcode6') opt.load('xcode6')


# install directories # install directories
@@ -111,35 +112,30 @@ def options(opt):
help='Enable ALSA driver', help='Enable ALSA driver',
conf_dest='BUILD_DRIVER_ALSA') conf_dest='BUILD_DRIVER_ALSA')
alsa.check_cfg( alsa.check_cfg(
package='alsa',
atleast_version='1.0.18',
package='alsa >= 1.0.18',
args='--cflags --libs') args='--cflags --libs')
firewire = opt.add_auto_option( firewire = opt.add_auto_option(
'firewire', 'firewire',
help='Enable FireWire driver (FFADO)', help='Enable FireWire driver (FFADO)',
conf_dest='BUILD_DRIVER_FFADO') conf_dest='BUILD_DRIVER_FFADO')
firewire.check_cfg( firewire.check_cfg(
package='libffado',
atleast_version='1.999.17',
package='libffado >= 1.999.17',
args='--cflags --libs') args='--cflags --libs')
freebob = opt.add_auto_option( freebob = opt.add_auto_option(
'freebob', 'freebob',
help='Enable FreeBob driver') help='Enable FreeBob driver')
freebob.check_cfg( freebob.check_cfg(
package='libfreebob',
atleast_version='1.0.0',
package='libfreebob >= 1.0.0',
args='--cflags --libs') args='--cflags --libs')
iio = opt.add_auto_option( iio = opt.add_auto_option(
'iio', 'iio',
help='Enable IIO driver', help='Enable IIO driver',
conf_dest='BUILD_DRIVER_IIO') conf_dest='BUILD_DRIVER_IIO')
iio.check_cfg( iio.check_cfg(
package='gtkIOStream',
atleast_version='1.4.0',
package='gtkIOStream >= 1.4.0',
args='--cflags --libs') args='--cflags --libs')
iio.check_cfg( iio.check_cfg(
package='eigen3',
atleast_version='3.1.2',
package='eigen3 >= 3.1.2',
args='--cflags --libs') args='--cflags --libs')
portaudio = opt.add_auto_option( portaudio = opt.add_auto_option(
'portaudio', 'portaudio',
@@ -147,9 +143,8 @@ def options(opt):
conf_dest='BUILD_DRIVER_PORTAUDIO') conf_dest='BUILD_DRIVER_PORTAUDIO')
portaudio.check(header_name='windows.h') # only build portaudio on windows portaudio.check(header_name='windows.h') # only build portaudio on windows
portaudio.check_cfg( portaudio.check_cfg(
package='portaudio-2.0',
package='portaudio-2.0 >= 19',
uselib_store='PORTAUDIO', uselib_store='PORTAUDIO',
atleast_version='19',
args='--cflags --libs') args='--cflags --libs')
winmme = opt.add_auto_option( winmme = opt.add_auto_option(
'winmme', 'winmme',
@@ -166,8 +161,7 @@ def options(opt):
help='Build Opus netjack2') help='Build Opus netjack2')
opus.check(header_name='opus/opus_custom.h') opus.check(header_name='opus/opus_custom.h')
opus.check_cfg( opus.check_cfg(
package='opus',
atleast_version='0.9.0',
package='opus >= 0.9.0',
args='--cflags --libs') args='--cflags --libs')
samplerate = opt.add_auto_option( samplerate = opt.add_auto_option(
'samplerate', 'samplerate',
@@ -240,10 +234,19 @@ def configure(conf):


# Check for functions. # Check for functions.
conf.check( conf.check(
function_name='ppoll',
header_name=['poll.h', 'signal.h'],
defines=['_GNU_SOURCE'],
mandatory=False)
fragment=''
+ '#define _GNU_SOURCE\n'
+ '#include <poll.h>\n'
+ '#include <signal.h>\n'
+ '#include <stddef.h>\n'
+ 'int\n'
+ 'main(void)\n'
+ '{\n'
+ ' ppoll(NULL, 0, NULL, NULL);\n'
+ '}\n',
msg='Checking for ppoll',
define_name='HAVE_PPOLL',
mandatory=False)


# Check for backtrace support # Check for backtrace support
conf.check( conf.check(


Loading…
Cancel
Save