Browse Source

Stupid attempt at updating waf

tags/v1.9.13
falkTX FilipeCSnuk 6 years ago
parent
commit
cf3f8205c4
100 changed files with 15961 additions and 3921 deletions
  1. +11
    -14
      waf
  2. +635
    -492
      waflib/Build.py
  3. +52
    -34
      waflib/ConfigSet.py
  4. +78
    -105
      waflib/Configure.py
  5. +171
    -141
      waflib/Context.py
  6. +9
    -11
      waflib/Errors.py
  7. +70
    -35
      waflib/Logs.py
  8. +376
    -254
      waflib/Node.py
  9. +116
    -47
      waflib/Options.py
  10. +407
    -169
      waflib/Runner.py
  11. +204
    -216
      waflib/Scripting.py
  12. +657
    -478
      waflib/Task.py
  13. +205
    -147
      waflib/TaskGen.py
  14. +1
    -1
      waflib/Tools/__init__.py
  15. +2
    -2
      waflib/Tools/ar.py
  16. +73
    -0
      waflib/Tools/asm.py
  17. +49
    -0
      waflib/Tools/bison.py
  18. +7
    -7
      waflib/Tools/c.py
  19. +21
    -12
      waflib/Tools/c_aliases.py
  20. +401
    -287
      waflib/Tools/c_config.py
  21. +18
    -36
      waflib/Tools/c_osx.py
  22. +260
    -210
      waflib/Tools/c_preproc.py
  23. +13
    -12
      waflib/Tools/c_tests.py
  24. +97
    -43
      waflib/Tools/ccroot.py
  25. +1
    -1
      waflib/Tools/clang.py
  26. +2
    -2
      waflib/Tools/clangxx.py
  27. +15
    -9
      waflib/Tools/compiler_c.py
  28. +14
    -8
      waflib/Tools/compiler_cxx.py
  29. +85
    -0
      waflib/Tools/compiler_d.py
  30. +73
    -0
      waflib/Tools/compiler_fc.py
  31. +211
    -0
      waflib/Tools/cs.py
  32. +7
    -7
      waflib/Tools/cxx.py
  33. +97
    -0
      waflib/Tools/d.py
  34. +64
    -0
      waflib/Tools/d_config.py
  35. +211
    -0
      waflib/Tools/d_scan.py
  36. +70
    -0
      waflib/Tools/dbus.py
  37. +80
    -0
      waflib/Tools/dmd.py
  38. +51
    -37
      waflib/Tools/errcheck.py
  39. +189
    -0
      waflib/Tools/fc.py
  40. +488
    -0
      waflib/Tools/fc_config.py
  41. +114
    -0
      waflib/Tools/fc_scan.py
  42. +62
    -0
      waflib/Tools/flex.py
  43. +66
    -0
      waflib/Tools/g95.py
  44. +18
    -0
      waflib/Tools/gas.py
  45. +55
    -58
      waflib/Tools/gcc.py
  46. +55
    -0
      waflib/Tools/gdc.py
  47. +93
    -0
      waflib/Tools/gfortran.py
  48. +489
    -0
      waflib/Tools/glib2.py
  49. +131
    -0
      waflib/Tools/gnu_dirs.py
  50. +56
    -58
      waflib/Tools/gxx.py
  51. +3
    -6
      waflib/Tools/icc.py
  52. +3
    -6
      waflib/Tools/icpc.py
  53. +413
    -0
      waflib/Tools/ifort.py
  54. +231
    -0
      waflib/Tools/intltool.py
  55. +30
    -24
      waflib/Tools/irixcc.py
  56. +464
    -0
      waflib/Tools/javaw.py
  57. +56
    -0
      waflib/Tools/ldc2.py
  58. +38
    -0
      waflib/Tools/lua.py
  59. +39
    -0
      waflib/Tools/md5_tstamp.py
  60. +357
    -512
      waflib/Tools/msvc.py
  61. +26
    -0
      waflib/Tools/nasm.py
  62. +24
    -0
      waflib/Tools/nobuild.py
  63. +156
    -0
      waflib/Tools/perl.py
  64. +627
    -0
      waflib/Tools/python.py
  65. +796
    -0
      waflib/Tools/qt5.py
  66. +186
    -0
      waflib/Tools/ruby.py
  67. +27
    -29
      waflib/Tools/suncc.py
  68. +26
    -27
      waflib/Tools/suncxx.py
  69. +543
    -0
      waflib/Tools/tex.py
  70. +355
    -0
      waflib/Tools/vala.py
  71. +153
    -58
      waflib/Tools/waf_unit_test.py
  72. +78
    -0
      waflib/Tools/winres.py
  73. +26
    -28
      waflib/Tools/xlc.py
  74. +26
    -28
      waflib/Tools/xlcxx.py
  75. +448
    -204
      waflib/Utils.py
  76. +1
    -1
      waflib/__init__.py
  77. +3
    -3
      waflib/ansiterm.py
  78. +18
    -14
      waflib/extras/batched_cc.py
  79. +58
    -0
      waflib/extras/biber.py
  80. +128
    -0
      waflib/extras/bjam.py
  81. +108
    -0
      waflib/extras/blender.py
  82. +81
    -0
      waflib/extras/boo.py
  83. +525
    -0
      waflib/extras/boost.py
  84. +7
    -11
      waflib/extras/build_file_tracker.py
  85. +3
    -4
      waflib/extras/build_logs.py
  86. +82
    -0
      waflib/extras/buildcopy.py
  87. +72
    -0
      waflib/extras/c_dumbpreproc.py
  88. +87
    -0
      waflib/extras/c_emscripten.py
  89. +36
    -33
      waflib/extras/c_nec.py
  90. +152
    -0
      waflib/extras/cabal.py
  91. +110
    -0
      waflib/extras/cfg_altoptions.py
  92. +85
    -0
      waflib/extras/clang_compilation_database.py
  93. +875
    -0
      waflib/extras/codelite.py
  94. +39
    -0
      waflib/extras/color_gcc.py
  95. +51
    -0
      waflib/extras/color_rvct.py
  96. +406
    -0
      waflib/extras/compat15.py
  97. +591
    -0
      waflib/extras/cppcheck.py
  98. +209
    -0
      waflib/extras/cpplint.py
  99. +227
    -0
      waflib/extras/cross_gnu.py
  100. +146
    -0
      waflib/extras/cython.py

+ 11
- 14
waf View File

@@ -1,7 +1,7 @@
#!/usr/bin/env python
# encoding: ISO8859-1
# Thomas Nagy, 2005-2015
# encoding: latin-1
# Thomas Nagy, 2005-2018
#
"""
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
@@ -32,18 +32,16 @@ POSSIBILITY OF SUCH DAMAGE.

import os, sys, inspect

VERSION="1.8.17"
REVISION="x"
GIT="x"
INSTALL="x"
C1='x'
C2='x'
C3='x'
VERSION="2.0.11"
REVISION="4af5f6d4e441ea5db7e393d841ce352c"
GIT="fe03c6fa470f0bc3c086baafed8c6b4b6d28a245"
INSTALL=''
C1='#4'
C2='#/'
C3='#&'
cwd = os.getcwd()
join = os.path.join

if sys.hexversion<0x206000f:
raise ImportError('Python >= 2.6 is required to create the waf file')

WAF='waf'
def b(x):
@@ -162,7 +160,6 @@ wafdir = find_lib()
sys.path.insert(0, wafdir)

if __name__ == '__main__':
#import waflib.extras.compat15#PRELUDE
from waflib import Scripting
Scripting.waf_entry_point(cwd, VERSION, wafdir)


+ 635
- 492
waflib/Build.py
File diff suppressed because it is too large
View File


+ 52
- 34
waflib/ConfigSet.py View File

@@ -1,12 +1,12 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
# Thomas Nagy, 2005-2018 (ita)

"""

ConfigSet: a special dict

The values put in :py:class:`ConfigSet` must be lists
The values put in :py:class:`ConfigSet` must be serializable (dicts, lists, strings)
"""

import copy, re, os
@@ -15,7 +15,7 @@ re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)

class ConfigSet(object):
"""
A dict that honor serialization and parent relationships. The serialization format
A copy-on-write dict with human-readable serialized format. The serialization format
is human-readable (python-like) and performed by using eval() and repr().
For high performance prefer pickle. Do not store functions as they are not serializable.

@@ -39,17 +39,20 @@ class ConfigSet(object):

def __contains__(self, key):
"""
Enable the *in* syntax::
Enables the *in* syntax::

if 'foo' in env:
print(env['foo'])
"""
if key in self.table: return True
try: return self.parent.__contains__(key)
except AttributeError: return False # parent may not exist
if key in self.table:
return True
try:
return self.parent.__contains__(key)
except AttributeError:
return False # parent may not exist

def keys(self):
"""Dict interface (unknown purpose)"""
"""Dict interface"""
keys = set()
cur = self
while cur:
@@ -59,6 +62,9 @@ class ConfigSet(object):
keys.sort()
return keys

def __iter__(self):
return iter(self.keys())

def __str__(self):
"""Text representation of the ConfigSet (for debugging purposes)"""
return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in self.keys()])
@@ -73,7 +79,7 @@ class ConfigSet(object):
"""
try:
while 1:
x = self.table.get(key, None)
x = self.table.get(key)
if not x is None:
return x
self = self.parent
@@ -82,13 +88,13 @@ class ConfigSet(object):

def __setitem__(self, key, value):
"""
Dictionary interface: get value from key
Dictionary interface: set value from key
"""
self.table[key] = value

def __delitem__(self, key):
"""
Dictionary interface: get value from key
Dictionary interface: mark the value as missing
"""
self[key] = []

@@ -101,7 +107,7 @@ class ConfigSet(object):
conf.env['value']
"""
if name in self.__slots__:
return object.__getattr__(self, name)
return object.__getattribute__(self, name)
else:
return self[name]

@@ -152,7 +158,7 @@ class ConfigSet(object):

def detach(self):
"""
Detach self from its parent (if existing)
Detaches this instance from its parent (if present)

Modifying the parent :py:class:`ConfigSet` will not change the current object
Modifying this :py:class:`ConfigSet` will not modify the parent one.
@@ -171,18 +177,19 @@ class ConfigSet(object):

def get_flat(self, key):
"""
Return a value as a string. If the input is a list, the value returned is space-separated.
Returns a value as a string. If the input is a list, the value returned is space-separated.

:param key: key to use
:type key: string
"""
s = self[key]
if isinstance(s, str): return s
if isinstance(s, str):
return s
return ' '.join(s)

def _get_list_value_for_modification(self, key):
"""
Return a list value for further modification.
Returns a list value for further modification.

The list may be modified inplace and there is no need to do this afterwards::

@@ -191,16 +198,20 @@ class ConfigSet(object):
try:
value = self.table[key]
except KeyError:
try: value = self.parent[key]
except AttributeError: value = []
if isinstance(value, list):
value = value[:]
try:
value = self.parent[key]
except AttributeError:
value = []
else:
value = [value]
if isinstance(value, list):
# force a copy
value = value[:]
else:
value = [value]
self.table[key] = value
else:
if not isinstance(value, list):
value = [value]
self.table[key] = value
self.table[key] = value = [value]
return value

def append_value(self, var, val):
@@ -232,7 +243,7 @@ class ConfigSet(object):

def append_unique(self, var, val):
"""
Append a value to the specified item only if it's not already present::
Appends a value to the specified item only if it's not already present::

def build(bld):
bld.env.append_unique('CFLAGS', ['-O2', '-g'])
@@ -249,7 +260,7 @@ class ConfigSet(object):

def get_merged_dict(self):
"""
Compute the merged dictionary from the fusion of self and all its parent
Computes the merged dictionary from the fusion of self and all its parent

:rtype: a ConfigSet object
"""
@@ -257,8 +268,10 @@ class ConfigSet(object):
env = self
while 1:
table_list.insert(0, env.table)
try: env = env.parent
except AttributeError: break
try:
env = env.parent
except AttributeError:
break
merged_table = {}
for table in table_list:
merged_table.update(table)
@@ -266,7 +279,7 @@ class ConfigSet(object):

def store(self, filename):
"""
Write the :py:class:`ConfigSet` data into a file. See :py:meth:`ConfigSet.load` for reading such files.
Serializes the :py:class:`ConfigSet` data to a file. See :py:meth:`ConfigSet.load` for reading such files.

:param filename: file to use
:type filename: string
@@ -293,7 +306,7 @@ class ConfigSet(object):

def load(self, filename):
"""
Retrieve the :py:class:`ConfigSet` data from a file. See :py:meth:`ConfigSet.store` for writing such files
Restores contents from a file (current values are not cleared). Files are written using :py:meth:`ConfigSet.store`.

:param filename: file to use
:type filename: string
@@ -303,21 +316,20 @@ class ConfigSet(object):
for m in re_imp.finditer(code):
g = m.group
tbl[g(2)] = eval(g(3))
Logs.debug('env: %s' % str(self.table))
Logs.debug('env: %s', self.table)

def update(self, d):
"""
Dictionary interface: replace values from another dict
Dictionary interface: replace values with the ones from another dict

:param d: object to use the value from
:type d: dict-like object
"""
for k, v in d.items():
self[k] = v
self.table.update(d)

def stash(self):
"""
Store the object state, to provide a kind of transaction support::
Stores the object state to provide transactionality semantics::

env = ConfigSet()
env.stash()
@@ -335,6 +347,12 @@ class ConfigSet(object):
tbl[x] = copy.deepcopy(tbl[x])
self.undo_stack = self.undo_stack + [orig]

def commit(self):
"""
Commits transactional changes. See :py:meth:`ConfigSet.stash`
"""
self.undo_stack.pop(-1)

def revert(self):
"""
Reverts the object to a previous state. See :py:meth:`ConfigSet.stash`


+ 78
- 105
waflib/Configure.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
# Thomas Nagy, 2005-2018 (ita)

"""
Configuration system
@@ -12,15 +12,9 @@ A :py:class:`waflib.Configure.ConfigurationContext` instance is created when ``w
* hold configuration routines such as ``find_program``, etc
"""

import os, shlex, sys, time, re, shutil
import os, re, shlex, shutil, sys, time, traceback
from waflib import ConfigSet, Utils, Options, Logs, Context, Build, Errors

BREAK = 'break'
"""In case of a configuration error, break"""

CONTINUE = 'continue'
"""In case of a configuration error, continue"""

WAF_CONFIG_LOG = 'config.log'
"""Name of the configuration log file"""

@@ -157,7 +151,7 @@ class ConfigurationContext(Context.Context):
self.msg('Setting out to', self.bldnode.abspath())

if id(self.srcnode) == id(self.bldnode):
Logs.warn('Setting top == out (remember to use "update_outputs")')
Logs.warn('Setting top == out')
elif id(self.path) != id(self.srcnode):
if self.srcnode.is_child_of(self.path):
Logs.warn('Are you certain that you do not want to set top="." ?')
@@ -173,8 +167,9 @@ class ConfigurationContext(Context.Context):
# consider the current path as the root directory (see prepare_impl).
# to remove: use 'waf distclean'
env = ConfigSet.ConfigSet()
env['argv'] = sys.argv
env['options'] = Options.options.__dict__
env.argv = sys.argv
env.options = Options.options.__dict__
env.config_cmd = self.cmd

env.run_dir = Context.run_dir
env.top_dir = Context.top_dir
@@ -182,15 +177,15 @@ class ConfigurationContext(Context.Context):

# conf.hash & conf.files hold wscript files paths and hash
# (used only by Configure.autoconfig)
env['hash'] = self.hash
env['files'] = self.files
env['environ'] = dict(self.environ)
env.hash = self.hash
env.files = self.files
env.environ = dict(self.environ)

if not self.env.NO_LOCK_IN_RUN and not getattr(Options.options, 'no_lock_in_run'):
if not (self.env.NO_LOCK_IN_RUN or env.environ.get('NO_LOCK_IN_RUN') or getattr(Options.options, 'no_lock_in_run')):
env.store(os.path.join(Context.run_dir, Options.lockfile))
if not self.env.NO_LOCK_IN_TOP and not getattr(Options.options, 'no_lock_in_top'):
if not (self.env.NO_LOCK_IN_TOP or env.environ.get('NO_LOCK_IN_TOP') or getattr(Options.options, 'no_lock_in_top')):
env.store(os.path.join(Context.top_dir, Options.lockfile))
if not self.env.NO_LOCK_IN_OUT and not getattr(Options.options, 'no_lock_in_out'):
if not (self.env.NO_LOCK_IN_OUT or env.environ.get('NO_LOCK_IN_OUT') or getattr(Options.options, 'no_lock_in_out')):
env.store(os.path.join(Context.out_dir, Options.lockfile))

def prepare_env(self, env):
@@ -202,17 +197,17 @@ class ConfigurationContext(Context.Context):
"""
if not env.PREFIX:
if Options.options.prefix or Utils.is_win32:
env.PREFIX = Utils.sane_path(Options.options.prefix)
env.PREFIX = Options.options.prefix
else:
env.PREFIX = ''
env.PREFIX = '/'
if not env.BINDIR:
if Options.options.bindir:
env.BINDIR = Utils.sane_path(Options.options.bindir)
env.BINDIR = Options.options.bindir
else:
env.BINDIR = Utils.subst_vars('${PREFIX}/bin', env)
if not env.LIBDIR:
if Options.options.libdir:
env.LIBDIR = Utils.sane_path(Options.options.libdir)
env.LIBDIR = Options.options.libdir
else:
env.LIBDIR = Utils.subst_vars('${PREFIX}/lib%s' % Utils.lib64(), env)

@@ -228,38 +223,42 @@ class ConfigurationContext(Context.Context):
tmpenv = self.all_envs[key]
tmpenv.store(os.path.join(self.cachedir.abspath(), key + Build.CACHE_SUFFIX))

def load(self, input, tooldir=None, funs=None, with_sys_path=True):
def load(self, tool_list, tooldir=None, funs=None, with_sys_path=True, cache=False):
"""
Load Waf tools, which will be imported whenever a build is started.

:param input: waf tools to import
:type input: list of string
:param tool_list: waf tools to import
:type tool_list: list of string
:param tooldir: paths for the imports
:type tooldir: list of string
:param funs: functions to execute from the waf tools
:type funs: list of string
:param cache: whether to prevent the tool from running twice
:type cache: bool
"""

tools = Utils.to_list(input)
if tooldir: tooldir = Utils.to_list(tooldir)
tools = Utils.to_list(tool_list)
if tooldir:
tooldir = Utils.to_list(tooldir)
for tool in tools:
# avoid loading the same tool more than once with the same functions
# used by composite projects

mag = (tool, id(self.env), tooldir, funs)
if mag in self.tool_cache:
self.to_log('(tool %s is already loaded, skipping)' % tool)
continue
self.tool_cache.append(mag)
if cache:
mag = (tool, id(self.env), tooldir, funs)
if mag in self.tool_cache:
self.to_log('(tool %s is already loaded, skipping)' % tool)
continue
self.tool_cache.append(mag)

module = None
try:
module = Context.load_tool(tool, tooldir, ctx=self, with_sys_path=with_sys_path)
except ImportError as e:
self.fatal('Could not load the Waf tool %r from %r\n%s' % (tool, sys.path, e))
self.fatal('Could not load the Waf tool %r from %r\n%s' % (tool, getattr(e, 'waf_sys_path', sys.path), e))
except Exception as e:
self.to_log('imp %r (%r & %r)' % (tool, tooldir, funs))
self.to_log(Utils.ex_stack())
self.to_log(traceback.format_exc())
raise

if funs is not None:
@@ -267,8 +266,10 @@ class ConfigurationContext(Context.Context):
else:
func = getattr(module, 'configure', None)
if func:
if type(func) is type(Utils.readf): func(self)
else: self.eval_rules(func)
if type(func) is type(Utils.readf):
func(self)
else:
self.eval_rules(func)

self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})

@@ -285,8 +286,7 @@ class ConfigurationContext(Context.Context):

def eval_rules(self, rules):
"""
Execute the configuration tests. The method :py:meth:`waflib.Configure.ConfigurationContext.err_handler`
is used to process the eventual exceptions
Execute configuration tests provided as list of functions to run

:param rules: list of configuration method names
:type rules: list of string
@@ -294,28 +294,9 @@ class ConfigurationContext(Context.Context):
self.rules = Utils.to_list(rules)
for x in self.rules:
f = getattr(self, x)
if not f: self.fatal("No such method '%s'." % x)
try:
f()
except Exception as e:
ret = self.err_handler(x, e)
if ret == BREAK:
break
elif ret == CONTINUE:
continue
else:
raise

def err_handler(self, fun, error):
"""
Error handler for the configuration tests, the default is to let the exception raise

:param fun: configuration test
:type fun: method
:param error: exception
:type error: exception
"""
pass
if not f:
self.fatal('No such configuration function %r' % x)
f()

def conf(f):
"""
@@ -330,11 +311,7 @@ def conf(f):
:type f: function
"""
def fun(*k, **kw):
mandatory = True
if 'mandatory' in kw:
mandatory = kw['mandatory']
del kw['mandatory']

mandatory = kw.pop('mandatory', True)
try:
return f(*k, **kw)
except Errors.ConfigurationError:
@@ -347,7 +324,7 @@ def conf(f):
return f

@conf
def add_os_flags(self, var, dest=None, dup=True):
def add_os_flags(self, var, dest=None, dup=False):
"""
Import operating system environment values into ``conf.env`` dict::

@@ -365,7 +342,6 @@ def add_os_flags(self, var, dest=None, dup=True):
flags = shlex.split(self.environ[var])
except KeyError:
return
# TODO: in waf 1.9, make dup=False the default
if dup or ''.join(flags) not in ''.join(Utils.to_list(self.env[dest or var])):
self.env.append_value(dest or var, flags)

@@ -377,21 +353,26 @@ def cmd_to_list(self, cmd):
:param cmd: command
:type cmd: a string or a list of string
"""
if isinstance(cmd, str) and cmd.find(' '):
try:
os.stat(cmd)
except OSError:
if isinstance(cmd, str):
if os.path.isfile(cmd):
# do not take any risk
return [cmd]
if os.sep == '/':
return shlex.split(cmd)
else:
return [cmd]
try:
return shlex.split(cmd, posix=False)
except TypeError:
# Python 2.5 on windows?
return shlex.split(cmd)
return cmd

@conf
def check_waf_version(self, mini='1.7.99', maxi='1.9.0', **kw):
def check_waf_version(self, mini='1.9.99', maxi='2.1.0', **kw):
"""
Raise a Configuration error if the Waf version does not strictly match the given bounds::

conf.check_waf_version(mini='1.8.0', maxi='1.9.0')
conf.check_waf_version(mini='1.9.99', maxi='2.1.0')

:type mini: number, tuple or string
:param mini: Minimum required version
@@ -413,7 +394,7 @@ def find_file(self, filename, path_list=[]):

:param filename: name of the file to search for
:param path_list: list of directories to search
:return: the first occurrence filename or '' if filename could not be found
:return: the first matching filename; else a configuration exception is raised
"""
for n in Utils.to_list(filename):
for d in Utils.to_list(path_list):
@@ -433,14 +414,17 @@ def find_program(self, filename, **kw):

:param path_list: paths to use for searching
:type param_list: list of string
:param var: store the result to conf.env[var], by default use filename.upper()
:param var: store the result to conf.env[var] where var defaults to filename.upper() if not provided; the result is stored as a list of strings
:type var: string
:param ext: list of extensions for the binary (do not add an extension for portability)
:type ext: list of string
:param value: obtain the program from the value passed exclusively
:type value: list or string (list is preferred)
:param exts: list of extensions for the binary (do not add an extension for portability)
:type exts: list of string
:param msg: name to display in the log, by default filename is used
:type msg: string
:param interpreter: interpreter for the program
:type interpreter: ConfigSet variable key
:raises: :py:class:`waflib.Errors.ConfigurationError`
"""

exts = kw.get('exts', Utils.is_win32 and '.exe,.com,.bat,.cmd' or ',.sh,.pl,.py')
@@ -462,18 +446,15 @@ def find_program(self, filename, **kw):
else:
path_list = environ.get('PATH', '').split(os.pathsep)

if var in environ:
filename = environ[var]
if os.path.isfile(filename):
# typical CC=/usr/bin/gcc waf configure build
ret = [filename]
else:
# case CC='ccache gcc' waf configure build
ret = self.cmd_to_list(filename)
if kw.get('value'):
# user-provided in command-line options and passed to find_program
ret = self.cmd_to_list(kw['value'])
elif environ.get(var):
# user-provided in the os environment
ret = self.cmd_to_list(environ[var])
elif self.env[var]:
# set by the user in the wscript file
ret = self.env[var]
ret = self.cmd_to_list(ret)
# a default option in the wscript file
ret = self.cmd_to_list(self.env[var])
else:
if not ret:
ret = self.find_binary(filename, exts.split(','), path_list)
@@ -483,7 +464,6 @@ def find_program(self, filename, **kw):
ret = Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE, filename)
ret = self.cmd_to_list(ret)


if ret:
if len(ret) == 1:
retmsg = ret[0]
@@ -492,14 +472,14 @@ def find_program(self, filename, **kw):
else:
retmsg = False

self.msg("Checking for program '%s'" % msg, retmsg, **kw)
if not kw.get('quiet', None):
self.msg('Checking for program %r' % msg, retmsg, **kw)
if not kw.get('quiet'):
self.to_log('find program=%r paths=%r var=%r -> %r' % (filename, path_list, var, ret))

if not ret:
self.fatal(kw.get('errmsg', '') or 'Could not find the program %r' % filename)

interpreter = kw.get('interpreter', None)
interpreter = kw.get('interpreter')
if interpreter is None:
if not Utils.check_exe(ret[0], env=environ):
self.fatal('Program %r is not executable' % ret)
@@ -554,7 +534,6 @@ def run_build(self, *k, **kw):
$ waf configure --confcache

"""

lst = [str(v) for (p, v) in kw.items() if p != 'env']
h = Utils.h_list(lst)
dir = self.bldnode.abspath() + os.sep + (not Utils.is_win32 and '.' or '') + 'conf_check_' + Utils.to_hex(h)
@@ -573,9 +552,7 @@ def run_build(self, *k, **kw):
if cachemode == 1:
try:
proj = ConfigSet.ConfigSet(os.path.join(dir, 'cache_run_build'))
except OSError:
pass
except IOError:
except EnvironmentError:
pass
else:
ret = proj['cache_run_build']
@@ -588,7 +565,8 @@ def run_build(self, *k, **kw):
if not os.path.exists(bdir):
os.makedirs(bdir)

self.test_bld = bld = Build.BuildContext(top_dir=dir, out_dir=bdir)
cls_name = kw.get('run_build_cls') or getattr(self, 'run_build_cls', 'build')
self.test_bld = bld = Context.create_context(cls_name, top_dir=dir, out_dir=bdir)
bld.init_dirs()
bld.progress_bar = 0
bld.targets = '*'
@@ -597,17 +575,15 @@ def run_build(self, *k, **kw):
bld.all_envs.update(self.all_envs) # not really necessary
bld.env = kw['env']

# OMG huge hack
bld.kw = kw
bld.conf = self
kw['build_fun'](bld)

ret = -1
try:
try:
bld.compile()
except Errors.WafError:
ret = 'Test does not build: %s' % Utils.ex_stack()
ret = 'Test does not build: %s' % traceback.format_exc()
self.fatal(ret)
else:
ret = getattr(bld, 'retval', 0)
@@ -619,7 +595,6 @@ def run_build(self, *k, **kw):
proj.store(os.path.join(dir, 'cache_run_build'))
else:
shutil.rmtree(dir)

return ret

@conf
@@ -635,7 +610,7 @@ def test(self, *k, **kw):
kw['env'] = self.env.derive()

# validate_c for example
if kw.get('validate', None):
if kw.get('validate'):
kw['validate'](kw)

self.start_msg(kw['msg'], **kw)
@@ -651,7 +626,7 @@ def test(self, *k, **kw):
else:
kw['success'] = ret

if kw.get('post_check', None):
if kw.get('post_check'):
ret = kw['post_check'](kw)

if ret:
@@ -661,5 +636,3 @@ def test(self, *k, **kw):
self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw)
return ret




+ 171
- 141
waflib/Context.py View File

@@ -1,9 +1,9 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2010 (ita)
# Thomas Nagy, 2010-2018 (ita)

"""
Classes and functions required for waf commands
Classes and functions enabling the command system
"""

import os, re, imp, sys
@@ -11,16 +11,16 @@ from waflib import Utils, Errors, Logs
import waflib.Node

# the following 3 constants are updated on each new release (do not touch)
HEXVERSION=0x1081100
HEXVERSION=0x2000b00
"""Constant updated on new releases"""

WAFVERSION="1.8.17"
WAFVERSION="2.0.11"
"""Constant updated on new releases"""

WAFREVISION="cd7579a727d1b390bf9cbf111c1b20e811370bc0"
WAFREVISION="a97f6fb0941091b4966b625f15ec32fa783a8bec"
"""Git revision when the waf version is updated"""

ABI = 98
ABI = 20
"""Version of the build data cache file format (used in :py:const:`waflib.Context.DBFILE`)"""

DBFILE = '.wafpickle-%s-%d-%d' % (sys.platform, sys.hexversion, ABI)
@@ -41,7 +41,6 @@ OUT = 'out'
WSCRIPT_FILE = 'wscript'
"""Name of the waf script files"""


launch_dir = ''
"""Directory from which waf has been called"""
run_dir = ''
@@ -53,23 +52,12 @@ out_dir = ''
waf_dir = ''
"""Directory containing the waf modules"""

local_repo = ''
"""Local repository containing additional Waf tools (plugins)"""
remote_repo = 'https://raw.githubusercontent.com/waf-project/waf/master/'
"""
Remote directory containing downloadable waf tools. The missing tools can be downloaded by using::

$ waf configure --download
"""

remote_locs = ['waflib/extras', 'waflib/Tools']
"""
Remote directories for use with :py:const:`waflib.Context.remote_repo`
"""
default_encoding = Utils.console_encoding()
"""Encoding to use when reading outputs from other processes"""

g_module = None
"""
Module representing the main wscript file (see :py:const:`waflib.Context.run_dir`)
Module representing the top-level wscript file (see :py:const:`waflib.Context.run_dir`)
"""

STDOUT = 1
@@ -82,20 +70,20 @@ List of :py:class:`waflib.Context.Context` subclasses that can be used as waf co
are added automatically by a metaclass.
"""


def create_context(cmd_name, *k, **kw):
"""
Create a new :py:class:`waflib.Context.Context` instance corresponding to the given command.
Returns a new :py:class:`waflib.Context.Context` instance corresponding to the given command.
Used in particular by :py:func:`waflib.Scripting.run_command`

:param cmd_name: command
:param cmd_name: command name
:type cmd_name: string
:param k: arguments to give to the context class initializer
:type k: list
:param k: keyword arguments to give to the context class initializer
:type k: dict
:return: Context object
:rtype: :py:class:`waflib.Context.Context`
"""
global classes
for x in classes:
if x.cmd == cmd_name:
return x(*k, **kw)
@@ -105,14 +93,15 @@ def create_context(cmd_name, *k, **kw):

class store_context(type):
"""
Metaclass for storing the command classes into the list :py:const:`waflib.Context.classes`
Context classes must provide an attribute 'cmd' representing the command to execute
Metaclass that registers command classes into the list :py:const:`waflib.Context.classes`
Context classes must provide an attribute 'cmd' representing the command name, and a function
attribute 'fun' representing the function name that the command uses.
"""
def __init__(cls, name, bases, dict):
super(store_context, cls).__init__(name, bases, dict)
def __init__(cls, name, bases, dct):
super(store_context, cls).__init__(name, bases, dct)
name = cls.__name__

if name == 'ctx' or name == 'Context':
if name in ('ctx', 'Context'):
return

try:
@@ -123,11 +112,10 @@ class store_context(type):
if not getattr(cls, 'fun', None):
cls.fun = cls.cmd

global classes
classes.insert(0, cls)

ctx = store_context('ctx', (object,), {})
"""Base class for the :py:class:`waflib.Context.Context` classes"""
"""Base class for all :py:class:`waflib.Context.Context` classes"""

class Context(ctx):
"""
@@ -138,7 +126,7 @@ class Context(ctx):
def foo(ctx):
print(ctx.__class__.__name__) # waflib.Context.Context

Subclasses must define the attribute 'cmd':
Subclasses must define the class attributes 'cmd' and 'fun':

:param cmd: command to execute as in ``waf cmd``
:type cmd: string
@@ -156,19 +144,18 @@ class Context(ctx):

tools = {}
"""
A cache for modules (wscript files) read by :py:meth:`Context.Context.load`
A module cache for wscript files; see :py:meth:`Context.Context.load`
"""

def __init__(self, **kw):
try:
rd = kw['run_dir']
except KeyError:
global run_dir
rd = run_dir

# binds the context to the nodes in use to avoid a context singleton
self.node_class = type("Nod3", (waflib.Node.Node,), {})
self.node_class.__module__ = "waflib.Node"
self.node_class = type('Nod3', (waflib.Node.Node,), {})
self.node_class.__module__ = 'waflib.Node'
self.node_class.ctx = self

self.root = self.node_class('', None)
@@ -179,18 +166,9 @@ class Context(ctx):
self.exec_dict = {'ctx':self, 'conf':self, 'bld':self, 'opt':self}
self.logger = None

def __hash__(self):
"""
Return a hash value for storing context objects in dicts or sets. The value is not persistent.

:return: hash value
:rtype: int
"""
return id(self)

def finalize(self):
"""
Use to free resources such as open files potentially held by the logger
Called to free resources such as logger files
"""
try:
logger = self.logger
@@ -202,11 +180,11 @@ class Context(ctx):

def load(self, tool_list, *k, **kw):
"""
Load a Waf tool as a module, and try calling the function named :py:const:`waflib.Context.Context.fun` from it.
A ``tooldir`` value may be provided as a list of module paths.
Loads a Waf tool as a module, and try calling the function named :py:const:`waflib.Context.Context.fun`
from it. A ``tooldir`` argument may be provided as a list of module paths.

:param tool_list: list of Waf tool names to load
:type tool_list: list of string or space-separated string
:param tool_list: list of Waf tools to use
"""
tools = Utils.to_list(tool_list)
path = Utils.to_list(kw.get('tooldir', ''))
@@ -220,15 +198,16 @@ class Context(ctx):

def execute(self):
"""
Execute the command. Redefine this method in subclasses.
Here, it calls the function name in the top-level wscript file. Most subclasses
redefine this method to provide additional functionality.
"""
global g_module
self.recurse([os.path.dirname(g_module.root_path)])

def pre_recurse(self, node):
"""
Method executed immediately before a folder is read by :py:meth:`waflib.Context.Context.recurse`. The node given is set
as an attribute ``self.cur_script``, and as the current path ``self.path``
Method executed immediately before a folder is read by :py:meth:`waflib.Context.Context.recurse`.
The current script is bound as a Node object on ``self.cur_script``, and the current path
is bound to ``self.path``

:param node: script
:type node: :py:class:`waflib.Node.Node`
@@ -240,7 +219,7 @@ class Context(ctx):

def post_recurse(self, node):
"""
Restore ``self.cur_script`` and ``self.path`` right after :py:meth:`waflib.Context.Context.recurse` terminates.
Restores ``self.cur_script`` and ``self.path`` right after :py:meth:`waflib.Context.Context.recurse` terminates.

:param node: script
:type node: :py:class:`waflib.Node.Node`
@@ -251,10 +230,13 @@ class Context(ctx):

def recurse(self, dirs, name=None, mandatory=True, once=True, encoding=None):
"""
Run user code from the supplied list of directories.
Runs user-provided functions from the supplied list of directories.
The directories can be either absolute, or relative to the directory
of the wscript file. The methods :py:meth:`waflib.Context.Context.pre_recurse` and :py:meth:`waflib.Context.Context.post_recurse`
are called immediately before and after a script has been executed.
of the wscript file

The methods :py:meth:`waflib.Context.Context.pre_recurse` and
:py:meth:`waflib.Context.Context.post_recurse` are called immediately before
and after a script has been executed.

:param dirs: List of directories to visit
:type dirs: list of string or space-separated string
@@ -300,7 +282,7 @@ class Context(ctx):
if not user_function:
if not mandatory:
continue
raise Errors.WafError('No function %s defined in %s' % (name or self.fun, node.abspath()))
raise Errors.WafError('No function %r defined in %s' % (name or self.fun, node.abspath()))
user_function(self)
finally:
self.post_recurse(node)
@@ -313,25 +295,39 @@ class Context(ctx):
raise Errors.WafError('Cannot read the folder %r' % d)
raise Errors.WafError('No wscript file in directory %s' % d)

def log_command(self, cmd, kw):
	"""
	Writes the command about to be executed to the debug log, but only
	when verbose output is enabled.

	:param cmd: command line, either a string or a list of arguments
	:param kw: keyword arguments that will be passed to subprocess
	"""
	if not Logs.verbose:
		return
	# WAF_CMD_FORMAT=string requests a shell-escaped single-string form
	if os.environ.get('WAF_CMD_FORMAT') == 'string' and not isinstance(cmd, str):
		cmd = Utils.shell_escape(cmd)
	Logs.debug('runner: %r', cmd)
	Logs.debug('runner_env: kw=%s', kw)

def exec_command(self, cmd, **kw):
"""
Execute a command and return the exit status. If the context has the attribute 'log',
capture and log the process stderr/stdout for logging purposes::
Runs an external process and returns the exit status::

def run(tsk):
ret = tsk.generator.bld.exec_command('touch foo.txt')
return ret

This method captures the standard/error outputs (Issue 1101), but it does not return the values
unlike :py:meth:`waflib.Context.Context.cmd_and_log`
If the context has the attribute 'log', then captures and logs the process stderr/stdout.
Unlike :py:meth:`waflib.Context.Context.cmd_and_log`, this method does not return the
stdout/stderr values captured.

:param cmd: command argument for subprocess.Popen
:type cmd: string or list
:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
:type kw: dict
:returns: process exit status
:rtype: integer
:raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
:raises: :py:class:`waflib.Errors.WafError` in case of execution failure
"""
subprocess = Utils.subprocess
kw['shell'] = isinstance(cmd, str)
Logs.debug('runner: %r' % (cmd,))
Logs.debug('runner_env: kw=%s' % kw)
self.log_command(cmd, kw)

if self.logger:
self.logger.info(cmd)
@@ -342,40 +338,42 @@ class Context(ctx):
kw['stderr'] = subprocess.PIPE

if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
raise Errors.WafError("Program %s not found!" % cmd[0])
raise Errors.WafError('Program %s not found!' % cmd[0])

wargs = {}
cargs = {}
if 'timeout' in kw:
if kw['timeout'] is not None:
wargs['timeout'] = kw['timeout']
if sys.hexversion >= 0x3030000:
cargs['timeout'] = kw['timeout']
if not 'start_new_session' in kw:
kw['start_new_session'] = True
del kw['timeout']
if 'input' in kw:
if kw['input']:
wargs['input'] = kw['input']
kw['stdin'] = Utils.subprocess.PIPE
cargs['input'] = kw['input']
kw['stdin'] = subprocess.PIPE
del kw['input']

if 'cwd' in kw:
if not isinstance(kw['cwd'], str):
kw['cwd'] = kw['cwd'].abspath()

encoding = kw.pop('decode_as', default_encoding)

try:
if kw['stdout'] or kw['stderr']:
p = subprocess.Popen(cmd, **kw)
(out, err) = p.communicate(**wargs)
ret = p.returncode
else:
out, err = (None, None)
ret = subprocess.Popen(cmd, **kw).wait(**wargs)
ret, out, err = Utils.run_process(cmd, kw, cargs)
except Exception as e:
raise Errors.WafError('Execution failure: %s' % str(e), ex=e)

if out:
if not isinstance(out, str):
out = out.decode(sys.stdout.encoding or 'iso8859-1')
out = out.decode(encoding, errors='replace')
if self.logger:
self.logger.debug('out: %s' % out)
self.logger.debug('out: %s', out)
else:
Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
if err:
if not isinstance(err, str):
err = err.decode(sys.stdout.encoding or 'iso8859-1')
err = err.decode(encoding, errors='replace')
if self.logger:
self.logger.error('err: %s' % err)
else:
@@ -385,9 +383,9 @@ class Context(ctx):

def cmd_and_log(self, cmd, **kw):
"""
Execute a command and return stdout/stderr if the execution is successful.
Executes a process and returns stdout/stderr if the execution is successful.
An exception is thrown when the exit status is non-0. In that case, both stderr and stdout
will be bound to the WafError object::
will be bound to the WafError object (configuration tests)::

def configure(conf):
out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH)
@@ -395,65 +393,69 @@ class Context(ctx):
(out, err) = conf.cmd_and_log(cmd, input='\\n'.encode(), output=waflib.Context.STDOUT)
try:
conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH)
except Exception as e:
except Errors.WafError as e:
print(e.stdout, e.stderr)

:param cmd: args for subprocess.Popen
:type cmd: list or string
:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
:type kw: dict
:returns: a tuple containing the contents of stdout and stderr
:rtype: string
:raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
:raises: :py:class:`waflib.Errors.WafError` in case of execution failure; stdout/stderr/returncode are bound to the exception object
"""
subprocess = Utils.subprocess
kw['shell'] = isinstance(cmd, str)
Logs.debug('runner: %r' % (cmd,))

if 'quiet' in kw:
quiet = kw['quiet']
del kw['quiet']
else:
quiet = None
self.log_command(cmd, kw)

if 'output' in kw:
to_ret = kw['output']
del kw['output']
else:
to_ret = STDOUT
quiet = kw.pop('quiet', None)
to_ret = kw.pop('output', STDOUT)

if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
raise Errors.WafError("Program %s not found!" % cmd[0])
raise Errors.WafError('Program %r not found!' % cmd[0])

kw['stdout'] = kw['stderr'] = subprocess.PIPE
if quiet is None:
self.to_log(cmd)

wargs = {}
cargs = {}
if 'timeout' in kw:
if kw['timeout'] is not None:
wargs['timeout'] = kw['timeout']
if sys.hexversion >= 0x3030000:
cargs['timeout'] = kw['timeout']
if not 'start_new_session' in kw:
kw['start_new_session'] = True
del kw['timeout']
if 'input' in kw:
if kw['input']:
wargs['input'] = kw['input']
kw['stdin'] = Utils.subprocess.PIPE
cargs['input'] = kw['input']
kw['stdin'] = subprocess.PIPE
del kw['input']

if 'cwd' in kw:
if not isinstance(kw['cwd'], str):
kw['cwd'] = kw['cwd'].abspath()

encoding = kw.pop('decode_as', default_encoding)

try:
p = subprocess.Popen(cmd, **kw)
(out, err) = p.communicate(**wargs)
ret, out, err = Utils.run_process(cmd, kw, cargs)
except Exception as e:
raise Errors.WafError('Execution failure: %s' % str(e), ex=e)

if not isinstance(out, str):
out = out.decode(sys.stdout.encoding or 'iso8859-1')
out = out.decode(encoding, errors='replace')
if not isinstance(err, str):
err = err.decode(sys.stdout.encoding or 'iso8859-1')
err = err.decode(encoding, errors='replace')

if out and quiet != STDOUT and quiet != BOTH:
self.to_log('out: %s' % out)
if err and quiet != STDERR and quiet != BOTH:
self.to_log('err: %s' % err)

if p.returncode:
e = Errors.WafError('Command %r returned %r' % (cmd, p.returncode))
e.returncode = p.returncode
if ret:
e = Errors.WafError('Command %r returned %r' % (cmd, ret))
e.returncode = ret
e.stderr = err
e.stdout = out
raise e
@@ -466,7 +468,8 @@ class Context(ctx):

def fatal(self, msg, ex=None):
"""
Raise a configuration error to interrupt the execution immediately::
Prints an error message in red and stops command execution; this is
usually used in the configuration section::

def configure(conf):
conf.fatal('a requirement is missing')
@@ -475,24 +478,31 @@ class Context(ctx):
:type msg: string
:param ex: optional exception object
:type ex: exception
:raises: :py:class:`waflib.Errors.ConfigurationError`
"""
if self.logger:
self.logger.info('from %s: %s' % (self.path.abspath(), msg))
try:
msg = '%s\n(complete log in %s)' % (msg, self.logger.handlers[0].baseFilename)
except Exception:
logfile = self.logger.handlers[0].baseFilename
except AttributeError:
pass
else:
if os.environ.get('WAF_PRINT_FAILURE_LOG'):
# see #1930
msg = 'Log from (%s):\n%s\n' % (logfile, Utils.readf(logfile))
else:
msg = '%s\n(complete log in %s)' % (msg, logfile)
raise self.errors.ConfigurationError(msg, ex=ex)

def to_log(self, msg):
"""
Log some information to the logger (if present), or to stderr. If the message is empty,
it is not printed::
Logs information to the logger (if present), or to stderr.
Empty messages are not printed::

def build(bld):
bld.to_log('starting the build')

When in doubt, override this method, or provide a logger on the context class.
Provide a logger on the context class or override this method if necessary.

:param msg: message
:type msg: string
@@ -508,7 +518,7 @@ class Context(ctx):

def msg(self, *k, **kw):
"""
Print a configuration message of the form ``msg: result``.
Prints a configuration message of the form ``msg: result``.
The second part of the message will be in colors. The output
can be disabled easly by setting ``in_msg`` to a positive value::

@@ -536,7 +546,7 @@ class Context(ctx):
except KeyError:
result = k[1]

color = kw.get('color', None)
color = kw.get('color')
if not isinstance(color, str):
color = result and 'GREEN' or 'YELLOW'

@@ -544,12 +554,12 @@ class Context(ctx):

def start_msg(self, *k, **kw):
"""
Print the beginning of a 'Checking for xxx' message. See :py:meth:`waflib.Context.Context.msg`
Prints the beginning of a 'Checking for xxx' message. See :py:meth:`waflib.Context.Context.msg`
"""
if kw.get('quiet', None):
if kw.get('quiet'):
return

msg = kw.get('msg', None) or k[0]
msg = kw.get('msg') or k[0]
try:
if self.in_msg:
self.in_msg += 1
@@ -567,19 +577,19 @@ class Context(ctx):
Logs.pprint('NORMAL', "%s :" % msg.ljust(self.line_just), sep='')

def end_msg(self, *k, **kw):
"""Print the end of a 'Checking for' message. See :py:meth:`waflib.Context.Context.msg`"""
if kw.get('quiet', None):
"""Prints the end of a 'Checking for' message. See :py:meth:`waflib.Context.Context.msg`"""
if kw.get('quiet'):
return
self.in_msg -= 1
if self.in_msg:
return

result = kw.get('result', None) or k[0]
result = kw.get('result') or k[0]

defcolor = 'GREEN'
if result == True:
if result is True:
msg = 'ok'
elif result == False:
elif not result:
msg = 'not found'
defcolor = 'YELLOW'
else:
@@ -597,7 +607,17 @@ class Context(ctx):
Logs.pprint(color, msg)

def load_special_tools(self, var, ban=[]):
global waf_dir
"""
Loads third-party extensions modules for certain programming languages
by trying to list certain files in the extras/ directory. This method
is typically called once for a programming language group, see for
example :py:mod:`waflib.Tools.compiler_c`

:param var: glob expression, for example 'cxx\_\*.py'
:type var: string
:param ban: list of exact file names to exclude
:type ban: list of string
"""
if os.path.isdir(waf_dir):
lst = self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var)
for x in lst:
@@ -608,12 +628,12 @@ class Context(ctx):
waflibs = PyZipFile(waf_dir)
lst = waflibs.namelist()
for x in lst:
if not re.match("waflib/extras/%s" % var.replace("*", ".*"), var):
if not re.match('waflib/extras/%s' % var.replace('*', '.*'), var):
continue
f = os.path.basename(x)
doban = False
for b in ban:
r = b.replace("*", ".*")
r = b.replace('*', '.*')
if re.match(r, f):
doban = True
if not doban:
@@ -622,13 +642,13 @@ class Context(ctx):

cache_modules = {}
"""
Dictionary holding already loaded modules, keyed by their absolute path.
Dictionary holding already loaded modules (wscript), indexed by their absolute path.
The modules are added automatically by :py:func:`waflib.Context.load_module`
"""

def load_module(path, encoding=None):
"""
Load a source file as a python module.
Loads a wscript file as a python module. This method caches results in :py:attr:`waflib.Context.cache_modules`

:param path: file path
:type path: string
@@ -648,17 +668,17 @@ def load_module(path, encoding=None):

module_dir = os.path.dirname(path)
sys.path.insert(0, module_dir)

try : exec(compile(code, path, 'exec'), module.__dict__)
finally: sys.path.remove(module_dir)
try:
exec(compile(code, path, 'exec'), module.__dict__)
finally:
sys.path.remove(module_dir)

cache_modules[path] = module

return module

def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
"""
Import a Waf tool (python module), and store it in the dict :py:const:`waflib.Context.Context.tools`
Imports a Waf tool as a python module, and stores it in the dict :py:const:`waflib.Context.Context.tools`

:type tool: string
:param tool: Name of the tool
@@ -672,14 +692,18 @@ def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
else:
tool = tool.replace('++', 'xx')

origSysPath = sys.path
if not with_sys_path: sys.path = []
if not with_sys_path:
back_path = sys.path
sys.path = []
try:
if tooldir:
assert isinstance(tooldir, list)
sys.path = tooldir + sys.path
try:
__import__(tool)
except ImportError as e:
e.waf_sys_path = list(sys.path)
raise
finally:
for d in tooldir:
sys.path.remove(d)
@@ -687,7 +711,8 @@ def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
Context.tools[tool] = ret
return ret
else:
if not with_sys_path: sys.path.insert(0, waf_dir)
if not with_sys_path:
sys.path.insert(0, waf_dir)
try:
for x in ('waflib.Tools.%s', 'waflib.extras.%s', 'waflib.%s', '%s'):
try:
@@ -695,13 +720,18 @@ def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
break
except ImportError:
x = None
if x is None: # raise an exception
else: # raise an exception
__import__(tool)
except ImportError as e:
e.waf_sys_path = list(sys.path)
raise
finally:
if not with_sys_path: sys.path.remove(waf_dir)
if not with_sys_path:
sys.path.remove(waf_dir)
ret = sys.modules[x % tool]
Context.tools[tool] = ret
return ret
finally:
if not with_sys_path: sys.path += origSysPath
if not with_sys_path:
sys.path += back_path


+ 9
- 11
waflib/Errors.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2010 (ita)
# Thomas Nagy, 2010-2018 (ita)

"""
Exceptions used in the Waf code
@@ -17,6 +17,7 @@ class WafError(Exception):
:param ex: exception causing this error (optional)
:type ex: exception
"""
Exception.__init__(self)
self.msg = msg
assert not isinstance(msg, Exception)

@@ -35,9 +36,7 @@ class WafError(Exception):
return str(self.msg)

class BuildError(WafError):
"""
Errors raised during the build and install phases
"""
"""Error raised during the build and install phases"""
def __init__(self, error_tasks=[]):
"""
:param error_tasks: tasks that could not complete normally
@@ -47,24 +46,23 @@ class BuildError(WafError):
WafError.__init__(self, self.format_error())

def format_error(self):
"""format the error messages from the tasks that failed"""
"""Formats the error messages from the tasks that failed"""
lst = ['Build failed']
for tsk in self.tasks:
txt = tsk.format_error()
if txt: lst.append(txt)
if txt:
lst.append(txt)
return '\n'.join(lst)

class ConfigurationError(WafError):
"""
Configuration exception raised in particular by :py:meth:`waflib.Context.Context.fatal`
"""
"""Configuration exception raised in particular by :py:meth:`waflib.Context.Context.fatal`"""
pass

class TaskRescan(WafError):
"""task-specific exception type, trigger a signature recomputation"""
"""Task-specific exception type signalling required signature recalculations"""
pass

class TaskNotReady(WafError):
"""task-specific exception type, raised when the task signature cannot be computed"""
"""Task-specific exception type signalling that task signatures cannot be computed"""
pass


+ 70
- 35
waflib/Logs.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
# Thomas Nagy, 2005-2018 (ita)

"""
logging, colors, terminal width and pretty-print
@@ -23,8 +23,15 @@ import logging
LOG_FORMAT = os.environ.get('WAF_LOG_FORMAT', '%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s')
HOUR_FORMAT = os.environ.get('WAF_HOUR_FORMAT', '%H:%M:%S')

zones = ''
zones = []
"""
See :py:class:`waflib.Logs.log_filter`
"""

verbose = 0
"""
Global verbosity level, see :py:func:`waflib.Logs.debug` and :py:func:`waflib.Logs.error`
"""

colors_lst = {
'USE' : True,
@@ -49,6 +56,15 @@ except NameError:
unicode = None

def enable_colors(use):
"""
If *1* is given, then the system will perform a few verifications
before enabling colors, such as checking whether the interpreter
is running in a terminal. A value of zero will disable colors,
and a value above *1* will force colors.

:param use: whether to enable colors or not
:type use: integer
"""
if use == 1:
if not (sys.stderr.isatty() or sys.stdout.isatty()):
use = 0
@@ -74,15 +90,23 @@ except AttributeError:
return 80

get_term_cols.__doc__ = """
Get the console width in characters.
Returns the console width in characters.

:return: the number of characters per line
:rtype: int
"""

def get_color(cl):
if not colors_lst['USE']: return ''
return colors_lst.get(cl, '')
"""
Returns the ansi sequence corresponding to the given color name.
An empty string is returned when coloring is globally disabled.

:param cl: color name in capital letters
:type cl: string
"""
if colors_lst['USE']:
return colors_lst.get(cl, '')
return ''

class color_dict(object):
"""attribute-based color access, eg: colors.PINK"""
@@ -96,7 +120,7 @@ colors = color_dict()
re_log = re.compile(r'(\w+): (.*)', re.M)
class log_filter(logging.Filter):
"""
The waf logs are of the form 'name: message', and can be filtered by 'waf --zones=name'.
Waf logs are of the form 'name: message', and can be filtered by 'waf --zones=name'.
For example, the following::

from waflib import Logs
@@ -106,17 +130,14 @@ class log_filter(logging.Filter):

$ waf --zones=test
"""
def __init__(self, name=None):
pass
def __init__(self, name=''):
logging.Filter.__init__(self, name)

def filter(self, rec):
"""
filter a record, adding the colors automatically
Filters log records by zone and by logging level

* error: red
* warning: yellow

:param rec: message to record
:param rec: log entry
"""
rec.zone = rec.module
if rec.levelno >= logging.INFO:
@@ -136,6 +157,9 @@ class log_filter(logging.Filter):
class log_handler(logging.StreamHandler):
"""Dispatches messages to stderr/stdout depending on the severity level"""
def emit(self, record):
"""
Delegates the functionality to :py:meth:`waflib.Logs.log_handler.emit_override`
"""
# default implementation
try:
try:
@@ -153,6 +177,9 @@ class log_handler(logging.StreamHandler):
self.handleError(record)

def emit_override(self, record, **kw):
"""
Writes the log record to the desired stream (stderr/stdout)
"""
self.terminator = getattr(record, 'terminator', '\n')
stream = self.stream
if unicode:
@@ -169,7 +196,7 @@ class log_handler(logging.StreamHandler):
else:
stream.write(fs % msg)
except UnicodeError:
stream.write((fs % msg).encode("UTF-8"))
stream.write((fs % msg).encode('utf-8'))
else:
logging.StreamHandler.emit(self, record)

@@ -179,7 +206,10 @@ class formatter(logging.Formatter):
logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT)

def format(self, rec):
"""Messages in warning, error or info mode are displayed in color by default"""
"""
Formats records and adds colors as needed. The records do not get
a leading hour format if the logging level is above *INFO*.
"""
try:
msg = rec.msg.decode('utf-8')
except Exception:
@@ -200,10 +230,14 @@ class formatter(logging.Formatter):
c2 = getattr(rec, 'c2', colors.NORMAL)
msg = '%s%s%s' % (c1, msg, c2)
else:
msg = msg.replace('\r', '\n')
msg = re.sub(r'\x1B\[(K|.*?(m|h|l))', '', msg)
# remove single \r that make long lines in text files
# and other terminal commands
msg = re.sub(r'\r(?!\n)|\x1B\[(K|.*?(m|h|l))', '', msg)

if rec.levelno >= logging.INFO: # ??
if rec.levelno >= logging.INFO:
# the goal of this is to format without the leading "Logs, hour" prefix
if rec.args:
return msg % rec.args
return msg

rec.msg = msg
@@ -216,19 +250,17 @@ log = None

def debug(*k, **kw):
"""
Wrap logging.debug, the output is filtered for performance reasons
Wraps logging.debug and discards messages if the verbosity level :py:attr:`waflib.Logs.verbose` ≤ 0
"""
if verbose:
k = list(k)
k[0] = k[0].replace('\n', ' ')
global log
log.debug(*k, **kw)

def error(*k, **kw):
"""
Wrap logging.errors, display the origin of the message when '-vv' is set
Wrap logging.errors, adds the stack trace when the verbosity level :py:attr:`waflib.Logs.verbose` ≥ 2
"""
global log
log.error(*k, **kw)
if verbose > 2:
st = traceback.extract_stack()
@@ -236,28 +268,27 @@ def error(*k, **kw):
st = st[:-1]
buf = []
for filename, lineno, name, line in st:
buf.append(' File "%s", line %d, in %s' % (filename, lineno, name))
buf.append(' File %r, line %d, in %s' % (filename, lineno, name))
if line:
buf.append(' %s' % line.strip())
if buf: log.error("\n".join(buf))
if buf:
log.error('\n'.join(buf))

def warn(*k, **kw):
"""
Wrap logging.warn
Wraps logging.warn
"""
global log
log.warn(*k, **kw)

def info(*k, **kw):
"""
Wrap logging.info
Wraps logging.info
"""
global log
log.info(*k, **kw)

def init_log():
"""
Initialize the loggers globally
Initializes the logger :py:attr:`waflib.Logs.log`
"""
global log
log = logging.getLogger('waflib')
@@ -271,7 +302,7 @@ def init_log():

def make_logger(path, name):
"""
Create a simple logger, which is often used to redirect the context command output::
Creates a simple logger, which is often used to redirect the context command output::

from waflib import Logs
bld.logger = Logs.make_logger('test.log', 'build')
@@ -291,7 +322,11 @@ def make_logger(path, name):
:type name: string
"""
logger = logging.getLogger(name)
hdlr = logging.FileHandler(path, 'w')
if sys.hexversion > 0x3000000:
encoding = sys.stdout.encoding
else:
encoding = None
hdlr = logging.FileHandler(path, 'w', encoding=encoding)
formatter = logging.Formatter('%(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
@@ -300,7 +335,7 @@ def make_logger(path, name):

def make_mem_logger(name, to_log, size=8192):
"""
Create a memory logger to avoid writing concurrently to the main logger
Creates a memory logger to avoid writing concurrently to the main logger
"""
from logging.handlers import MemoryHandler
logger = logging.getLogger(name)
@@ -314,7 +349,7 @@ def make_mem_logger(name, to_log, size=8192):

def free_logger(logger):
"""
Free the resources held by the loggers created through make_logger or make_mem_logger.
Frees the resources held by the loggers created through make_logger or make_mem_logger.
This is used for file cleanup and for handler removal (logger objects are re-used).
"""
try:
@@ -326,7 +361,7 @@ def free_logger(logger):

def pprint(col, msg, label='', sep='\n'):
"""
Print messages in color immediately on stderr::
Prints messages in color immediately on stderr::

from waflib import Logs
Logs.pprint('RED', 'Something bad just happened')
@@ -340,5 +375,5 @@ def pprint(col, msg, label='', sep='\n'):
:param sep: a string to append at the end (line separator)
:type sep: string
"""
info("%s%s%s %s" % (colors(col), msg, colors.NORMAL, label), extra={'terminator':sep})
info('%s%s%s %s', colors(col), msg, colors.NORMAL, label, extra={'terminator':sep})


+ 376
- 254
waflib/Node.py
File diff suppressed because it is too large
View File


+ 116
- 47
waflib/Options.py View File

@@ -1,66 +1,75 @@
#!/usr/bin/env python
# encoding: utf-8
# Scott Newton, 2005 (scottn)
# Thomas Nagy, 2006-2010 (ita)
# Thomas Nagy, 2006-2018 (ita)

"""
Support for waf command-line options

Provides default command-line options,
as well as custom ones, used by the ``options`` wscript function.

Provides default and command-line options, as well the command
that reads the ``options`` wscript function.
"""

import os, tempfile, optparse, sys, re
from waflib import Logs, Utils, Context
from waflib import Logs, Utils, Context, Errors

cmds = 'distclean configure build install clean uninstall check dist distcheck'.split()
options = optparse.Values()
"""
Constant representing the default waf commands displayed in::

$ waf --help

"""

options = {}
"""
A dictionary representing the command-line options::
A global dictionary representing user-provided command-line options::

$ waf --foo=bar

"""

commands = []
"""
List of commands to execute extracted from the command-line. This list is consumed during the execution, see :py:func:`waflib.Scripting.run_commands`.
List of commands to execute extracted from the command-line. This list
is consumed during the execution by :py:func:`waflib.Scripting.run_commands`.
"""

envvars = []
"""
List of environment variable declarations placed after the Waf executable name.
These are detected by searching for "=" in the rest arguments.
These are detected by searching for "=" in the remaining arguments.
You probably do not want to use this.
"""

lockfile = os.environ.get('WAFLOCK', '.lock-waf_%s_build' % sys.platform)
platform = Utils.unversioned_sys_platform()

"""
Name of the lock file that marks a project as configured
"""

class opt_parser(optparse.OptionParser):
"""
Command-line options parser.
"""
def __init__(self, ctx):
optparse.OptionParser.__init__(self, conflict_handler="resolve", version='waf %s (%s)' % (Context.WAFVERSION, Context.WAFREVISION))
def __init__(self, ctx, allow_unknown=False):
optparse.OptionParser.__init__(self, conflict_handler='resolve', add_help_option=False,
version='waf %s (%s)' % (Context.WAFVERSION, Context.WAFREVISION))
self.formatter.width = Logs.get_term_cols()
self.ctx = ctx
self.allow_unknown = allow_unknown

def _process_args(self, largs, rargs, values):
	"""
	Custom _process_args to allow unknown options according to the allow_unknown status

	When ``self.allow_unknown`` is set, unrecognized or ambiguous options are
	appended to ``largs`` (the leftover arguments) instead of aborting the
	parse; otherwise the default optparse error handling applies.
	"""
	# the base implementation raises on the first bad option and stops;
	# looping while rargs is non-empty resumes parsing the remaining args
	while rargs:
		try:
			optparse.OptionParser._process_args(self,largs,rargs,values)
		except (optparse.BadOptionError, optparse.AmbiguousOptionError) as e:
			if self.allow_unknown:
				largs.append(e.opt_str)
			else:
				self.error(str(e))

def print_usage(self, file=None):
	# display the full help text instead of the short usage summary
	return self.print_help(file)

def get_usage(self):
"""
Return the message to print on ``waf --help``
Builds the message to print on ``waf --help``

:rtype: string
"""
cmds_str = {}
for cls in Context.classes:
@@ -96,10 +105,9 @@ Main commands (example: ./waf build -j4)

class OptionsContext(Context.Context):
"""
Collect custom options from wscript files and parses the command line.
Set the global :py:const:`waflib.Options.commands` and :py:const:`waflib.Options.options` values.
Collects custom options from wscript files and parses the command line.
Sets the global :py:const:`waflib.Options.commands` and :py:const:`waflib.Options.options` values.
"""

cmd = 'options'
fun = 'options'

@@ -114,11 +122,18 @@ class OptionsContext(Context.Context):
jobs = self.jobs()
p = self.add_option
color = os.environ.get('NOCOLOR', '') and 'no' or 'auto'
if os.environ.get('CLICOLOR', '') == '0':
color = 'no'
elif os.environ.get('CLICOLOR_FORCE', '') == '1':
color = 'yes'
p('-c', '--color', dest='colors', default=color, action='store', help='whether to use colors (yes/no/auto) [default: auto]', choices=('yes', 'no', 'auto'))
p('-j', '--jobs', dest='jobs', default=jobs, type='int', help='amount of parallel jobs (%r)' % jobs)
p('-j', '--jobs', dest='jobs', default=jobs, type='int', help='amount of parallel jobs (%r)' % jobs)
p('-k', '--keep', dest='keep', default=0, action='count', help='continue despite errors (-kk to try harder)')
p('-v', '--verbose', dest='verbose', default=0, action='count', help='verbosity level -v -vv or -vvv [default: 0]')
p('--zones', dest='zones', default='', action='store', help='debugging zones (task_gen, deps, tasks, etc)')
p('--profile', dest='profile', default=0, action='store_true', help=optparse.SUPPRESS_HELP)
p('--pdb', dest='pdb', default=0, action='store_true', help=optparse.SUPPRESS_HELP)
p('-h', '--help', dest='whelp', default=0, action='store_true', help="show this help message and exit")

gr = self.add_option_group('Configuration options')
self.option_groups['configure options'] = gr
@@ -132,7 +147,7 @@ class OptionsContext(Context.Context):

default_prefix = getattr(Context.g_module, 'default_prefix', os.environ.get('PREFIX'))
if not default_prefix:
if platform == 'win32':
if Utils.unversioned_sys_platform() == 'win32':
d = tempfile.gettempdir()
default_prefix = d[0].upper() + d[1:]
# win32 preserves the case, but gettempdir does not
@@ -161,8 +176,8 @@ class OptionsContext(Context.Context):

def jobs(self):
"""
Find the amount of cpu cores to set the default amount of tasks executed in parallel. At
runtime the options can be obtained from :py:const:`waflib.Options.options` ::
Finds the optimal amount of cpu cores to use for parallel jobs.
At runtime the options can be obtained from :py:const:`waflib.Options.options` ::

from waflib.Options import options
njobs = options.jobs
@@ -185,7 +200,7 @@ class OptionsContext(Context.Context):
if not count and os.name not in ('nt', 'java'):
try:
tmp = self.cmd_and_log(['sysctl', '-n', 'hw.ncpu'], quiet=0)
except Exception:
except Errors.WafError:
pass
else:
if re.match('^[0-9]+$', tmp):
@@ -198,21 +213,25 @@ class OptionsContext(Context.Context):

def add_option(self, *k, **kw):
"""
Wrapper for optparse.add_option::
Wraps ``optparse.add_option``::

def options(ctx):
ctx.add_option('-u', '--use', dest='use', default=False, action='store_true',
help='a boolean option')
ctx.add_option('-u', '--use', dest='use', default=False,
action='store_true', help='a boolean option')

:rtype: optparse option object
"""
return self.parser.add_option(*k, **kw)

def add_option_group(self, *k, **kw):
"""
Wrapper for optparse.add_option_group::
Wraps ``optparse.add_option_group``::

def options(ctx):
gr = ctx.add_option_group('some options')
gr.add_option('-u', '--use', dest='use', default=False, action='store_true')

:rtype: optparse option group object
"""
try:
gr = self.option_groups[k[0]]
@@ -223,13 +242,14 @@ class OptionsContext(Context.Context):

def get_option_group(self, opt_str):
"""
Wrapper for optparse.get_option_group::
Wraps ``optparse.get_option_group``::

def options(ctx):
gr = ctx.get_option_group('configure options')
gr.add_option('-o', '--out', action='store', default='',
help='build dir for the project', dest='out')

:rtype: optparse option group object
"""
try:
return self.option_groups[opt_str]
@@ -239,35 +259,84 @@ class OptionsContext(Context.Context):
return group
return None

def parse_args(self, _args=None):
"""
Parse arguments from a list (not bound to the command-line).
def sanitize_path(self, path, cwd=None):
	"""
	Resolves a user-provided path into an absolute, normalized form.

	:param path: path to clean up, possibly starting with ``~``
	:type path: string
	:param cwd: base directory for relative paths; defaults to the launch directory
	:type cwd: string
	:return: absolute normalized path
	:rtype: string
	"""
	base = cwd or Context.launch_dir
	expanded = os.path.expanduser(path)
	return os.path.abspath(os.path.normpath(os.path.join(base, expanded)))

:param _args: arguments
:type _args: list of strings
def parse_cmd_args(self, _args=None, cwd=None, allow_unknown=False):
"""
Just parse the arguments
"""
global options, commands, envvars
self.parser.allow_unknown = allow_unknown
(options, leftover_args) = self.parser.parse_args(args=_args)

envvars = []
commands = []
for arg in leftover_args:
if '=' in arg:
envvars.append(arg)
else:
elif arg != 'options':
commands.append(arg)

if options.destdir:
options.destdir = Utils.sane_path(options.destdir)
for name in 'top out destdir prefix bindir libdir'.split():
# those paths are usually expanded from Context.launch_dir
if getattr(options, name, None):
path = self.sanitize_path(getattr(options, name), cwd)
setattr(options, name, path)
return options, commands, envvars

def init_module_vars(self, arg_options, arg_commands, arg_envvars):
options.__dict__.clear()
del commands[:]
del envvars[:]

options.__dict__.update(arg_options.__dict__)
commands.extend(arg_commands)
envvars.extend(arg_envvars)

for var in envvars:
(name, value) = var.split('=', 1)
os.environ[name.strip()] = value

def init_logs(self, options, commands, envvars):
Logs.verbose = options.verbose
if options.verbose >= 1:
self.load('errcheck')

colors = {'yes' : 2, 'auto' : 1, 'no' : 0}[options.colors]
Logs.enable_colors(colors)

if options.zones:
Logs.zones = options.zones.split(',')
if not Logs.verbose:
Logs.verbose = 1
elif Logs.verbose > 0:
Logs.zones = ['runner']
if Logs.verbose > 2:
Logs.zones = ['*']

def parse_args(self, _args=None):
"""
Parses arguments from a list which is not necessarily the command-line.
Initializes the module variables options, commands and envvars
If help is requested, prints it and exit the application

:param _args: arguments
:type _args: list of strings
"""
options, commands, envvars = self.parse_cmd_args()
self.init_logs(options, commands, envvars)
self.init_module_vars(options, commands, envvars)

def execute(self):
"""
See :py:func:`waflib.Context.Context.execute`
"""
super(OptionsContext, self).execute()
self.parse_args()
Utils.alloc_process_pool(options.jobs)


+ 407
- 169
waflib/Runner.py View File

@@ -1,98 +1,125 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
# Thomas Nagy, 2005-2018 (ita)

"""
Runner.py: Task scheduling and execution

"""

import random, atexit
import heapq, traceback
try:
from queue import Queue
from queue import Queue, PriorityQueue
except ImportError:
from Queue import Queue
try:
from Queue import PriorityQueue
except ImportError:
class PriorityQueue(Queue):
def _init(self, maxsize):
self.maxsize = maxsize
self.queue = []
def _put(self, item):
heapq.heappush(self.queue, item)
def _get(self):
return heapq.heappop(self.queue)

from waflib import Utils, Task, Errors, Logs

GAP = 10
GAP = 5
"""
Wait for free tasks if there are at least ``GAP * njobs`` in queue
Wait for at least ``GAP * njobs`` before trying to enqueue more tasks to run
"""

class TaskConsumer(Utils.threading.Thread):
"""
Task consumers belong to a pool of workers
class PriorityTasks(object):
def __init__(self):
self.lst = []
def __len__(self):
return len(self.lst)
def __iter__(self):
return iter(self.lst)
def clear(self):
self.lst = []
def append(self, task):
heapq.heappush(self.lst, task)
def appendleft(self, task):
"Deprecated, do not use"
heapq.heappush(self.lst, task)
def pop(self):
return heapq.heappop(self.lst)
def extend(self, lst):
if self.lst:
for x in lst:
self.append(x)
else:
if isinstance(lst, list):
self.lst = lst
heapq.heapify(lst)
else:
self.lst = lst.lst

They wait for tasks in the queue and then use ``task.process(...)``
class Consumer(Utils.threading.Thread):
"""
def __init__(self):
Daemon thread object that executes a task. It shares a semaphore with
the coordinator :py:class:`waflib.Runner.Spawner`. There is one
instance per task to consume.
"""
def __init__(self, spawner, task):
Utils.threading.Thread.__init__(self)
self.ready = Queue()
self.task = task
"""Task to execute"""
self.spawner = spawner
"""Coordinator object"""
self.setDaemon(1)
self.start()
def run(self):
"""
Obtain :py:class:`waflib.Task.TaskBase` instances from this queue.
Processes a single task
"""
try:
if not self.spawner.master.stop:
self.spawner.master.process_task(self.task)
finally:
self.spawner.sem.release()
self.spawner.master.out.put(self.task)
self.task = None
self.spawner = None

class Spawner(Utils.threading.Thread):
"""
Daemon thread that consumes tasks from :py:class:`waflib.Runner.Parallel` producer and
spawns a consuming thread :py:class:`waflib.Runner.Consumer` for each
:py:class:`waflib.Task.Task` instance.
"""
def __init__(self, master):
Utils.threading.Thread.__init__(self)
self.master = master
""":py:class:`waflib.Runner.Parallel` producer instance"""
self.sem = Utils.threading.Semaphore(master.numjobs)
"""Bounded semaphore that prevents spawning more than *n* concurrent consumers"""
self.setDaemon(1)
self.start()

def run(self):
"""
Loop over the tasks to execute
Spawns new consumers to execute tasks by delegating to :py:meth:`waflib.Runner.Spawner.loop`
"""
try:
self.loop()
except Exception:
# Python 2 prints unnecessary messages when shutting down
# we also want to stop the thread properly
pass

def loop(self):
"""
Obtain tasks from :py:attr:`waflib.Runner.TaskConsumer.ready` and call
:py:meth:`waflib.Task.TaskBase.process`. If the object is a function, execute it.
Consumes task objects from the producer; ends when the producer has no more
task to provide.
"""
master = self.master
while 1:
tsk = self.ready.get()
if not isinstance(tsk, Task.TaskBase):
tsk(self)
else:
tsk.process()

pool = Queue()
"""
Pool of task consumer objects
"""

def get_pool():
"""
Obtain a task consumer from :py:attr:`waflib.Runner.pool`.
Do not forget to put it back by using :py:func:`waflib.Runner.put_pool`
and reset properly (original waiting queue).

:rtype: :py:class:`waflib.Runner.TaskConsumer`
"""
try:
return pool.get(False)
except Exception:
return TaskConsumer()

def put_pool(x):
"""
Return a task consumer to the thread pool :py:attr:`waflib.Runner.pool`

:param x: task consumer object
:type x: :py:class:`waflib.Runner.TaskConsumer`
"""
pool.put(x)

def _free_resources():
global pool
lst = []
while pool.qsize():
lst.append(pool.get())
for x in lst:
x.ready.put(None)
for x in lst:
x.join()
pool = None
atexit.register(_free_resources)
task = master.ready.get()
self.sem.acquire()
if not master.stop:
task.log_display(task.generator.bld)
Consumer(self, task)

class Parallel(object):
"""
@@ -106,7 +133,7 @@ class Parallel(object):

self.numjobs = j
"""
Number of consumers in the pool
Amount of parallel consumers to use
"""

self.bld = bld
@@ -114,19 +141,25 @@ class Parallel(object):
Instance of :py:class:`waflib.Build.BuildContext`
"""

self.outstanding = []
"""List of :py:class:`waflib.Task.TaskBase` that may be ready to be executed"""
self.outstanding = PriorityTasks()
"""Heap of :py:class:`waflib.Task.Task` that may be ready to be executed"""

self.postponed = PriorityTasks()
"""Heap of :py:class:`waflib.Task.Task` which are not ready to run for non-DAG reasons"""

self.frozen = []
"""List of :py:class:`waflib.Task.TaskBase` that cannot be executed immediately"""
self.incomplete = set()
"""List of :py:class:`waflib.Task.Task` waiting for dependent tasks to complete (DAG)"""

self.ready = PriorityQueue(0)
"""List of :py:class:`waflib.Task.Task` ready to be executed by consumers"""

self.out = Queue(0)
"""List of :py:class:`waflib.Task.TaskBase` returned by the task consumers"""
"""List of :py:class:`waflib.Task.Task` returned by the task consumers"""

self.count = 0
"""Amount of tasks that may be processed by :py:class:`waflib.Runner.TaskConsumer`"""

self.processed = 1
self.processed = 0
"""Amount of tasks processed"""

self.stop = False
@@ -139,33 +172,44 @@ class Parallel(object):
"""Task iterator which must give groups of parallelizable tasks when calling ``next()``"""

self.dirty = False
"""Flag to indicate that tasks have been executed, and that the build cache must be saved (call :py:meth:`waflib.Build.BuildContext.store`)"""
"""
Flag that indicates that the build cache must be saved when a task was executed
(calls :py:meth:`waflib.Build.BuildContext.store`)"""

self.revdeps = Utils.defaultdict(set)
"""
The reverse dependency graph of dependencies obtained from Task.run_after
"""

self.spawner = Spawner(self)
"""
Coordinating daemon thread that spawns thread consumers
"""

def get_next_task(self):
"""
Obtain the next task to execute.
Obtains the next Task instance to run

:rtype: :py:class:`waflib.Task.TaskBase`
:rtype: :py:class:`waflib.Task.Task`
"""
if not self.outstanding:
return None
return self.outstanding.pop(0)
return self.outstanding.pop()

def postpone(self, tsk):
"""
A task cannot be executed at this point, put it in the list :py:attr:`waflib.Runner.Parallel.frozen`.
Adds the task to the list :py:attr:`waflib.Runner.Parallel.postponed`.
The order is scrambled so as to consume as many tasks in parallel as possible.

:param tsk: task
:type tsk: :py:class:`waflib.Task.TaskBase`
:param tsk: task instance
:type tsk: :py:class:`waflib.Task.Task`
"""
if random.randint(0, 1):
self.frozen.insert(0, tsk)
else:
self.frozen.append(tsk)
self.postponed.append(tsk)

def refill_task_list(self):
"""
Put the next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`.
Pulls a next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`.
Ensures that all tasks in the current build group are complete before processing the next one.
"""
while self.count > self.numjobs * GAP:
self.get_out()
@@ -173,132 +217,224 @@ class Parallel(object):
while not self.outstanding:
if self.count:
self.get_out()
elif self.frozen:
if self.outstanding:
break
elif self.postponed:
try:
cond = self.deadlock == self.processed
except AttributeError:
pass
else:
if cond:
msg = 'check the build order for the tasks'
for tsk in self.frozen:
if not tsk.run_after:
msg = 'check the methods runnable_status'
break
# The most common reason is conflicting build order declaration
# for example: "X run_after Y" and "Y run_after X"
# Another can be changing "run_after" dependencies while the build is running
# for example: updating "tsk.run_after" in the "runnable_status" method
lst = []
for tsk in self.frozen:
lst.append('%s\t-> %r' % (repr(tsk), [id(x) for x in tsk.run_after]))
raise Errors.WafError('Deadlock detected: %s%s' % (msg, ''.join(lst)))
for tsk in self.postponed:
deps = [id(x) for x in tsk.run_after if not x.hasrun]
lst.append('%s\t-> %r' % (repr(tsk), deps))
if not deps:
lst.append('\n task %r dependencies are done, check its *runnable_status*?' % id(tsk))
raise Errors.WafError('Deadlock detected: check the task build order%s' % ''.join(lst))
self.deadlock = self.processed

if self.frozen:
self.outstanding += self.frozen
self.frozen = []
if self.postponed:
self.outstanding.extend(self.postponed)
self.postponed.clear()
elif not self.count:
self.outstanding.extend(next(self.biter))
self.total = self.bld.total()
break
if self.incomplete:
for x in self.incomplete:
for k in x.run_after:
if not k.hasrun:
break
else:
# dependency added after the build started without updating revdeps
self.incomplete.remove(x)
self.outstanding.append(x)
break
else:
raise Errors.WafError('Broken revdeps detected on %r' % self.incomplete)
else:
tasks = next(self.biter)
ready, waiting = self.prio_and_split(tasks)
self.outstanding.extend(ready)
self.incomplete.update(waiting)
self.total = self.bld.total()
break

def add_more_tasks(self, tsk):
"""
Tasks may be added dynamically during the build by binding them to the task :py:attr:`waflib.Task.TaskBase.more_tasks`
If a task provides :py:attr:`waflib.Task.Task.more_tasks`, then the tasks contained
in that list are added to the current build and will be processed before the next build group.

:param tsk: task
:type tsk: :py:attr:`waflib.Task.TaskBase`
The priorities for dependent tasks are not re-calculated globally

:param tsk: task instance
:type tsk: :py:attr:`waflib.Task.Task`
"""
if getattr(tsk, 'more_tasks', None):
self.outstanding += tsk.more_tasks
more = set(tsk.more_tasks)
groups_done = set()
def iteri(a, b):
for x in a:
yield x
for x in b:
yield x

# Update the dependency tree
# this assumes that task.run_after values were updated
for x in iteri(self.outstanding, self.incomplete):
for k in x.run_after:
if isinstance(k, Task.TaskGroup):
if k not in groups_done:
groups_done.add(k)
for j in k.prev & more:
self.revdeps[j].add(k)
elif k in more:
self.revdeps[k].add(x)

ready, waiting = self.prio_and_split(tsk.more_tasks)
self.outstanding.extend(ready)
self.incomplete.update(waiting)
self.total += len(tsk.more_tasks)

def mark_finished(self, tsk):
def try_unfreeze(x):
# DAG ancestors are likely to be in the incomplete set
# This assumes that the run_after contents have not changed
# after the build starts, else a deadlock may occur
if x in self.incomplete:
# TODO remove dependencies to free some memory?
# x.run_after.remove(tsk)
for k in x.run_after:
if not k.hasrun:
break
else:
self.incomplete.remove(x)
self.outstanding.append(x)

if tsk in self.revdeps:
for x in self.revdeps[tsk]:
if isinstance(x, Task.TaskGroup):
x.prev.remove(tsk)
if not x.prev:
for k in x.next:
# TODO necessary optimization?
k.run_after.remove(x)
try_unfreeze(k)
# TODO necessary optimization?
x.next = []
else:
try_unfreeze(x)
del self.revdeps[tsk]

if hasattr(tsk, 'semaphore'):
sem = tsk.semaphore
sem.release(tsk)
while sem.waiting and not sem.is_locked():
# take a frozen task, make it ready to run
x = sem.waiting.pop()
self._add_task(x)

def get_out(self):
"""
Obtain one task returned from the task consumers, and update the task count. Add more tasks if necessary through
:py:attr:`waflib.Runner.Parallel.add_more_tasks`.
Waits for a Task that task consumers add to :py:attr:`waflib.Runner.Parallel.out` after execution.
Adds more Tasks if necessary through :py:attr:`waflib.Runner.Parallel.add_more_tasks`.

:rtype: :py:attr:`waflib.Task.TaskBase`
:rtype: :py:attr:`waflib.Task.Task`
"""
tsk = self.out.get()
if not self.stop:
self.add_more_tasks(tsk)
self.mark_finished(tsk)

self.count -= 1
self.dirty = True
return tsk

def add_task(self, tsk):
"""
Pass a task to a consumer.
Enqueue a Task to :py:attr:`waflib.Runner.Parallel.ready` so that consumers can run them.

:param tsk: task
:type tsk: :py:attr:`waflib.Task.TaskBase`
:param tsk: task instance
:type tsk: :py:attr:`waflib.Task.Task`
"""
try:
self.pool
except AttributeError:
self.init_task_pool()
# TODO change in waf 2.1
self.ready.put(tsk)

def init_task_pool(self):
# lazy creation, and set a common pool for all task consumers
pool = self.pool = [get_pool() for i in range(self.numjobs)]
self.ready = Queue(0)
def setq(consumer):
consumer.ready = self.ready
for x in pool:
x.ready.put(setq)
return pool

def free_task_pool(self):
# return the consumers, setting a different queue for each of them
def setq(consumer):
consumer.ready = Queue(0)
self.out.put(self)
try:
pool = self.pool
except AttributeError:
pass
def _add_task(self, tsk):
if hasattr(tsk, 'semaphore'):
sem = tsk.semaphore
try:
sem.acquire(tsk)
except IndexError:
sem.waiting.add(tsk)
return

self.count += 1
self.processed += 1
if self.numjobs == 1:
tsk.log_display(tsk.generator.bld)
try:
self.process_task(tsk)
finally:
self.out.put(tsk)
else:
for x in pool:
self.ready.put(setq)
for x in pool:
self.get_out()
for x in pool:
put_pool(x)
self.pool = []
self.add_task(tsk)

def process_task(self, tsk):
"""
Processes a task and attempts to stop the build in case of errors
"""
tsk.process()
if tsk.hasrun != Task.SUCCESS:
self.error_handler(tsk)

def skip(self, tsk):
"""
Mark a task as skipped/up-to-date
"""
tsk.hasrun = Task.SKIPPED
self.mark_finished(tsk)

def cancel(self, tsk):
"""
Mark a task as failed because of unsatisfiable dependencies
"""
tsk.hasrun = Task.CANCELED
self.mark_finished(tsk)

def error_handler(self, tsk):
"""
Called when a task cannot be executed. The flag :py:attr:`waflib.Runner.Parallel.stop` is set, unless
the build is executed with::
Called when a task cannot be executed. The flag :py:attr:`waflib.Runner.Parallel.stop` is set,
unless the build is executed with::

$ waf build -k

:param tsk: task
:type tsk: :py:attr:`waflib.Task.TaskBase`
:param tsk: task instance
:type tsk: :py:attr:`waflib.Task.Task`
"""
if hasattr(tsk, 'scan') and hasattr(tsk, 'uid'):
# TODO waf 1.9 - this breaks encapsulation
key = (tsk.uid(), 'imp')
try:
del self.bld.task_sigs[key]
except KeyError:
pass
if not self.bld.keep:
self.stop = True
self.error.append(tsk)

def task_status(self, tsk):
"""
Obtains the task status to decide whether to run it immediately or not.

:return: the exit status, for example :py:attr:`waflib.Task.ASK_LATER`
:rtype: integer
"""
try:
return tsk.runnable_status()
except Exception:
self.processed += 1
tsk.err_msg = Utils.ex_stack()
tsk.err_msg = traceback.format_exc()
if not self.stop and self.bld.keep:
self.skip(tsk)
if self.bld.keep == 1:
# if -k stop at the first exception, if -kk try to go as far as possible
# if -k stop on the first exception, if -kk try to go as far as possible
if Logs.verbose > 1 or not self.error:
self.error.append(tsk)
self.stop = True
@@ -306,17 +442,20 @@ class Parallel(object):
if Logs.verbose > 1:
self.error.append(tsk)
return Task.EXCEPTION
tsk.hasrun = Task.EXCEPTION

tsk.hasrun = Task.EXCEPTION
self.error_handler(tsk)

return Task.EXCEPTION

def start(self):
"""
Give tasks to :py:class:`waflib.Runner.TaskConsumer` instances until the build finishes or the ``stop`` flag is set.
If only one job is used, then execute the tasks one by one, without consumers.
Obtains Task instances from the BuildContext instance and adds the ones that need to be executed to
:py:class:`waflib.Runner.Parallel.ready` so that the :py:class:`waflib.Runner.Spawner` consumer thread
has them executed. Obtains the executed Tasks back from :py:class:`waflib.Runner.Parallel.out`
and marks the build as failed by setting the ``stop`` flag.
If only one job is used, then executes the tasks one by one, without consumers.
"""

self.total = self.bld.total()

while not self.stop:
@@ -338,36 +477,135 @@ class Parallel(object):
self.processed += 1
continue

if self.stop: # stop immediately after a failure was detected
if self.stop: # stop immediately after a failure is detected
break


st = self.task_status(tsk)
if st == Task.RUN_ME:
tsk.position = (self.processed, self.total)
self.count += 1
tsk.master = self
self.processed += 1

if self.numjobs == 1:
tsk.process()
else:
self.add_task(tsk)
if st == Task.ASK_LATER:
self._add_task(tsk)
elif st == Task.ASK_LATER:
self.postpone(tsk)
elif st == Task.SKIP_ME:
self.processed += 1
self.skip(tsk)
self.add_more_tasks(tsk)
elif st == Task.CANCEL_ME:
# A dependency problem has occurred, and the
# build is most likely run with `waf -k`
if Logs.verbose > 1:
self.error.append(tsk)
self.processed += 1
self.cancel(tsk)

# self.count represents the tasks that have been made available to the consumer threads
# collect all the tasks after an error else the message may be incomplete
while self.error and self.count:
self.get_out()

#print loop
assert (self.count == 0 or self.stop)
self.ready.put(None)
if not self.stop:
assert not self.count
assert not self.postponed
assert not self.incomplete

def prio_and_split(self, tasks):
"""
Label input tasks with priority values, and return a pair containing
the tasks that are ready to run and the tasks that are necessarily
waiting for other tasks to complete.

The priority system is really meant as an optional layer for optimization:
dependency cycles are found quickly, and builds should be more efficient.
A high priority number means that a task is processed first.

This method can be overridden to disable the priority system::

def prio_and_split(self, tasks):
return tasks, []

# free the task pool, if any
self.free_task_pool()
:return: A pair of task lists
:rtype: tuple
"""
# to disable:
#return tasks, []
for x in tasks:
x.visited = 0

reverse = self.revdeps

groups_done = set()
for x in tasks:
for k in x.run_after:
if isinstance(k, Task.TaskGroup):
if k not in groups_done:
groups_done.add(k)
for j in k.prev:
reverse[j].add(k)
else:
reverse[k].add(x)

# the priority number is not the tree depth
def visit(n):
if isinstance(n, Task.TaskGroup):
return sum(visit(k) for k in n.next)

if n.visited == 0:
n.visited = 1

if n in reverse:
rev = reverse[n]
n.prio_order = n.tree_weight + len(rev) + sum(visit(k) for k in rev)
else:
n.prio_order = n.tree_weight

n.visited = 2
elif n.visited == 1:
raise Errors.WafError('Dependency cycle found!')
return n.prio_order

for x in tasks:
if x.visited != 0:
# must visit all to detect cycles
continue
try:
visit(x)
except Errors.WafError:
self.debug_cycles(tasks, reverse)

ready = []
waiting = []
for x in tasks:
for k in x.run_after:
if not k.hasrun:
waiting.append(x)
break
else:
ready.append(x)
return (ready, waiting)

def debug_cycles(self, tasks, reverse):
tmp = {}
for x in tasks:
tmp[x] = 0

def visit(n, acc):
if isinstance(n, Task.TaskGroup):
for k in n.next:
visit(k, acc)
return
if tmp[n] == 0:
tmp[n] = 1
for k in reverse.get(n, []):
visit(k, [n] + acc)
tmp[n] = 2
elif tmp[n] == 1:
lst = []
for tsk in acc:
lst.append(repr(tsk))
if tsk is n:
# exclude prior nodes, we want the minimum cycle
break
raise Errors.WafError('Task dependency cycle in "run_after" constraints: %s' % ''.join(lst))
for x in tasks:
visit(x, [])


+ 204
- 216
waflib/Scripting.py View File

@@ -1,9 +1,11 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
# Thomas Nagy, 2005-2018 (ita)

"Module called for configuring, compiling and installing targets"

from __future__ import with_statement

import os, shlex, shutil, traceback, errno, sys, stat
from waflib import Utils, Configure, Logs, Options, ConfigSet, Context, Errors, Build, Node

@@ -24,69 +26,66 @@ def waf_entry_point(current_directory, version, wafdir):
:param wafdir: absolute path representing the directory of the waf library
:type wafdir: string
"""

Logs.init_log()

if Context.WAFVERSION != version:
Logs.error('Waf script %r and library %r do not match (directory %r)' % (version, Context.WAFVERSION, wafdir))
Logs.error('Waf script %r and library %r do not match (directory %r)', version, Context.WAFVERSION, wafdir)
sys.exit(1)

if '--version' in sys.argv:
Context.run_dir = current_directory
ctx = Context.create_context('options')
ctx.curdir = current_directory
ctx.parse_args()
sys.exit(0)
# Store current directory before any chdir
Context.waf_dir = wafdir
Context.run_dir = Context.launch_dir = current_directory
start_dir = current_directory
no_climb = os.environ.get('NOCLIMB')

if len(sys.argv) > 1:
# os.path.join handles absolute paths in sys.argv[1] accordingly (it discards the previous ones)
# os.path.join handles absolute paths
# if sys.argv[1] is not an absolute path, then it is relative to the current working directory
potential_wscript = os.path.join(current_directory, sys.argv[1])
# maybe check if the file is executable
# perhaps extract 'wscript' as a constant
if os.path.basename(potential_wscript) == 'wscript' and os.path.isfile(potential_wscript):
if os.path.basename(potential_wscript) == Context.WSCRIPT_FILE and os.path.isfile(potential_wscript):
# need to explicitly normalize the path, as it may contain extra '/.'
# TODO abspath?
current_directory = os.path.normpath(os.path.dirname(potential_wscript))
path = os.path.normpath(os.path.dirname(potential_wscript))
start_dir = os.path.abspath(path)
no_climb = True
sys.argv.pop(1)

Context.waf_dir = wafdir
Context.launch_dir = current_directory
ctx = Context.create_context('options')
(options, commands, env) = ctx.parse_cmd_args(allow_unknown=True)
if options.top:
start_dir = Context.run_dir = Context.top_dir = options.top
no_climb = True
if options.out:
Context.out_dir = options.out

# if 'configure' is in the commands, do not search any further
no_climb = os.environ.get('NOCLIMB', None)
if not no_climb:
for k in no_climb_commands:
for y in sys.argv:
for y in commands:
if y.startswith(k):
no_climb = True
break

# if --top is provided assume the build started in the top directory
for i, x in enumerate(sys.argv):
# WARNING: this modifies sys.argv
if x.startswith('--top='):
Context.run_dir = Context.top_dir = Utils.sane_path(x[6:])
sys.argv[i] = '--top=' + Context.run_dir
if x.startswith('--out='):
Context.out_dir = Utils.sane_path(x[6:])
sys.argv[i] = '--out=' + Context.out_dir

# try to find a lock file (if the project was configured)
# at the same time, store the first wscript file seen
cur = current_directory
while cur and not Context.top_dir:
lst = os.listdir(cur)
cur = start_dir
while cur:
try:
lst = os.listdir(cur)
except OSError:
lst = []
Logs.error('Directory %r is unreadable!', cur)
if Options.lockfile in lst:
env = ConfigSet.ConfigSet()
try:
env.load(os.path.join(cur, Options.lockfile))
ino = os.stat(cur)[stat.ST_INO]
except Exception:
except EnvironmentError:
pass
else:
# check if the folder was not moved
for x in (env.run_dir, env.top_dir, env.out_dir):
if not x:
continue
if Utils.is_win32:
if cur == x:
load = True
@@ -102,7 +101,7 @@ def waf_entry_point(current_directory, version, wafdir):
load = True
break
else:
Logs.warn('invalid lock file in %s' % cur)
Logs.warn('invalid lock file in %s', cur)
load = False

if load:
@@ -123,56 +122,62 @@ def waf_entry_point(current_directory, version, wafdir):
if no_climb:
break

if not Context.run_dir:
if '-h' in sys.argv or '--help' in sys.argv:
Logs.warn('No wscript file found: the help message may be incomplete')
Context.run_dir = current_directory
ctx = Context.create_context('options')
ctx.curdir = current_directory
ctx.parse_args()
wscript = os.path.normpath(os.path.join(Context.run_dir, Context.WSCRIPT_FILE))
if not os.path.exists(wscript):
if options.whelp:
Logs.warn('These are the generic options (no wscript/project found)')
ctx.parser.print_help()
sys.exit(0)
Logs.error('Waf: Run from a directory containing a file named %r' % Context.WSCRIPT_FILE)
Logs.error('Waf: Run from a folder containing a %r file (or try -h for the generic options)', Context.WSCRIPT_FILE)
sys.exit(1)

try:
os.chdir(Context.run_dir)
except OSError:
Logs.error('Waf: The folder %r is unreadable' % Context.run_dir)
Logs.error('Waf: The folder %r is unreadable', Context.run_dir)
sys.exit(1)

try:
set_main_module(os.path.normpath(os.path.join(Context.run_dir, Context.WSCRIPT_FILE)))
set_main_module(wscript)
except Errors.WafError as e:
Logs.pprint('RED', e.verbose_msg)
Logs.error(str(e))
sys.exit(1)
except Exception as e:
Logs.error('Waf: The wscript in %r is unreadable' % Context.run_dir, e)
Logs.error('Waf: The wscript in %r is unreadable', Context.run_dir)
traceback.print_exc(file=sys.stdout)
sys.exit(2)

"""
import cProfile, pstats
cProfile.runctx("from waflib import Scripting; Scripting.run_commands()", {}, {}, 'profi.txt')
p = pstats.Stats('profi.txt')
p.sort_stats('time').print_stats(75) # or 'cumulative'
"""
try:
run_commands()
except Errors.WafError as e:
if Logs.verbose > 1:
Logs.pprint('RED', e.verbose_msg)
Logs.error(e.msg)
sys.exit(1)
except SystemExit:
raise
except Exception as e:
traceback.print_exc(file=sys.stdout)
sys.exit(2)
except KeyboardInterrupt:
Logs.pprint('RED', 'Interrupted')
sys.exit(68)
#"""
if options.profile:
import cProfile, pstats
cProfile.runctx('from waflib import Scripting; Scripting.run_commands()', {}, {}, 'profi.txt')
p = pstats.Stats('profi.txt')
p.sort_stats('time').print_stats(75) # or 'cumulative'
else:
try:
try:
run_commands()
except:
if options.pdb:
import pdb
type, value, tb = sys.exc_info()
traceback.print_exc()
pdb.post_mortem(tb)
else:
raise
except Errors.WafError as e:
if Logs.verbose > 1:
Logs.pprint('RED', e.verbose_msg)
Logs.error(e.msg)
sys.exit(1)
except SystemExit:
raise
except Exception as e:
traceback.print_exc(file=sys.stdout)
sys.exit(2)
except KeyboardInterrupt:
Logs.pprint('RED', 'Interrupted')
sys.exit(68)

def set_main_module(file_path):
"""
@@ -193,7 +198,7 @@ def set_main_module(file_path):
name = obj.__name__
if not name in Context.g_module.__dict__:
setattr(Context.g_module, name, obj)
for k in (update, dist, distclean, distcheck, update):
for k in (dist, distclean, distcheck):
set_def(k)
# add dummy init and shutdown functions if they're not defined
if not 'init' in Context.g_module.__dict__:
@@ -205,36 +210,20 @@ def set_main_module(file_path):

def parse_options():
"""
Parse the command-line options and initialize the logging system.
Parses the command-line options and initialize the logging system.
Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization.
"""
Context.create_context('options').execute()

for var in Options.envvars:
(name, value) = var.split('=', 1)
os.environ[name.strip()] = value

ctx = Context.create_context('options')
ctx.execute()
if not Options.commands:
Options.commands = [default_cmd]
Options.commands = [x for x in Options.commands if x != 'options'] # issue 1076

# process some internal Waf options
Logs.verbose = Options.options.verbose
#Logs.init_log()

if Options.options.zones:
Logs.zones = Options.options.zones.split(',')
if not Logs.verbose:
Logs.verbose = 1
elif Logs.verbose > 0:
Logs.zones = ['runner']

if Logs.verbose > 2:
Logs.zones = ['*']
Options.commands.append(default_cmd)
if Options.options.whelp:
ctx.parser.print_help()
sys.exit(0)

def run_command(cmd_name):
"""
Execute a single command. Called by :py:func:`waflib.Scripting.run_commands`.
Executes a single Waf command. Called by :py:func:`waflib.Scripting.run_commands`.

:param cmd_name: command to execute, like ``build``
:type cmd_name: string
@@ -252,7 +241,7 @@ def run_command(cmd_name):

def run_commands():
"""
Execute the commands that were given on the command-line, and the other options
Execute the Waf commands that were given on the command-line, and the other options
Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization, and executed
after :py:func:`waflib.Scripting.parse_options`.
"""
@@ -261,18 +250,11 @@ def run_commands():
while Options.commands:
cmd_name = Options.commands.pop(0)
ctx = run_command(cmd_name)
Logs.info('%r finished successfully (%s)' % (cmd_name, str(ctx.log_timer)))
Logs.info('%r finished successfully (%s)', cmd_name, ctx.log_timer)
run_command('shutdown')

###########################################################################################

def _can_distclean(name):
# WARNING: this method may disappear anytime
for k in '.o .moc .exe'.split():
if name.endswith(k):
return True
return False

def distclean_dir(dirname):
"""
Distclean function called in the particular case when::
@@ -284,12 +266,12 @@ def distclean_dir(dirname):
"""
for (root, dirs, files) in os.walk(dirname):
for f in files:
if _can_distclean(f):
if f.endswith(('.o', '.moc', '.exe')):
fname = os.path.join(root, f)
try:
os.remove(fname)
except OSError:
Logs.warn('Could not remove %r' % fname)
Logs.warn('Could not remove %r', fname)

for x in (Context.DBFILE, 'config.log'):
try:
@@ -303,40 +285,53 @@ def distclean_dir(dirname):
pass

def distclean(ctx):
'''removes the build directory'''
lst = os.listdir('.')
for f in lst:
if f == Options.lockfile:
try:
proj = ConfigSet.ConfigSet(f)
except IOError:
Logs.warn('Could not read %r' % f)
continue
'''removes build folders and data'''

if proj['out_dir'] != proj['top_dir']:
try:
shutil.rmtree(proj['out_dir'])
except IOError:
pass
except OSError as e:
if e.errno != errno.ENOENT:
Logs.warn('Could not remove %r' % proj['out_dir'])
else:
distclean_dir(proj['out_dir'])
def remove_and_log(k, fun):
try:
fun(k)
except EnvironmentError as e:
if e.errno != errno.ENOENT:
Logs.warn('Could not remove %r', k)

for k in (proj['out_dir'], proj['top_dir'], proj['run_dir']):
p = os.path.join(k, Options.lockfile)
try:
os.remove(p)
except OSError as e:
if e.errno != errno.ENOENT:
Logs.warn('Could not remove %r' % p)
# remove waf cache folders on the top-level
if not Options.commands:
for k in os.listdir('.'):
for x in '.waf-2 waf-2 .waf3-2 waf3-2'.split():
if k.startswith(x):
remove_and_log(k, shutil.rmtree)

# remove a build folder, if any
cur = '.'
if ctx.options.no_lock_in_top:
cur = ctx.options.out

try:
lst = os.listdir(cur)
except OSError:
Logs.warn('Could not read %r', cur)
return

if Options.lockfile in lst:
f = os.path.join(cur, Options.lockfile)
try:
env = ConfigSet.ConfigSet(f)
except EnvironmentError:
Logs.warn('Could not read %r', f)
return

if not env.out_dir or not env.top_dir:
Logs.warn('Invalid lock file %r', f)
return

if env.out_dir == env.top_dir:
distclean_dir(env.out_dir)
else:
remove_and_log(env.out_dir, shutil.rmtree)

# remove local waf cache folders
if not Options.commands:
for x in '.waf-1. waf-1. .waf3-1. waf3-1.'.split():
if f.startswith(x):
shutil.rmtree(f, ignore_errors=True)
for k in (env.out_dir, env.top_dir, env.run_dir):
p = os.path.join(k, Options.lockfile)
remove_and_log(p, os.remove)

class Dist(Context.Context):
'''creates an archive containing the project source code'''
@@ -354,7 +349,7 @@ class Dist(Context.Context):

def archive(self):
"""
Create the archive.
Creates the source archive.
"""
import tarfile

@@ -374,14 +369,14 @@ class Dist(Context.Context):
files = self.get_files()

if self.algo.startswith('tar.'):
tar = tarfile.open(arch_name, 'w:' + self.algo.replace('tar.', ''))
tar = tarfile.open(node.abspath(), 'w:' + self.algo.replace('tar.', ''))

for x in files:
self.add_tar_file(x, tar)
tar.close()
elif self.algo == 'zip':
import zipfile
zip = zipfile.ZipFile(arch_name, 'w', compression=zipfile.ZIP_DEFLATED)
zip = zipfile.ZipFile(node.abspath(), 'w', compression=zipfile.ZIP_DEFLATED)

for x in files:
archive_name = self.get_base_name() + '/' + x.path_from(self.base_path)
@@ -391,26 +386,30 @@ class Dist(Context.Context):
self.fatal('Valid algo types are tar.bz2, tar.gz, tar.xz or zip')

try:
from hashlib import sha1 as sha
from hashlib import sha256
except ImportError:
from sha import sha
try:
digest = " (sha=%r)" % sha(node.read()).hexdigest()
except Exception:
digest = ''
else:
digest = ' (sha256=%r)' % sha256(node.read(flags='rb')).hexdigest()

Logs.info('New archive created: %s%s' % (self.arch_name, digest))
Logs.info('New archive created: %s%s', self.arch_name, digest)

def get_tar_path(self, node):
"""
return the path to use for a node in the tar archive, the purpose of this
Return the path to use for a node in the tar archive, the purpose of this
is to let subclases resolve symbolic links or to change file names

:return: absolute path
:rtype: string
"""
return node.abspath()

def add_tar_file(self, x, tar):
"""
Add a file to the tar archive. Transform symlinks into files if the files lie out of the project tree.
Adds a file to the tar archive. Symlinks are not verified.

:param x: file path
:param tar: tar file object
"""
p = self.get_tar_path(x)
tinfo = tar.gettarinfo(name=p, arcname=self.get_tar_prefix() + '/' + x.path_from(self.base_path))
@@ -419,15 +418,18 @@ class Dist(Context.Context):
tinfo.uname = 'root'
tinfo.gname = 'root'

fu = None
try:
fu = open(p, 'rb')
tar.addfile(tinfo, fileobj=fu)
finally:
if fu:
fu.close()
if os.path.isfile(p):
with open(p, 'rb') as f:
tar.addfile(tinfo, fileobj=f)
else:
tar.addfile(tinfo)

def get_tar_prefix(self):
"""
Returns the base path for files added into the archive tar file

:rtype: string
"""
try:
return self.tar_prefix
except AttributeError:
@@ -435,7 +437,8 @@ class Dist(Context.Context):

def get_arch_name(self):
"""
Return the name of the archive to create. Change the default value by setting *arch_name*::
Returns the archive file name.
Set the attribute *arch_name* to change the default value::

def dist(ctx):
ctx.arch_name = 'ctx.tar.bz2'
@@ -450,7 +453,7 @@ class Dist(Context.Context):

def get_base_name(self):
"""
Return the default name of the main directory in the archive, which is set to *appname-version*.
Returns the default name of the main directory in the archive, which is set to *appname-version*.
Set the attribute *base_name* to change the default value::

def dist(ctx):
@@ -468,8 +471,8 @@ class Dist(Context.Context):

def get_excl(self):
"""
Return the patterns to exclude for finding the files in the top-level directory. Set the attribute *excl*
to change the default value::
Returns the patterns to exclude for finding the files in the top-level directory.
Set the attribute *excl* to change the default value::

def dist(ctx):
ctx.excl = 'build **/*.o **/*.class'
@@ -479,7 +482,7 @@ class Dist(Context.Context):
try:
return self.excl
except AttributeError:
self.excl = Node.exclude_regs + ' **/waf-1.8.* **/.waf-1.8* **/waf3-1.8.* **/.waf3-1.8* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
self.excl = Node.exclude_regs + ' **/waf-2.* **/.waf-2.* **/waf3-2.* **/.waf3-2.* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
if Context.out_dir:
nd = self.root.find_node(Context.out_dir)
if nd:
@@ -488,13 +491,13 @@ class Dist(Context.Context):

def get_files(self):
"""
The files to package are searched automatically by :py:func:`waflib.Node.Node.ant_glob`. Set
*files* to prevent this behaviour::
Files to package are searched automatically by :py:func:`waflib.Node.Node.ant_glob`.
Set *files* to prevent this behaviour::

def dist(ctx):
ctx.files = ctx.path.find_node('wscript')

The files are searched from the directory 'base_path', to change it, set::
Files are also searched from the directory 'base_path', to change it, set::

def dist(ctx):
ctx.base_path = path
@@ -507,18 +510,12 @@ class Dist(Context.Context):
files = self.base_path.ant_glob('**/*', excl=self.get_excl())
return files


def dist(ctx):
'''makes a tarball for redistributing the sources'''
pass

class DistCheck(Dist):
"""
Create an archive of the project, and try to build the project in a temporary directory::

$ waf distcheck
"""

"""creates an archive with dist, then tries to build it"""
fun = 'distcheck'
cmd = 'distcheck'

@@ -530,32 +527,30 @@ class DistCheck(Dist):
self.archive()
self.check()

def make_distcheck_cmd(self, tmpdir):
cfg = []
if Options.options.distcheck_args:
cfg = shlex.split(Options.options.distcheck_args)
else:
cfg = [x for x in sys.argv if x.startswith('-')]
cmd = [sys.executable, sys.argv[0], 'configure', 'build', 'install', 'uninstall', '--destdir=' + tmpdir] + cfg
return cmd

def check(self):
"""
Create the archive, uncompress it and try to build the project
Creates the archive, uncompresses it and tries to build the project
"""
import tempfile, tarfile

t = None
try:
t = tarfile.open(self.get_arch_name())
with tarfile.open(self.get_arch_name()) as t:
for x in t:
t.extract(x)
finally:
if t:
t.close()

cfg = []

if Options.options.distcheck_args:
cfg = shlex.split(Options.options.distcheck_args)
else:
cfg = [x for x in sys.argv if x.startswith('-')]

instdir = tempfile.mkdtemp('.inst', self.get_base_name())
ret = Utils.subprocess.Popen([sys.executable, sys.argv[0], 'configure', 'install', 'uninstall', '--destdir=' + instdir] + cfg, cwd=self.get_base_name()).wait()
cmd = self.make_distcheck_cmd(instdir)
ret = Utils.subprocess.Popen(cmd, cwd=self.get_base_name()).wait()
if ret:
raise Errors.WafError('distcheck failed with code %i' % ret)
raise Errors.WafError('distcheck failed with code %r' % ret)

if os.path.exists(instdir):
raise Errors.WafError('distcheck succeeded, but files were left in %s' % instdir)
@@ -567,33 +562,14 @@ def distcheck(ctx):
'''checks if the project compiles (tarball from 'dist')'''
pass

def update(ctx):
lst = Options.options.files
if lst:
lst = lst.split(',')
else:
path = os.path.join(Context.waf_dir, 'waflib', 'extras')
lst = [x for x in Utils.listdir(path) if x.endswith('.py')]
for x in lst:
tool = x.replace('.py', '')
if not tool:
continue
try:
dl = Configure.download_tool
except AttributeError:
ctx.fatal('The command "update" is dangerous; include the tool "use_config" in your project!')
try:
dl(tool, force=True, ctx=ctx)
except Errors.WafError:
Logs.error('Could not find the tool %r in the remote repository' % x)
else:
Logs.warn('Updated %r' % tool)

def autoconfigure(execute_method):
"""
Decorator used to set the commands that can be configured automatically
Decorator that enables context commands to run *configure* as needed.
"""
def execute(self):
"""
Wraps :py:func:`waflib.Context.Context.execute` on the context class
"""
if not Configure.autoconfig:
return execute_method(self)

@@ -601,7 +577,7 @@ def autoconfigure(execute_method):
do_config = False
try:
env.load(os.path.join(Context.top_dir, Options.lockfile))
except Exception:
except EnvironmentError:
Logs.warn('Configuring the project')
do_config = True
else:
@@ -609,18 +585,30 @@ def autoconfigure(execute_method):
do_config = True
else:
h = 0
for f in env['files']:
h = Utils.h_list((h, Utils.readf(f, 'rb')))
do_config = h != env.hash
for f in env.files:
try:
h = Utils.h_list((h, Utils.readf(f, 'rb')))
except EnvironmentError:
do_config = True
break
else:
do_config = h != env.hash

if do_config:
Options.commands.insert(0, self.cmd)
Options.commands.insert(0, 'configure')
cmd = env.config_cmd or 'configure'
if Configure.autoconfig == 'clobber':
Options.options.__dict__ = env.options
return

return execute_method(self)
tmp = Options.options.__dict__
if env.options:
Options.options.__dict__ = env.options
try:
run_command(cmd)
finally:
Options.options.__dict__ = tmp
else:
run_command(cmd)
run_command(self.cmd)
else:
return execute_method(self)
return execute
Build.BuildContext.execute = autoconfigure(Build.BuildContext.execute)


+ 657
- 478
waflib/Task.py
File diff suppressed because it is too large
View File


+ 205
- 147
waflib/TaskGen.py View File

@@ -1,18 +1,16 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
# Thomas Nagy, 2005-2018 (ita)

"""
Task generators

The class :py:class:`waflib.TaskGen.task_gen` encapsulates the creation of task objects (low-level code)
The instances can have various parameters, but the creation of task nodes (Task.py)
is always postponed. To achieve this, various methods are called from the method "apply"


is deferred. To achieve this, various methods are called from the method "apply"
"""

import copy, re, os
import copy, re, os, functools
from waflib import Task, Utils, Logs, Errors, ConfigSet, Node

feats = Utils.defaultdict(set)
@@ -22,7 +20,7 @@ HEADER_EXTS = ['.h', '.hpp', '.hxx', '.hh']

class task_gen(object):
"""
Instances of this class create :py:class:`waflib.Task.TaskBase` when
Instances of this class create :py:class:`waflib.Task.Task` when
calling the method :py:meth:`waflib.TaskGen.task_gen.post` from the main thread.
A few notes:

@@ -34,42 +32,28 @@ class task_gen(object):
"""

mappings = Utils.ordered_iter_dict()
"""Mappings are global file extension mappings, they are retrieved in the order of definition"""
"""Mappings are global file extension mappings that are retrieved in the order of definition"""

prec = Utils.defaultdict(list)
"""Dict holding the precedence rules for task generator methods"""
prec = Utils.defaultdict(set)
"""Dict that holds the precedence execution rules for task generator methods"""

def __init__(self, *k, **kw):
"""
The task generator objects predefine various attributes (source, target) for possible
Task generator objects predefine various attributes (source, target) for possible
processing by process_rule (make-like rules) or process_source (extensions, misc methods)

The tasks are stored on the attribute 'tasks'. They are created by calling methods
listed in self.meths *or* referenced in the attribute features
A topological sort is performed to ease the method re-use.
Tasks are stored on the attribute 'tasks'. They are created by calling methods
listed in ``self.meths`` or referenced in the attribute ``features``
A topological sort is performed to execute the methods in correct order.

The extra key/value elements passed in kw are set as attributes
The extra key/value elements passed in ``kw`` are set as attributes
"""

# so we will have to play with directed acyclic graphs
# detect cycles, etc
self.source = ''
self.source = []
self.target = ''

self.meths = []
"""
List of method names to execute (it is usually a good idea to avoid touching this)
"""

self.prec = Utils.defaultdict(list)
"""
Precedence table for sorting the methods in self.meths
"""

self.mappings = {}
"""
List of mappings {extension -> function} for processing files by extension
This is very rarely used, so we do not use an ordered dict here
List of method names to execute (internal)
"""

self.features = []
@@ -79,7 +63,7 @@ class task_gen(object):

self.tasks = []
"""
List of tasks created.
Tasks created are added to this list
"""

if not 'bld' in kw:
@@ -92,31 +76,50 @@ class task_gen(object):
self.env = self.bld.env.derive()
self.path = self.bld.path # emulate chdir when reading scripts

# provide a unique id
# Provide a unique index per folder
# This is part of a measure to prevent output file name collisions
path = self.path.abspath()
try:
self.idx = self.bld.idx[id(self.path)] = self.bld.idx.get(id(self.path), 0) + 1
self.idx = self.bld.idx[path] = self.bld.idx.get(path, 0) + 1
except AttributeError:
self.bld.idx = {}
self.idx = self.bld.idx[id(self.path)] = 1
self.idx = self.bld.idx[path] = 1

# Record the global task generator count
try:
self.tg_idx_count = self.bld.tg_idx_count = self.bld.tg_idx_count + 1
except AttributeError:
self.tg_idx_count = self.bld.tg_idx_count = 1

for key, val in kw.items():
setattr(self, key, val)

def __str__(self):
"""for debugging purposes"""
"""Debugging helper"""
return "<task_gen %r declared in %s>" % (self.name, self.path.abspath())

def __repr__(self):
"""for debugging purposes"""
"""Debugging helper"""
lst = []
for x in self.__dict__.keys():
for x in self.__dict__:
if x not in ('env', 'bld', 'compiled_tasks', 'tasks'):
lst.append("%s=%s" % (x, repr(getattr(self, x))))
return "bld(%s) in %s" % (", ".join(lst), self.path.abspath())

def get_cwd(self):
"""
Current working directory for the task generator, defaults to the build directory.
This is still used in a few places but it should disappear at some point as the classes
define their own working directory.

:rtype: :py:class:`waflib.Node.Node`
"""
return self.bld.bldnode

def get_name(self):
"""
If not set, the name is computed from the target name::
If the attribute ``name`` is not set on the instance,
the name is computed from the target name::

def build(bld):
x = bld(name='foo')
@@ -143,18 +146,20 @@ class task_gen(object):

def to_list(self, val):
"""
Ensure that a parameter is a list
Ensures that a parameter is a list, see :py:func:`waflib.Utils.to_list`

:type val: string or list of string
:param val: input to return as a list
:rtype: list
"""
if isinstance(val, str): return val.split()
else: return val
if isinstance(val, str):
return val.split()
else:
return val

def post(self):
"""
Create task objects. The following operations are performed:
Creates tasks for this task generators. The following operations are performed:

#. The body of this method is called only once and sets the attribute ``posted``
#. The attribute ``features`` is used to add more methods in ``self.meths``
@@ -162,27 +167,25 @@ class task_gen(object):
#. The methods are then executed in order
#. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks`
"""

# we could add a decorator to let the task run once, but then python 2.3 will be difficult to support
if getattr(self, 'posted', None):
#error("OBJECT ALREADY POSTED" + str( self))
return False
self.posted = True

keys = set(self.meths)
keys.update(feats['*'])

# add the methods listed in the features
self.features = Utils.to_list(self.features)
for x in self.features + ['*']:
for x in self.features:
st = feats[x]
if not st:
if not x in Task.classes:
Logs.warn('feature %r does not exist - bind at least one method to it' % x)
keys.update(list(st)) # ironpython 2.7 wants the cast to list
if st:
keys.update(st)
elif not x in Task.classes:
Logs.warn('feature %r does not exist - bind at least one method to it?', x)

# copy the precedence table
prec = {}
prec_tbl = self.prec or task_gen.prec
prec_tbl = self.prec
for x in prec_tbl:
if x in keys:
prec[x] = prec_tbl[x]
@@ -191,17 +194,19 @@ class task_gen(object):
tmp = []
for a in keys:
for x in prec.values():
if a in x: break
if a in x:
break
else:
tmp.append(a)

tmp.sort()
tmp.sort(reverse=True)

# topological sort
out = []
while tmp:
e = tmp.pop()
if e in keys: out.append(e)
if e in keys:
out.append(e)
try:
nlst = prec[e]
except KeyError:
@@ -214,46 +219,52 @@ class task_gen(object):
break
else:
tmp.append(x)
tmp.sort(reverse=True)

if prec:
raise Errors.WafError('Cycle detected in the method execution %r' % prec)
out.reverse()
buf = ['Cycle detected in the method execution:']
for k, v in prec.items():
buf.append('- %s after %s' % (k, [x for x in v if x in prec]))
raise Errors.WafError('\n'.join(buf))
self.meths = out

# then we run the methods in order
Logs.debug('task_gen: posting %s %d' % (self, id(self)))
Logs.debug('task_gen: posting %s %d', self, id(self))
for x in out:
try:
v = getattr(self, x)
except AttributeError:
raise Errors.WafError('%r is not a valid task generator method' % x)
Logs.debug('task_gen: -> %s (%d)' % (x, id(self)))
Logs.debug('task_gen: -> %s (%d)', x, id(self))
v()

Logs.debug('task_gen: posted %s' % self.name)
Logs.debug('task_gen: posted %s', self.name)
return True

def get_hook(self, node):
"""
Returns the ``@extension`` method to call for a Node of a particular extension.

:param node: Input file to process
:type node: :py:class:`waflib.Tools.Node.Node`
:return: A method able to process the input node by looking at the extension
:rtype: function
"""
name = node.name
if self.mappings:
for k in self.mappings:
for k in self.mappings:
try:
if name.endswith(k):
return self.mappings[k]
for k in task_gen.mappings:
if name.endswith(k):
return task_gen.mappings[k]
raise Errors.WafError("File %r has no mapping in %r (have you forgotten to load a waf tool?)" % (node, task_gen.mappings.keys()))
except TypeError:
# regexps objects
if k.match(name):
return self.mappings[k]
keys = list(self.mappings.keys())
raise Errors.WafError("File %r has no mapping in %r (load a waf tool?)" % (node, keys))

def create_task(self, name, src=None, tgt=None, **kw):
"""
Wrapper for creating task instances. The classes are retrieved from the
context class if possible, then from the global dict Task.classes.
Creates task instances.

:param name: task class name
:type name: string
@@ -262,7 +273,7 @@ class task_gen(object):
:param tgt: output nodes
:type tgt: list of :py:class:`waflib.Tools.Node.Node`
:return: A task object
:rtype: :py:class:`waflib.Task.TaskBase`
:rtype: :py:class:`waflib.Task.Task`
"""
task = Task.classes[name](env=self.env.derive(), generator=self)
if src:
@@ -275,7 +286,7 @@ class task_gen(object):

def clone(self, env):
"""
Make a copy of a task generator. Once the copy is made, it is necessary to ensure that the
Makes a copy of a task generator. Once the copy is made, it is necessary to ensure that the
it does not create the same output files as the original, or the same files may
be compiled several times.

@@ -304,7 +315,7 @@ class task_gen(object):
def declare_chain(name='', rule=None, reentrant=None, color='BLUE',
ext_in=[], ext_out=[], before=[], after=[], decider=None, scan=None, install_path=None, shell=False):
"""
Create a new mapping and a task class for processing files by extension.
Creates a new mapping and a task class for processing files by extension.
See Tools/flex.py for an example.

:param name: name for the task class
@@ -323,7 +334,7 @@ def declare_chain(name='', rule=None, reentrant=None, color='BLUE',
:type before: list of string
:param after: execute instances of this task after classes of the given names
:type after: list of string
:param decider: if present, use it to create the output nodes for the task
:param decider: if present, function that returns a list of output file extensions (overrides ext_out for output files, but not for the build order)
:type decider: function
:param scan: scanner function for the task
:type scan: function
@@ -337,14 +348,13 @@ def declare_chain(name='', rule=None, reentrant=None, color='BLUE',
cls = Task.task_factory(name, rule, color=color, ext_in=ext_in, ext_out=ext_out, before=before, after=after, scan=scan, shell=shell)

def x_file(self, node):
ext = decider and decider(self, node) or cls.ext_out
if ext_in:
_ext_in = ext_in[0]

tsk = self.create_task(name, node)
cnt = 0

keys = set(self.mappings.keys()) | set(self.__class__.mappings.keys())
ext = decider(self, node) if decider else cls.ext_out
for x in ext:
k = node.change_ext(x, ext_in=_ext_in)
tsk.outputs.append(k)
@@ -354,14 +364,14 @@ def declare_chain(name='', rule=None, reentrant=None, color='BLUE',
self.source.append(k)
else:
# reinject downstream files into the build
for y in keys: # ~ nfile * nextensions :-/
for y in self.mappings: # ~ nfile * nextensions :-/
if k.name.endswith(y):
self.source.append(k)
break
cnt += 1

if install_path:
self.bld.install_files(install_path, tsk.outputs)
self.install_task = self.add_install_files(install_to=install_path, install_from=tsk.outputs)
return tsk

for x in cls.ext_in:
@@ -370,7 +380,7 @@ def declare_chain(name='', rule=None, reentrant=None, color='BLUE',

def taskgen_method(func):
"""
Decorator: register a method as a task generator method.
Decorator that registers method as a task generator method.
The function must accept a task generator as first parameter::

from waflib.TaskGen import taskgen_method
@@ -387,8 +397,8 @@ def taskgen_method(func):

def feature(*k):
"""
Decorator: register a task generator method that will be executed when the
object attribute 'feature' contains the corresponding key(s)::
Decorator that registers a task generator method that will be executed when the
object attribute ``feature`` contains the corresponding key(s)::

from waflib.Task import feature
@feature('myfeature')
@@ -409,7 +419,7 @@ def feature(*k):

def before_method(*k):
"""
Decorator: register a task generator method which will be executed
Decorator that registera task generator method which will be executed
before the functions of given name(s)::

from waflib.TaskGen import feature, before
@@ -429,16 +439,14 @@ def before_method(*k):
def deco(func):
setattr(task_gen, func.__name__, func)
for fun_name in k:
if not func.__name__ in task_gen.prec[fun_name]:
task_gen.prec[fun_name].append(func.__name__)
#task_gen.prec[fun_name].sort()
task_gen.prec[func.__name__].add(fun_name)
return func
return deco
before = before_method

def after_method(*k):
"""
Decorator: register a task generator method which will be executed
Decorator that registers a task generator method which will be executed
after the functions of given name(s)::

from waflib.TaskGen import feature, after
@@ -458,16 +466,14 @@ def after_method(*k):
def deco(func):
setattr(task_gen, func.__name__, func)
for fun_name in k:
if not fun_name in task_gen.prec[func.__name__]:
task_gen.prec[func.__name__].append(fun_name)
#task_gen.prec[func.__name__].sort()
task_gen.prec[fun_name].add(func.__name__)
return func
return deco
after = after_method

def extension(*k):
"""
Decorator: register a task generator method which will be invoked during
Decorator that registers a task generator method which will be invoked during
the processing of source files for the extension given::

from waflib import Task
@@ -486,14 +492,11 @@ def extension(*k):
return func
return deco

# ---------------------------------------------------------------
# The following methods are task generator methods commonly used
# they are almost examples, the rest of waf core does not depend on them

@taskgen_method
def to_nodes(self, lst, path=None):
"""
Convert the input list into a list of nodes.
Flatten the input list of string/nodes/lists into a list of nodes.

It is used by :py:func:`waflib.TaskGen.process_source` and :py:func:`waflib.TaskGen.process_rule`.
It is designed for source files, for folders, see :py:func:`waflib.Tools.ccroot.to_incnodes`:

@@ -510,21 +513,23 @@ def to_nodes(self, lst, path=None):
if isinstance(lst, Node.Node):
lst = [lst]

# either a list or a string, convert to a list of nodes
for x in Utils.to_list(lst):
if isinstance(x, str):
node = find(x)
else:
elif hasattr(x, 'name'):
node = x
else:
tmp.extend(self.to_nodes(x))
continue
if not node:
raise Errors.WafError("source not found: %r in %r" % (x, self))
raise Errors.WafError('source not found: %r in %r' % (x, self))
tmp.append(node)
return tmp

@feature('*')
def process_source(self):
"""
Process each element in the attribute ``source`` by extension.
Processes each element in the attribute ``source`` by extension.

#. The *source* list is converted through :py:meth:`waflib.TaskGen.to_nodes` to a list of :py:class:`waflib.Node.Node` first.
#. File extensions are mapped to methods having the signature: ``def meth(self, node)`` by :py:meth:`waflib.TaskGen.extension`
@@ -540,10 +545,29 @@ def process_source(self):
@before_method('process_source')
def process_rule(self):
"""
Process the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled::
Processes the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled::

def build(bld):
bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt')

Main attributes processed:

* rule: command to execute, it can be a tuple of strings for multiple commands
* chmod: permissions for the resulting files (integer value such as Utils.O755)
* shell: set to False to execute the command directly (default is True to use a shell)
* scan: scanner function
* vars: list of variables to trigger rebuilds, such as CFLAGS
* cls_str: string to display when executing the task
* cls_keyword: label to display when executing the task
* cache_rule: by default, try to re-use similar classes, set to False to disable
* source: list of Node or string objects representing the source files required by this task
* target: list of Node or string objects representing the files that this task creates
* cwd: current working directory (Node or string)
* stdout: standard output, set to None to prevent waf from capturing the text
* stderr: standard error, set to None to prevent waf from capturing the text
* timeout: timeout for command execution (Python 3)
* always: whether to always run the command (False by default)
* deep_inputs: whether the task must depend on the input file tasks too (False by default)
"""
if not getattr(self, 'rule', None):
return
@@ -557,28 +581,55 @@ def process_rule(self):
except AttributeError:
cache = self.bld.cache_rule_attr = {}

chmod = getattr(self, 'chmod', None)
shell = getattr(self, 'shell', True)
color = getattr(self, 'color', 'BLUE')
scan = getattr(self, 'scan', None)
_vars = getattr(self, 'vars', [])
cls_str = getattr(self, 'cls_str', None)
cls_keyword = getattr(self, 'cls_keyword', None)
use_cache = getattr(self, 'cache_rule', 'True')
deep_inputs = getattr(self, 'deep_inputs', False)

scan_val = has_deps = hasattr(self, 'deps')
if scan:
scan_val = id(scan)

key = Utils.h_list((name, self.rule, chmod, shell, color, cls_str, cls_keyword, scan_val, _vars, deep_inputs))

cls = None
if getattr(self, 'cache_rule', 'True'):
if use_cache:
try:
cls = cache[(name, self.rule)]
cls = cache[key]
except KeyError:
pass
if not cls:

rule = self.rule
if hasattr(self, 'chmod'):
if chmod is not None:
def chmod_fun(tsk):
for x in tsk.outputs:
os.chmod(x.abspath(), self.chmod)
rule = (self.rule, chmod_fun)

cls = Task.task_factory(name, rule,
getattr(self, 'vars', []),
shell=getattr(self, 'shell', True), color=getattr(self, 'color', 'BLUE'),
scan = getattr(self, 'scan', None))
if getattr(self, 'scan', None):
os.chmod(x.abspath(), tsk.generator.chmod)
if isinstance(rule, tuple):
rule = list(rule)
rule.append(chmod_fun)
rule = tuple(rule)
else:
rule = (rule, chmod_fun)

cls = Task.task_factory(name, rule, _vars, shell=shell, color=color)

if cls_str:
setattr(cls, '__str__', self.cls_str)

if cls_keyword:
setattr(cls, 'keyword', self.cls_keyword)

if deep_inputs:
Task.deep_inputs(cls)

if scan:
cls.scan = self.scan
elif getattr(self, 'deps', None):
elif has_deps:
def scan(self):
nodes = []
for x in self.generator.to_list(getattr(self.generator, 'deps', None)):
@@ -589,26 +640,26 @@ def process_rule(self):
return [nodes, []]
cls.scan = scan

if getattr(self, 'update_outputs', None):
Task.update_outputs(cls)
if use_cache:
cache[key] = cls

if getattr(self, 'always', None):
Task.always_run(cls)
# now create one instance
tsk = self.create_task(name)

for x in ('after', 'before', 'ext_in', 'ext_out'):
setattr(cls, x, getattr(self, x, []))
for x in ('after', 'before', 'ext_in', 'ext_out'):
setattr(tsk, x, getattr(self, x, []))

if getattr(self, 'cache_rule', 'True'):
cache[(name, self.rule)] = cls
if hasattr(self, 'stdout'):
tsk.stdout = self.stdout

if getattr(self, 'cls_str', None):
setattr(cls, '__str__', self.cls_str)
if hasattr(self, 'stderr'):
tsk.stderr = self.stderr

if getattr(self, 'cls_keyword', None):
setattr(cls, 'keyword', self.cls_keyword)
if getattr(self, 'timeout', None):
tsk.timeout = self.timeout

# now create one instance
tsk = self.create_task(name)
if getattr(self, 'always', None):
tsk.always_run = True

if getattr(self, 'target', None):
if isinstance(self.target, str):
@@ -622,7 +673,8 @@ def process_rule(self):
x.parent.mkdir() # if a node was given, create the required folders
tsk.outputs.append(x)
if getattr(self, 'install_path', None):
self.bld.install_files(self.install_path, tsk.outputs, chmod=getattr(self, 'chmod', Utils.O644))
self.install_task = self.add_install_files(install_to=self.install_path,
install_from=tsk.outputs, chmod=getattr(self, 'chmod', Utils.O644))

if getattr(self, 'source', None):
tsk.inputs = self.to_nodes(self.source)
@@ -632,10 +684,16 @@ def process_rule(self):
if getattr(self, 'cwd', None):
tsk.cwd = self.cwd

if isinstance(tsk.run, functools.partial):
# Python documentation says: "partial objects defined in classes
# behave like static methods and do not transform into bound
# methods during instance attribute look-up."
tsk.run = functools.partial(tsk.run, tsk)

@feature('seq')
def sequence_order(self):
"""
Add a strict sequential constraint between the tasks generated by task generators.
Adds a strict sequential constraint between the tasks generated by task generators.
It works because task generators are posted in order.
It will not post objects which belong to other folders.

@@ -673,7 +731,7 @@ re_m4 = re.compile('@(\w+)@', re.M)

class subst_pc(Task.Task):
"""
Create *.pc* files from *.pc.in*. The task is executed whenever an input variable used
Creates *.pc* files from *.pc.in*. The task is executed whenever an input variable used
in the substitution changes.
"""

@@ -689,6 +747,8 @@ class subst_pc(Task.Task):
if getattr(self.generator, 'is_copy', None):
for i, x in enumerate(self.outputs):
x.write(self.inputs[i].read('rb'), 'wb')
stat = os.stat(self.inputs[i].abspath()) # Preserve mtime of the copy
os.utime(self.outputs[i].abspath(), (stat.st_atime, stat.st_mtime))
self.force_permissions()
return None

@@ -698,11 +758,11 @@ class subst_pc(Task.Task):
self.force_permissions()
return ret

code = self.inputs[0].read(encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
code = self.inputs[0].read(encoding=getattr(self.generator, 'encoding', 'latin-1'))
if getattr(self.generator, 'subst_fun', None):
code = self.generator.subst_fun(self, code)
if code is not None:
self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'latin-1'))
self.force_permissions()
return None

@@ -717,7 +777,6 @@ class subst_pc(Task.Task):
lst.append(g(1))
return "%%(%s)s" % g(1)
return ''
global re_m4
code = getattr(self.generator, 're_m4', re_m4).sub(repl, code)

try:
@@ -733,12 +792,14 @@ class subst_pc(Task.Task):
d[x] = tmp

code = code % d
self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
self.generator.bld.raw_deps[self.uid()] = self.dep_vars = lst
self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'latin-1'))
self.generator.bld.raw_deps[self.uid()] = lst

# make sure the signature is updated
try: delattr(self, 'cache_sig')
except AttributeError: pass
try:
delattr(self, 'cache_sig')
except AttributeError:
pass

self.force_permissions()

@@ -770,13 +831,14 @@ class subst_pc(Task.Task):
@extension('.pc.in')
def add_pcfile(self, node):
"""
Process *.pc.in* files to *.pc*. Install the results to ``${PREFIX}/lib/pkgconfig/``
Processes *.pc.in* files to *.pc*. Installs the results to ``${PREFIX}/lib/pkgconfig/`` by default

def build(bld):
bld(source='foo.pc.in', install_path='${LIBDIR}/pkgconfig/')
"""
tsk = self.create_task('subst_pc', node, node.change_ext('.pc', '.pc.in'))
self.bld.install_files(getattr(self, 'install_path', '${LIBDIR}/pkgconfig/'), tsk.outputs)
self.install_task = self.add_install_files(
install_to=getattr(self, 'install_path', '${LIBDIR}/pkgconfig/'), install_from=tsk.outputs)

class subst(subst_pc):
pass
@@ -785,7 +847,7 @@ class subst(subst_pc):
@before_method('process_source', 'process_rule')
def process_subst(self):
"""
Define a transformation that substitutes the contents of *source* files to *target* files::
Defines a transformation that substitutes the contents of *source* files to *target* files::

def build(bld):
bld(
@@ -820,7 +882,6 @@ def process_subst(self):
a = self.path.find_node(x)
b = self.path.get_bld().make_node(y)
if not os.path.isfile(b.abspath()):
b.sig = None
b.parent.mkdir()
else:
if isinstance(x, str):
@@ -835,25 +896,22 @@ def process_subst(self):
if not a:
raise Errors.WafError('could not find %r for %r' % (x, self))

has_constraints = False
tsk = self.create_task('subst', a, b)
for k in ('after', 'before', 'ext_in', 'ext_out'):
val = getattr(self, k, None)
if val:
has_constraints = True
setattr(tsk, k, val)

# paranoid safety measure for the general case foo.in->foo.h with ambiguous dependencies
if not has_constraints:
global HEADER_EXTS
for xt in HEADER_EXTS:
if b.name.endswith(xt):
tsk.before = [k for k in ('c', 'cxx') if k in Task.classes]
break
for xt in HEADER_EXTS:
if b.name.endswith(xt):
tsk.ext_in = tsk.ext_in + ['.h']
break

inst_to = getattr(self, 'install_path', None)
if inst_to:
self.bld.install_files(inst_to, b, chmod=getattr(self, 'chmod', Utils.O644))
self.install_task = self.add_install_files(install_to=inst_to,
install_from=b, chmod=getattr(self, 'chmod', Utils.O644))

self.source = []


+ 1
- 1
waflib/Tools/__init__.py View File

@@ -1,3 +1,3 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
# Thomas Nagy, 2005-2018 (ita)

+ 2
- 2
waflib/Tools/ar.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh)

"""
@@ -16,7 +16,7 @@ def find_ar(conf):
conf.load('ar')

def configure(conf):
"""Find the ar program and set the default flags in ``conf.env.ARFLAGS``"""
"""Finds the ar program and sets the default flags in ``conf.env.ARFLAGS``"""
conf.find_program('ar', var='AR')
conf.add_os_flags('ARFLAGS')
if not conf.env.ARFLAGS:


+ 73
- 0
waflib/Tools/asm.py View File

@@ -0,0 +1,73 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2008-2018 (ita)

"""
Assembly support, used by tools such as gas and nasm

To declare targets using assembly::

def configure(conf):
conf.load('gcc gas')

def build(bld):
bld(
features='c cstlib asm',
source = 'test.S',
target = 'asmtest')

bld(
features='asm asmprogram',
source = 'test.S',
target = 'asmtest')

Support for pure asm programs and libraries should also work::

def configure(conf):
conf.load('nasm')
conf.find_program('ld', 'ASLINK')

def build(bld):
bld(
features='asm asmprogram',
source = 'test.S',
target = 'asmtest')
"""

from waflib import Task
from waflib.Tools.ccroot import link_task, stlink_task
from waflib.TaskGen import extension

class asm(Task.Task):
	"""
	Compiles asm files by gas/nasm/yasm/...

	``run_str`` is a command template; each ``${...}`` entry is substituted
	from the task environment (assembler binary, flags, include paths built
	with ``ASMPATH_ST``, and preprocessor defines).
	"""
	# console color used when printing this task during the build
	color = 'BLUE'
	run_str = '${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}'

@extension('.s', '.S', '.asm', '.ASM', '.spp', '.SPP')
def asm_hook(self, node):
	"""
	Creates an :py:class:`waflib.Tools.asm.asm` compilation task for each
	source file carrying a recognized assembly extension.

	:param node: input file
	:type node: :py:class:`waflib.Node.Node`
	:return: the task created for *node*
	"""
	task = self.create_compiled_task('asm', node)
	return task

class asmprogram(link_task):
	"Links object files into a c program"
	run_str = '${ASLINK} ${ASLINKFLAGS} ${ASLNK_TGT_F}${TGT} ${ASLNK_SRC_F}${SRC}'
	# declare the produced extension so other tasks can order themselves after the link
	ext_out = ['.bin']
	# installation destination used by the link_task machinery (ccroot)
	inst_to = '${BINDIR}'

class asmshlib(asmprogram):
	"Links object files into a c shared library"
	# same link command as asmprogram, only the install destination differs
	inst_to = '${LIBDIR}'

class asmstlib(stlink_task):
	"Links object files into a c static library"
	# the class exists solely so the 'asmstlib' task name is registered;
	# all behaviour comes from stlink_task
	pass # do not remove

def configure(conf):
	"""
	Sets the assembly include-path flag template in the environment.
	"""
	# format string applied to each entry of INCPATHS by the asm task
	# (see ${ASMPATH_ST:INCPATHS} in asm.run_str), e.g. '-I/usr/include'
	conf.env.ASMPATH_ST = '-I%s'

+ 49
- 0
waflib/Tools/bison.py View File

@@ -0,0 +1,49 @@
#!/usr/bin/env python
# encoding: utf-8
# John O'Meara, 2006
# Thomas Nagy 2009-2018 (ita)

"""
The **bison** program is a code generator which creates C or C++ files.
The generated files are compiled into object files.
"""

from waflib import Task
from waflib.TaskGen import extension

class bison(Task.Task):
	"""
	Compiles bison files.

	The ``-o`` argument receives a bare file name (``${TGT[0].name}``), so
	the task is expected to run with its working directory set to the
	output folder (see :py:func:`waflib.Tools.bison.big_bison`, which sets
	``tsk.cwd``).
	"""
	color = 'BLUE'
	run_str = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
	ext_out = ['.h'] # just to make sure

@extension('.y', '.yc', '.yy')
def big_bison(self, node):
	"""
	Creates a bison task for a grammar file; the task must be executed from
	the directory of the output file, so its ``cwd`` is set to the build
	directory mirror of the source. The generated C/C++ file is appended to
	``self.source`` so that it gets compiled as well.
	"""
	# '-d' asks bison to also emit a header next to the parser source
	wants_header = '-d' in self.env.BISONFLAGS

	# .yc grammars produce C++ outputs, everything else produces C outputs
	if node.name.endswith('.yc'):
		suffixes = ['.tab.cc', '.tab.hh']
	else:
		suffixes = ['.tab.c', '.tab.h']
	if not wants_header:
		suffixes = suffixes[:1]

	outputs = [node.change_ext(ext) for ext in suffixes]

	tsk = self.create_task('bison', node, outputs)
	# run from the output directory: the command passes a bare name to -o
	tsk.cwd = node.parent.get_bld()

	# and the c/cxx file must be compiled too
	self.source.append(outputs[0])

def configure(conf):
	"""
	Detects the *bison* program and sets the default flags.

	:raises: a configuration error if *bison* cannot be found
	"""
	conf.find_program('bison', var='BISON')
	# '-d' makes bison emit a header file; big_bison checks for this flag
	conf.env.BISONFLAGS = ['-d']
conf.env.BISONFLAGS = ['-d']


+ 7
- 7
waflib/Tools/c.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
# Thomas Nagy, 2006-2018 (ita)

"Base for c programs/libraries"

@@ -10,30 +10,30 @@ from waflib.Tools.ccroot import link_task, stlink_task

@TaskGen.extension('.c')
def c_hook(self, node):
"Bind the c file extension to the creation of a :py:class:`waflib.Tools.c.c` instance"
"Binds the c file extension to create :py:class:`waflib.Tools.c.c` instances"
if not self.env.CC and self.env.CXX:
return self.create_compiled_task('cxx', node)
return self.create_compiled_task('c', node)

class c(Task.Task):
"Compile C files into object files"
run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath()}'
"Compiles C files into object files"
run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
vars = ['CCDEPS'] # unused variable to depend on, just in case
ext_in = ['.h'] # set the build order easily by using ext_out=['.h']
scan = c_preproc.scan

class cprogram(link_task):
"Link object files into a c program"
"Links object files into c programs"
run_str = '${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
ext_out = ['.bin']
vars = ['LINKDEPS']
inst_to = '${BINDIR}'

class cshlib(cprogram):
"Link object files into a c shared library"
"Links object files into c shared libraries"
inst_to = '${LIBDIR}'

class cstlib(stlink_task):
"Link object files into a c static library"
"Links object files into c static libraries"
pass # do not remove


+ 21
- 12
waflib/Tools/c_aliases.py View File

@@ -9,6 +9,8 @@ from waflib.Configure import conf

def get_extensions(lst):
"""
Returns the file extensions for the list of files given as input

:param lst: files to process
:list lst: list of string or :py:class:`waflib.Node.Node`
:return: list of file extensions
@@ -16,17 +18,15 @@ def get_extensions(lst):
"""
ret = []
for x in Utils.to_list(lst):
try:
if not isinstance(x, str):
x = x.name
ret.append(x[x.rfind('.') + 1:])
except Exception:
pass
if not isinstance(x, str):
x = x.name
ret.append(x[x.rfind('.') + 1:])
return ret

def sniff_features(**kw):
"""
Look at the source files and return the features for a task generator (mainly cc and cxx)::
Computes and returns the features required for a task generator by
looking at the file extensions. This is aimed at C/C++ mainly::

sniff_features(source=['foo.c', 'foo.cxx'], typ='shlib')
# returns ['cxx', 'c', 'cxxshlib', 'cshlib']
@@ -39,7 +39,7 @@ def sniff_features(**kw):
:rtype: list of string
"""
exts = get_extensions(kw['source'])
type = kw['_type']
typ = kw['typ']
feats = []

# watch the order, cxx will have the precedence
@@ -63,18 +63,27 @@ def sniff_features(**kw):
feats.append('java')
return 'java'

if type in ('program', 'shlib', 'stlib'):
if typ in ('program', 'shlib', 'stlib'):
will_link = False
for x in feats:
if x in ('cxx', 'd', 'fc', 'c'):
feats.append(x + type)
feats.append(x + typ)
will_link = True
if not will_link and not kw.get('features', []):
raise Errors.WafError('Cannot link from %r, try passing eg: features="c cprogram"?' % kw)
return feats

def set_features(kw, _type):
kw['_type'] = _type
def set_features(kw, typ):
"""
Inserts data in the input dict *kw* based on existing data and on the type of target
required (typ).

:param kw: task generator parameters
:type kw: dict
:param typ: type of target
:type typ: string
"""
kw['typ'] = typ
kw['features'] = Utils.to_list(kw.get('features', [])) + Utils.to_list(sniff_features(**kw))

@conf


+ 401
- 287
waflib/Tools/c_config.py
File diff suppressed because it is too large
View File


+ 18
- 36
waflib/Tools/c_osx.py View File

@@ -1,13 +1,13 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy 2008-2010
# Thomas Nagy 2008-2018 (ita)

"""
MacOSX related tools
"""

import os, shutil, platform
from waflib import Task, Utils, Errors
from waflib import Task, Utils
from waflib.TaskGen import taskgen_method, feature, after_method, before_method

app_info = '''
@@ -37,8 +37,8 @@ def set_macosx_deployment_target(self):
"""
see WAF issue 285 and also http://trac.macports.org/ticket/17059
"""
if self.env['MACOSX_DEPLOYMENT_TARGET']:
os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env['MACOSX_DEPLOYMENT_TARGET']
if self.env.MACOSX_DEPLOYMENT_TARGET:
os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env.MACOSX_DEPLOYMENT_TARGET
elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ:
if Utils.unversioned_sys_platform() == 'darwin':
os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2])
@@ -46,7 +46,7 @@ def set_macosx_deployment_target(self):
@taskgen_method
def create_bundle_dirs(self, name, out):
"""
Create bundle folders, used by :py:func:`create_task_macplist` and :py:func:`create_task_macapp`
Creates bundle folders, used by :py:func:`create_task_macplist` and :py:func:`create_task_macapp`
"""
dir = out.parent.find_or_declare(name)
dir.mkdir()
@@ -78,7 +78,7 @@ def create_task_macapp(self):
bld.env.MACAPP = True
bld.shlib(source='a.c', target='foo')
"""
if self.env['MACAPP'] or getattr(self, 'mac_app', False):
if self.env.MACAPP or getattr(self, 'mac_app', False):
out = self.link_task.outputs[0]

name = bundle_name_for_output(out)
@@ -88,7 +88,7 @@ def create_task_macapp(self):

self.apptask = self.create_task('macapp', self.link_task.outputs, n1)
inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/MacOS/' % name
self.bld.install_files(inst_to, n1, chmod=Utils.O755)
self.add_install_files(install_to=inst_to, install_from=n1, chmod=Utils.O755)

if getattr(self, 'mac_files', None):
# this only accepts files; they will be installed as seen from mac_files_root
@@ -102,38 +102,19 @@ def create_task_macapp(self):
for node in self.to_nodes(self.mac_files):
relpath = node.path_from(mac_files_root or node.parent)
self.create_task('macapp', node, res_dir.make_node(relpath))
self.bld.install_as(os.path.join(inst_to, relpath), node)

if getattr(self, 'mac_resources', None):
# TODO remove in waf 1.9
res_dir = n1.parent.parent.make_node('Resources')
inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Resources' % name
for x in self.to_list(self.mac_resources):
node = self.path.find_node(x)
if not node:
raise Errors.WafError('Missing mac_resource %r in %r' % (x, self))

parent = node.parent
if os.path.isdir(node.abspath()):
nodes = node.ant_glob('**')
else:
nodes = [node]
for node in nodes:
rel = node.path_from(parent)
self.create_task('macapp', node, res_dir.make_node(rel))
self.bld.install_as(inst_to + '/%s' % rel, node)
self.add_install_as(install_to=os.path.join(inst_to, relpath), install_from=node)

if getattr(self.bld, 'is_install', None):
# disable the normal binary installation
# disable regular binary installation
self.install_task.hasrun = Task.SKIP_ME

@feature('cprogram', 'cxxprogram')
@after_method('apply_link')
def create_task_macplist(self):
"""
Create a :py:class:`waflib.Tools.c_osx.macplist` instance.
Creates a :py:class:`waflib.Tools.c_osx.macplist` instance.
"""
if self.env['MACAPP'] or getattr(self, 'mac_app', False):
if self.env.MACAPP or getattr(self, 'mac_app', False):
out = self.link_task.outputs[0]

name = bundle_name_for_output(out)
@@ -160,7 +141,7 @@ def create_task_macplist(self):
plisttask.code = app_info

inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/' % name
self.bld.install_files(inst_to, n1)
self.add_install_files(install_to=inst_to, install_from=n1)

@feature('cshlib', 'cxxshlib')
@before_method('apply_link', 'propagate_uselib_vars')
@@ -177,9 +158,9 @@ def apply_bundle(self):
bld.env.MACBUNDLE = True
bld.shlib(source='a.c', target='foo')
"""
if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False):
self.env['LINKFLAGS_cshlib'] = self.env['LINKFLAGS_cxxshlib'] = [] # disable the '-dynamiclib' flag
self.env['cshlib_PATTERN'] = self.env['cxxshlib_PATTERN'] = self.env['macbundle_PATTERN']
if self.env.MACBUNDLE or getattr(self, 'mac_bundle', False):
self.env.LINKFLAGS_cshlib = self.env.LINKFLAGS_cxxshlib = [] # disable the '-dynamiclib' flag
self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.macbundle_PATTERN
use = self.use = self.to_list(getattr(self, 'use', []))
if not 'MACBUNDLE' in use:
use.append('MACBUNDLE')
@@ -188,7 +169,7 @@ app_dirs = ['Contents', 'Contents/MacOS', 'Contents/Resources']

class macapp(Task.Task):
"""
Create mac applications
Creates mac applications
"""
color = 'PINK'
def run(self):
@@ -197,7 +178,7 @@ class macapp(Task.Task):

class macplist(Task.Task):
"""
Create plist files
Creates plist files
"""
color = 'PINK'
ext_in = ['.bin']
@@ -209,3 +190,4 @@ class macplist(Task.Task):
context = getattr(self, 'context', {})
txt = txt.format(**context)
self.outputs[0].write(txt)


+ 260
- 210
waflib/Tools/c_preproc.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
# Thomas Nagy, 2006-2018 (ita)

"""
C/C++ preprocessor for finding dependencies
@@ -28,11 +28,13 @@ A dumb preprocessor is also available in the tool *c_dumbpreproc*

import re, string, traceback
from waflib import Logs, Utils, Errors
from waflib.Logs import debug, error

class PreprocError(Errors.WafError):
pass

FILE_CACHE_SIZE = 100000
LINE_CACHE_SIZE = 100000

POPFILE = '-'
"Constant representing a special token used in :py:meth:`waflib.Tools.c_preproc.c_parser.start` iteration to switch to a header read previously"

@@ -42,15 +44,15 @@ recursion_limit = 150
go_absolute = False
"Set to True to track headers on files in /usr/include, else absolute paths are ignored (but it becomes very slow)"

standard_includes = ['/usr/include']
standard_includes = ['/usr/local/include', '/usr/include']
if Utils.is_win32:
standard_includes = []

use_trigraphs = 0
"""Apply trigraph rules (False by default)"""

# obsolete, do not use
strict_quotes = 0
"""Reserve the "#include <>" quotes for system includes (do not search for those includes). False by default."""

g_optrans = {
'not':'!',
@@ -69,7 +71,7 @@ g_optrans = {

# ignore #warning and #error
re_lines = re.compile(
'^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',
'^[ \t]*(?:#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',
re.IGNORECASE | re.MULTILINE)
"""Match #include lines"""

@@ -137,54 +139,22 @@ skipped = 's'

def repl(m):
"""Replace function used with :py:attr:`waflib.Tools.c_preproc.re_cpp`"""
s = m.group(0)
if s.startswith('/'):
s = m.group()
if s[0] == '/':
return ' '
return s

def filter_comments(filename):
"""
Filter the comments from a c/h file, and return the preprocessor lines.
The regexps :py:attr:`waflib.Tools.c_preproc.re_cpp`, :py:attr:`waflib.Tools.c_preproc.re_nl` and :py:attr:`waflib.Tools.c_preproc.re_lines` are used internally.

:return: the preprocessor directives as a list of (keyword, line)
:rtype: a list of string pairs
"""
# return a list of tuples : keyword, line
code = Utils.readf(filename)
if use_trigraphs:
for (a, b) in trig_def: code = code.split(a).join(b)
code = re_nl.sub('', code)
code = re_cpp.sub(repl, code)
return [(m.group(2), m.group(3)) for m in re.finditer(re_lines, code)]

prec = {}
"""
Operator precendence rules required for parsing expressions of the form::
Operator precedence rules required for parsing expressions of the form::

#if 1 && 2 != 0
"""
ops = ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ',']
for x in range(len(ops)):
syms = ops[x]
for x, syms in enumerate(ops):
for u in syms.split():
prec[u] = x

def trimquotes(s):
"""
Remove the single quotes around an expression::

trimquotes("'test'") == "test"

:param s: expression to transform
:type s: string
:rtype: string
"""
if not s: return ''
s = s.rstrip()
if s[0] == "'" and s[-1] == "'": return s[1:-1]
return s

def reduce_nums(val_1, val_2, val_op):
"""
Apply arithmetic rules to compute a result
@@ -200,32 +170,56 @@ def reduce_nums(val_1, val_2, val_op):
#print val_1, val_2, val_op

# now perform the operation, make certain a and b are numeric
try: a = 0 + val_1
except TypeError: a = int(val_1)
try: b = 0 + val_2
except TypeError: b = int(val_2)
try:
a = 0 + val_1
except TypeError:
a = int(val_1)
try:
b = 0 + val_2
except TypeError:
b = int(val_2)

d = val_op
if d == '%': c = a%b
elif d=='+': c = a+b
elif d=='-': c = a-b
elif d=='*': c = a*b
elif d=='/': c = a/b
elif d=='^': c = a^b
elif d=='==': c = int(a == b)
elif d=='|' or d == 'bitor': c = a|b
elif d=='||' or d == 'or' : c = int(a or b)
elif d=='&' or d == 'bitand': c = a&b
elif d=='&&' or d == 'and': c = int(a and b)
elif d=='!=' or d == 'not_eq': c = int(a != b)
elif d=='^' or d == 'xor': c = int(a^b)
elif d=='<=': c = int(a <= b)
elif d=='<': c = int(a < b)
elif d=='>': c = int(a > b)
elif d=='>=': c = int(a >= b)
elif d=='<<': c = a<<b
elif d=='>>': c = a>>b
else: c = 0
if d == '%':
c = a % b
elif d=='+':
c = a + b
elif d=='-':
c = a - b
elif d=='*':
c = a * b
elif d=='/':
c = a / b
elif d=='^':
c = a ^ b
elif d=='==':
c = int(a == b)
elif d=='|' or d == 'bitor':
c = a | b
elif d=='||' or d == 'or' :
c = int(a or b)
elif d=='&' or d == 'bitand':
c = a & b
elif d=='&&' or d == 'and':
c = int(a and b)
elif d=='!=' or d == 'not_eq':
c = int(a != b)
elif d=='^' or d == 'xor':
c = int(a^b)
elif d=='<=':
c = int(a <= b)
elif d=='<':
c = int(a < b)
elif d=='>':
c = int(a > b)
elif d=='>=':
c = int(a >= b)
elif d=='<<':
c = a << b
elif d=='>>':
c = a >> b
else:
c = 0
return c

def get_num(lst):
@@ -237,7 +231,8 @@ def get_num(lst):
:return: a pair containing the number and the rest of the list
:rtype: tuple(value, list)
"""
if not lst: raise PreprocError("empty list for get_num")
if not lst:
raise PreprocError('empty list for get_num')
(p, v) = lst[0]
if p == OP:
if v == '(':
@@ -255,7 +250,7 @@ def get_num(lst):
count_par += 1
i += 1
else:
raise PreprocError("rparen expected %r" % lst)
raise PreprocError('rparen expected %r' % lst)

(num, _) = get_term(lst[1:i])
return (num, lst[i+1:])
@@ -272,14 +267,14 @@ def get_num(lst):
num, lst = get_num(lst[1:])
return (~ int(num), lst)
else:
raise PreprocError("Invalid op token %r for get_num" % lst)
raise PreprocError('Invalid op token %r for get_num' % lst)
elif p == NUM:
return v, lst[1:]
elif p == IDENT:
# all macros should have been replaced, remaining identifiers eval to 0
return 0, lst[1:]
else:
raise PreprocError("Invalid token %r for get_num" % lst)
raise PreprocError('Invalid token %r for get_num' % lst)

def get_term(lst):
"""
@@ -293,7 +288,8 @@ def get_term(lst):
:rtype: value, list
"""

if not lst: raise PreprocError("empty list for get_term")
if not lst:
raise PreprocError('empty list for get_term')
num, lst = get_num(lst)
if not lst:
return (num, [])
@@ -318,7 +314,7 @@ def get_term(lst):
break
i += 1
else:
raise PreprocError("rparen expected %r" % lst)
raise PreprocError('rparen expected %r' % lst)

if int(num):
return get_term(lst[1:i])
@@ -336,7 +332,7 @@ def get_term(lst):
# operator precedence
p2, v2 = lst[0]
if p2 != OP:
raise PreprocError("op expected %r" % lst)
raise PreprocError('op expected %r' % lst)

if prec[v2] >= prec[v]:
num2 = reduce_nums(num, num2, v)
@@ -347,7 +343,7 @@ def get_term(lst):
return get_term([(NUM, num), (p, v), (NUM, num3)] + lst)


raise PreprocError("cannot reduce %r" % lst)
raise PreprocError('cannot reduce %r' % lst)

def reduce_eval(lst):
"""
@@ -432,7 +428,7 @@ def reduce_tokens(lst, defs, ban=[]):
else:
lst[i] = (NUM, 0)
else:
raise PreprocError("Invalid define expression %r" % lst)
raise PreprocError('Invalid define expression %r' % lst)

elif p == IDENT and v in defs:

@@ -447,8 +443,8 @@ def reduce_tokens(lst, defs, ban=[]):
del lst[i]
accu = to_add[:]
reduce_tokens(accu, defs, ban+[v])
for x in range(len(accu)):
lst.insert(i, accu[x])
for tmp in accu:
lst.insert(i, tmp)
i += 1
else:
# collect the arguments for the funcall
@@ -457,11 +453,11 @@ def reduce_tokens(lst, defs, ban=[]):
del lst[i]

if i >= len(lst):
raise PreprocError("expected '(' after %r (got nothing)" % v)
raise PreprocError('expected ( after %r (got nothing)' % v)

(p2, v2) = lst[i]
if p2 != OP or v2 != '(':
raise PreprocError("expected '(' after %r" % v)
raise PreprocError('expected ( after %r' % v)

del lst[i]

@@ -476,18 +472,22 @@ def reduce_tokens(lst, defs, ban=[]):
one_param.append((p2, v2))
count_paren += 1
elif v2 == ')':
if one_param: args.append(one_param)
if one_param:
args.append(one_param)
break
elif v2 == ',':
if not one_param: raise PreprocError("empty param in funcall %s" % v)
if not one_param:
raise PreprocError('empty param in funcall %r' % v)
args.append(one_param)
one_param = []
else:
one_param.append((p2, v2))
else:
one_param.append((p2, v2))
if v2 == '(': count_paren += 1
elif v2 == ')': count_paren -= 1
if v2 == '(':
count_paren += 1
elif v2 == ')':
count_paren -= 1
else:
raise PreprocError('malformed macro')

@@ -524,7 +524,6 @@ def reduce_tokens(lst, defs, ban=[]):
accu.append((p2, v2))
accu.extend(toks)
elif to_add[j+1][0] == IDENT and to_add[j+1][1] == '__VA_ARGS__':
# TODO not sure
# first collect the tokens
va_toks = []
st = len(macro_def[0])
@@ -532,7 +531,8 @@ def reduce_tokens(lst, defs, ban=[]):
for x in args[pt-st+1:]:
va_toks.extend(x)
va_toks.append((OP, ','))
if va_toks: va_toks.pop() # extra comma
if va_toks:
va_toks.pop() # extra comma
if len(accu)>1:
(p3, v3) = accu[-1]
(p4, v4) = accu[-2]
@@ -580,8 +580,15 @@ def eval_macro(lst, defs):
:rtype: int
"""
reduce_tokens(lst, defs, [])
if not lst: raise PreprocError("missing tokens to evaluate")
(p, v) = reduce_eval(lst)
if not lst:
raise PreprocError('missing tokens to evaluate')

if lst:
p, v = lst[0]
if p == IDENT and v not in defs:
raise PreprocError('missing macro %r' % lst)

p, v = reduce_eval(lst)
return int(v) != 0

def extract_macro(txt):
@@ -601,7 +608,8 @@ def extract_macro(txt):
p, name = t[0]

p, v = t[1]
if p != OP: raise PreprocError("expected open parenthesis")
if p != OP:
raise PreprocError('expected (')

i = 1
pindex = 0
@@ -620,27 +628,27 @@ def extract_macro(txt):
elif p == OP and v == ')':
break
else:
raise PreprocError("unexpected token (3)")
raise PreprocError('unexpected token (3)')
elif prev == IDENT:
if p == OP and v == ',':
prev = v
elif p == OP and v == ')':
break
else:
raise PreprocError("comma or ... expected")
raise PreprocError('comma or ... expected')
elif prev == ',':
if p == IDENT:
params[v] = pindex
pindex += 1
prev = p
elif p == OP and v == '...':
raise PreprocError("not implemented (1)")
raise PreprocError('not implemented (1)')
else:
raise PreprocError("comma or ... expected (2)")
raise PreprocError('comma or ... expected (2)')
elif prev == '...':
raise PreprocError("not implemented (2)")
raise PreprocError('not implemented (2)')
else:
raise PreprocError("unexpected else")
raise PreprocError('unexpected else')

#~ print (name, [params, t[i+1:]])
return (name, [params, t[i+1:]])
@@ -652,7 +660,7 @@ def extract_macro(txt):
# empty define, assign an empty token
return (v, [[], [('T','')]])

re_include = re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
re_include = re.compile('^\s*(<(?:.*)>|"(?:.*)")')
def extract_include(txt, defs):
"""
Process a line in the form::
@@ -668,15 +676,15 @@ def extract_include(txt, defs):
"""
m = re_include.search(txt)
if m:
if m.group('a'): return '<', m.group('a')
if m.group('b'): return '"', m.group('b')
txt = m.group(1)
return txt[0], txt[1:-1]

# perform preprocessing and look at the result, it must match an include
toks = tokenize(txt)
reduce_tokens(toks, defs, ['waf_include'])

if not toks:
raise PreprocError("could not parse include %s" % txt)
raise PreprocError('could not parse include %r' % txt)

if len(toks) == 1:
if toks[0][0] == STR:
@@ -686,7 +694,7 @@ def extract_include(txt, defs):
ret = '<', stringize(toks).lstrip('<').rstrip('>')
return ret

raise PreprocError("could not parse include %s." % txt)
raise PreprocError('could not parse include %r' % txt)

def parse_char(txt):
"""
@@ -698,21 +706,26 @@ def parse_char(txt):
:rtype: string
"""

if not txt: raise PreprocError("attempted to parse a null char")
if not txt:
raise PreprocError('attempted to parse a null char')
if txt[0] != '\\':
return ord(txt)
c = txt[1]
if c == 'x':
if len(txt) == 4 and txt[3] in string.hexdigits: return int(txt[2:], 16)
if len(txt) == 4 and txt[3] in string.hexdigits:
return int(txt[2:], 16)
return int(txt[2:], 16)
elif c.isdigit():
if c == '0' and len(txt)==2: return 0
if c == '0' and len(txt)==2:
return 0
for i in 3, 2, 1:
if len(txt) > i and txt[1:1+i].isdigit():
return (1+i, int(txt[1:1+i], 8))
else:
try: return chr_esc[c]
except KeyError: raise PreprocError("could not parse char literal '%s'" % txt)
try:
return chr_esc[c]
except KeyError:
raise PreprocError('could not parse char literal %r' % txt)

def tokenize(s):
"""
@@ -725,7 +738,6 @@ def tokenize(s):
"""
return tokenize_private(s)[:] # force a copy of the results

@Utils.run_once
def tokenize_private(s):
ret = []
for match in re_clexer.finditer(s):
@@ -734,28 +746,32 @@ def tokenize_private(s):
v = m(name)
if v:
if name == IDENT:
try:
g_optrans[v];
if v in g_optrans:
name = OP
except KeyError:
# c++ specific
if v.lower() == "true":
v = 1
name = NUM
elif v.lower() == "false":
v = 0
name = NUM
elif v.lower() == "true":
v = 1
name = NUM
elif v.lower() == "false":
v = 0
name = NUM
elif name == NUM:
if m('oct'): v = int(v, 8)
elif m('hex'): v = int(m('hex'), 16)
elif m('n0'): v = m('n0')
if m('oct'):
v = int(v, 8)
elif m('hex'):
v = int(m('hex'), 16)
elif m('n0'):
v = m('n0')
else:
v = m('char')
if v: v = parse_char(v)
else: v = m('n2') or m('n4')
if v:
v = parse_char(v)
else:
v = m('n2') or m('n4')
elif name == OP:
if v == '%:': v = '#'
elif v == '%:%:': v = '##'
if v == '%:':
v = '#'
elif v == '%:%:':
v = '##'
elif name == STR:
# remove the quotes around the string
v = v[1:-1]
@@ -763,15 +779,20 @@ def tokenize_private(s):
break
return ret

@Utils.run_once
def define_name(line):
"""
:param line: define line
:type line: string
:rtype: string
:return: the define name
"""
return re_mac.match(line).group(0)
def format_defines(lst):
ret = []
for y in lst:
if y:
pos = y.find('=')
if pos == -1:
# "-DFOO" should give "#define FOO 1"
ret.append(y)
elif pos > 0:
# all others are assumed to be -DX=Y
ret.append('%s %s' % (y[:pos], y[pos+1:]))
else:
raise ValueError('Invalid define expression %r' % y)
return ret

class c_parser(object):
"""
@@ -803,9 +824,12 @@ class c_parser(object):
self.curfile = ''
"""Current file"""

self.ban_includes = set([])
self.ban_includes = set()
"""Includes that must not be read (#pragma once)"""

self.listed = set()
"""Include nodes/names already listed to avoid duplicates in self.nodes/self.names"""

def cached_find_resource(self, node, filename):
"""
Find a file from the input directory
@@ -818,13 +842,13 @@ class c_parser(object):
:rtype: :py:class:`waflib.Node.Node`
"""
try:
nd = node.ctx.cache_nd
cache = node.ctx.preproc_cache_node
except AttributeError:
nd = node.ctx.cache_nd = {}
cache = node.ctx.preproc_cache_node = Utils.lru_cache(FILE_CACHE_SIZE)

tup = (node, filename)
key = (node, filename)
try:
return nd[tup]
return cache[key]
except KeyError:
ret = node.find_resource(filename)
if ret:
@@ -834,10 +858,10 @@ class c_parser(object):
tmp = node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode))
if tmp and getattr(tmp, 'children', None):
ret = None
nd[tup] = ret
cache[key] = ret
return ret

def tryfind(self, filename):
def tryfind(self, filename, kind='"', env=None):
"""
Try to obtain a node from the filename based from the include paths. Will add
the node found to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes` or the file name to
@@ -851,29 +875,70 @@ class c_parser(object):
"""
if filename.endswith('.moc'):
# we could let the qt4 module use a subclass, but then the function "scan" below must be duplicated
# in the qt4 and in the qt5 classes. So we have two lines here and it is sufficient. TODO waf 1.9
# in the qt4 and in the qt5 classes. So we have two lines here and it is sufficient.
self.names.append(filename)
return None

self.curfile = filename

# for msvc it should be a for loop over the whole stack
found = self.cached_find_resource(self.currentnode_stack[-1], filename)
found = None
if kind == '"':
if env.MSVC_VERSION:
for n in reversed(self.currentnode_stack):
found = self.cached_find_resource(n, filename)
if found:
break
else:
found = self.cached_find_resource(self.currentnode_stack[-1], filename)

for n in self.nodepaths:
if found:
break
found = self.cached_find_resource(n, filename)
if not found:
for n in self.nodepaths:
found = self.cached_find_resource(n, filename)
if found:
break

listed = self.listed
if found and not found in self.ban_includes:
# TODO duplicates do not increase the no-op build times too much, but they may be worth removing
self.nodes.append(found)
if found not in listed:
listed.add(found)
self.nodes.append(found)
self.addlines(found)
else:
if not filename in self.names:
if filename not in listed:
listed.add(filename)
self.names.append(filename)
return found

def filter_comments(self, node):
	"""
	Filter the comments from a c/h file, and return the preprocessor lines.
	The regexps :py:attr:`waflib.Tools.c_preproc.re_cpp`, :py:attr:`waflib.Tools.c_preproc.re_nl` and :py:attr:`waflib.Tools.c_preproc.re_lines` are used internally.

	:param node: file node to read and filter
	:return: the preprocessor directives as a list of (keyword, line)
	:rtype: a list of string pairs
	"""
	# return a list of tuples : keyword, line
	code = node.read()
	if use_trigraphs:
		for (a, b) in trig_def:
			# Replace each trigraph sequence by its single-character equivalent.
			# The previous form `code.split(a).join(b)` raised AttributeError:
			# str.split returns a list, and lists have no join method.
			code = code.replace(a, b)
	code = re_nl.sub('', code)    # fold backslash-newline continuations
	code = re_cpp.sub(repl, code) # strip comments while preserving line structure
	return re_lines.findall(code)

def parse_lines(self, node):
	"""
	Return the preprocessor directives of *node*, filtered, terminated by a
	POPFILE marker and reversed (so they can be consumed by pop()).
	Results are memoized on the build context in ``preproc_cache_lines``.

	:param node: file node to parse
	:return: reversed list of (keyword, line) pairs ending with (POPFILE, '')
	"""
	ctx = node.ctx
	try:
		cache = ctx.preproc_cache_lines
	except AttributeError:
		# first use on this context: create the bounded cache
		cache = ctx.preproc_cache_lines = Utils.lru_cache(LINE_CACHE_SIZE)
	try:
		return cache[node]
	except KeyError:
		pass
	lines = self.filter_comments(node)
	lines.append((POPFILE, ''))
	lines.reverse()
	cache[node] = lines
	return lines

def addlines(self, node):
"""
Add the lines from a header in the list of preprocessor lines to parse
@@ -883,34 +948,23 @@ class c_parser(object):
"""

self.currentnode_stack.append(node.parent)
filepath = node.abspath()

self.count_files += 1
if self.count_files > recursion_limit:
# issue #812
raise PreprocError("recursion limit exceeded")
pc = self.parse_cache
debug('preproc: reading file %r', filepath)
try:
lns = pc[filepath]
except KeyError:
pass
else:
self.lines.extend(lns)
return
raise PreprocError('recursion limit exceeded')

if Logs.verbose:
Logs.debug('preproc: reading file %r', node)
try:
lines = filter_comments(filepath)
lines.append((POPFILE, ''))
lines.reverse()
pc[filepath] = lines # cache the lines filtered
self.lines.extend(lines)
except IOError:
raise PreprocError("could not read the file %s" % filepath)
lines = self.parse_lines(node)
except EnvironmentError:
raise PreprocError('could not read the file %r' % node)
except Exception:
if Logs.verbose > 0:
error("parsing %s failed" % filepath)
traceback.print_exc()
Logs.error('parsing %r failed %s', node, traceback.format_exc())
else:
self.lines.extend(lines)

def start(self, node, env):
"""
@@ -922,27 +976,16 @@ class c_parser(object):
:param env: config set containing additional defines to take into account
:type env: :py:class:`waflib.ConfigSet.ConfigSet`
"""

debug('preproc: scanning %s (in %s)', node.name, node.parent.name)

bld = node.ctx
try:
self.parse_cache = bld.parse_cache
except AttributeError:
self.parse_cache = bld.parse_cache = {}
Logs.debug('preproc: scanning %s (in %s)', node.name, node.parent.name)

self.current_file = node
self.addlines(node)

# macros may be defined on the command-line, so they must be parsed as if they were part of the file
if env['DEFINES']:
try:
lst = ['%s %s' % (x[0], trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in env['DEFINES']]]
lst.reverse()
self.lines.extend([('define', x) for x in lst])
except AttributeError:
# if the defines are invalid the compiler will tell the user
pass
if env.DEFINES:
lst = format_defines(env.DEFINES)
lst.reverse()
self.lines.extend([('define', x) for x in lst])

while self.lines:
(token, line) = self.lines.pop()
@@ -952,8 +995,6 @@ class c_parser(object):
continue

try:
ve = Logs.verbose
if ve: debug('preproc: line is %s - %s state is %s', token, line, self.state)
state = self.state

# make certain we define the state if we are about to enter in an if block
@@ -969,23 +1010,27 @@ class c_parser(object):

if token == 'if':
ret = eval_macro(tokenize(line), self.defs)
if ret: state[-1] = accepted
else: state[-1] = ignored
if ret:
state[-1] = accepted
else:
state[-1] = ignored
elif token == 'ifdef':
m = re_mac.match(line)
if m and m.group(0) in self.defs: state[-1] = accepted
else: state[-1] = ignored
if m and m.group() in self.defs:
state[-1] = accepted
else:
state[-1] = ignored
elif token == 'ifndef':
m = re_mac.match(line)
if m and m.group(0) in self.defs: state[-1] = ignored
else: state[-1] = accepted
if m and m.group() in self.defs:
state[-1] = ignored
else:
state[-1] = accepted
elif token == 'include' or token == 'import':
(kind, inc) = extract_include(line, self.defs)
if ve: debug('preproc: include found %s (%s) ', inc, kind)
if kind == '"' or not strict_quotes:
self.current_file = self.tryfind(inc)
if token == 'import':
self.ban_includes.add(self.current_file)
self.current_file = self.tryfind(inc, kind, env)
if token == 'import':
self.ban_includes.add(self.current_file)
elif token == 'elif':
if state[-1] == accepted:
state[-1] = skipped
@@ -993,24 +1038,35 @@ class c_parser(object):
if eval_macro(tokenize(line), self.defs):
state[-1] = accepted
elif token == 'else':
if state[-1] == accepted: state[-1] = skipped
elif state[-1] == ignored: state[-1] = accepted
if state[-1] == accepted:
state[-1] = skipped
elif state[-1] == ignored:
state[-1] = accepted
elif token == 'define':
try:
self.defs[define_name(line)] = line
except Exception:
raise PreprocError("Invalid define line %s" % line)
self.defs[self.define_name(line)] = line
except AttributeError:
raise PreprocError('Invalid define line %r' % line)
elif token == 'undef':
m = re_mac.match(line)
if m and m.group(0) in self.defs:
self.defs.__delitem__(m.group(0))
if m and m.group() in self.defs:
self.defs.__delitem__(m.group())
#print "undef %s" % name
elif token == 'pragma':
if re_pragma_once.match(line.lower()):
self.ban_includes.add(self.current_file)
except Exception as e:
if Logs.verbose:
debug('preproc: line parsing failed (%s): %s %s', e, line, Utils.ex_stack())
Logs.debug('preproc: line parsing failed (%s): %s %s', e, line, traceback.format_exc())

def define_name(self, line):
	"""
	:param line: define line
	:type line: string
	:rtype: string
	:return: the define name (the macro identifier at the start of the line)
	"""
	m = re_mac.match(line)
	return m.group()

def scan(task):
"""
@@ -1020,9 +1076,6 @@ def scan(task):

This function is bound as a task method on :py:class:`waflib.Tools.c.c` and :py:class:`waflib.Tools.cxx.cxx` for example
"""

global go_absolute

try:
incn = task.generator.includes_nodes
except AttributeError:
@@ -1035,7 +1088,4 @@ def scan(task):

tmp = c_parser(nodepaths)
tmp.start(task.inputs[0], task.env)
if Logs.verbose:
debug('deps: deps for %r: %r; unresolved %r' % (task.inputs, tmp.nodes, tmp.names))
return (tmp.nodes, tmp.names)


+ 13
- 12
waflib/Tools/c_tests.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2010 (ita)
# Thomas Nagy, 2016-2018 (ita)

"""
Various configuration tests.
@@ -58,7 +58,7 @@ def link_lib_test_fun(self):
@conf
def check_library(self, mode=None, test_exec=True):
"""
Check if libraries can be linked with the current linker. Uses :py:func:`waflib.Tools.c_tests.link_lib_test_fun`.
Checks if libraries can be linked with the current linker. Uses :py:func:`waflib.Tools.c_tests.link_lib_test_fun`.

:param mode: c or cxx or d
:type mode: string
@@ -72,8 +72,7 @@ def check_library(self, mode=None, test_exec=True):
features = 'link_lib_test',
msg = 'Checking for libraries',
mode = mode,
test_exec = test_exec,
)
test_exec = test_exec)

########################################################################################

@@ -89,7 +88,7 @@ INLINE_VALUES = ['inline', '__inline__', '__inline']
@conf
def check_inline(self, **kw):
"""
Check for the right value for inline macro.
Checks for the right value for inline macro.
Define INLINE_MACRO to 1 if the define is found.
If the inline macro is not 'inline', add a define to the ``config.h`` (#define inline __inline__)

@@ -98,7 +97,6 @@ def check_inline(self, **kw):
:param features: by default *c* or *cxx* depending on the compiler present
:type features: list of string
"""

self.start_msg('Checking for inline')

if not 'define_name' in kw:
@@ -135,7 +133,7 @@ int main(int argc, char **argv) {
@conf
def check_large_file(self, **kw):
"""
Check for large file support and define the macro HAVE_LARGEFILE
Checks for large file support and define the macro HAVE_LARGEFILE
The test is skipped on win32 systems (DEST_BINFMT == pe).

:param define_name: define to set, by default *HAVE_LARGEFILE*
@@ -143,7 +141,6 @@ def check_large_file(self, **kw):
:param execute: execute the test (yes by default)
:type execute: bool
"""

if not 'define_name' in kw:
kw['define_name'] = 'HAVE_LARGEFILE'
if not 'execute' in kw:
@@ -197,9 +194,12 @@ extern int foo;
'''

class grep_for_endianness(Task.Task):
"""
Task that reads a binary and tries to determine the endianness
"""
color = 'PINK'
def run(self):
txt = self.inputs[0].read(flags='rb').decode('iso8859-1')
txt = self.inputs[0].read(flags='rb').decode('latin-1')
if txt.find('LiTTleEnDian') > -1:
self.generator.tmp.append('little')
elif txt.find('BIGenDianSyS') > -1:
@@ -211,18 +211,19 @@ class grep_for_endianness(Task.Task):
@after_method('process_source')
def grep_for_endianness_fun(self):
"""
Used by the endiannes configuration test
Used by the endianness configuration test
"""
self.create_task('grep_for_endianness', self.compiled_tasks[0].outputs[0])

@conf
def check_endianness(self):
"""
Execute a configuration test to determine the endianness
Executes a configuration test to determine the endianness
"""
tmp = []
def check_msg(self):
return tmp[0]
self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness', msg="Checking for endianness", define='ENDIANNESS', tmp=tmp, okmsg=check_msg)
self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness',
msg='Checking for endianness', define='ENDIANNESS', tmp=tmp, okmsg=check_msg)
return tmp[0]


+ 97
- 43
waflib/Tools/ccroot.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
# Thomas Nagy, 2005-2018 (ita)

"""
Classes and methods shared by tools providing support for C-like language such
@@ -8,7 +8,7 @@ as C/C++/D/Assembly/Go (this support module is almost never used alone).
"""

import os, re
from waflib import Task, Utils, Node, Errors
from waflib import Task, Utils, Node, Errors, Logs
from waflib.TaskGen import after_method, before_method, feature, taskgen_method, extension
from waflib.Tools import c_aliases, c_preproc, c_config, c_osx, c_tests
from waflib.Configure import conf
@@ -77,7 +77,7 @@ def to_incnodes(self, inlst):
:return: list of include folders as nodes
"""
lst = []
seen = set([])
seen = set()
for x in self.to_list(inlst):
if x in seen or not x:
continue
@@ -118,9 +118,10 @@ def apply_incpaths(self):
and the list of include paths in ``tg.env.INCLUDES``.
"""

lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES'])
lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env.INCLUDES)
self.includes_nodes = lst
self.env['INCPATHS'] = [x.abspath() for x in lst]
cwd = self.get_cwd()
self.env.INCPATHS = [x.path_from(cwd) for x in lst]

class link_task(Task.Task):
"""
@@ -130,6 +131,9 @@ class link_task(Task.Task):
"""
color = 'YELLOW'

weight = 3
"""Try to process link tasks as early as possible"""

inst_to = None
"""Default installation path for the link task outputs, or None to disable"""

@@ -142,6 +146,12 @@ class link_task(Task.Task):
The settings are retrieved from ``env.clsname_PATTERN``
"""
if isinstance(target, str):
base = self.generator.path
if target.startswith('#'):
# for those who like flat structures
target = target[1:]
base = self.generator.bld.bldnode

pattern = self.env[self.__class__.__name__ + '_PATTERN']
if not pattern:
pattern = '%s'
@@ -151,7 +161,7 @@ class link_task(Task.Task):
nums = self.generator.vnum.split('.')
if self.env.DEST_BINFMT == 'pe':
# include the version in the dll file name,
# the import lib file name stays unversionned.
# the import lib file name stays unversioned.
name = name + '-' + nums[0]
elif self.env.DEST_OS == 'openbsd':
pattern = '%s.%s' % (pattern, nums[0])
@@ -162,9 +172,51 @@ class link_task(Task.Task):
tmp = folder + os.sep + pattern % name
else:
tmp = pattern % name
target = self.generator.path.find_or_declare(tmp)
target = base.find_or_declare(tmp)
self.set_outputs(target)

def exec_command(self, *k, **kw):
ret = super(link_task, self).exec_command(*k, **kw)
if not ret and self.env.DO_MANIFEST:
ret = self.exec_mf()
return ret

def exec_mf(self):
"""
Create manifest files for VS-like compilers (msvc, ifort, ...)
"""
if not self.env.MT:
return 0

manifest = None
for out_node in self.outputs:
if out_node.name.endswith('.manifest'):
manifest = out_node.abspath()
break
else:
# Should never get here. If we do, it means the manifest file was
# never added to the outputs list, thus we don't have a manifest file
# to embed, so we just return.
return 0

# embedding mode. Different for EXE's and DLL's.
# see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
mode = ''
for x in Utils.to_list(self.generator.features):
if x in ('cprogram', 'cxxprogram', 'fcprogram', 'fcprogram_test'):
mode = 1
elif x in ('cshlib', 'cxxshlib', 'fcshlib'):
mode = 2

Logs.debug('msvc: embedding manifest in mode %r', mode)

lst = [] + self.env.MT
lst.extend(Utils.to_list(self.env.MTFLAGS))
lst.extend(['-manifest', manifest])
lst.append('-outputresource:%s;%s' % (self.outputs[0].abspath(), mode))

return super(link_task, self).exec_command(lst)

class stlink_task(link_task):
"""
Base for static link tasks, which use *ar* most of the time.
@@ -178,8 +230,10 @@ class stlink_task(link_task):
def rm_tgt(cls):
old = cls.run
def wrap(self):
try: os.remove(self.outputs[0].abspath())
except OSError: pass
try:
os.remove(self.outputs[0].abspath())
except OSError:
pass
return old(self)
setattr(cls, 'run', wrap)
rm_tgt(stlink_task)
@@ -219,10 +273,12 @@ def apply_link(self):
try:
inst_to = self.install_path
except AttributeError:
inst_to = self.link_task.__class__.inst_to
inst_to = self.link_task.inst_to
if inst_to:
# install a copy of the node list we have at this moment (implib not added)
self.install_task = self.bld.install_files(inst_to, self.link_task.outputs[:], env=self.env, chmod=self.link_task.chmod, task=self.link_task)
self.install_task = self.add_install_files(
install_to=inst_to, install_from=self.link_task.outputs[:],
chmod=self.link_task.chmod, task=self.link_task)

@taskgen_method
def use_rec(self, name, **kw):
@@ -282,7 +338,7 @@ def process_use(self):
See :py:func:`waflib.Tools.ccroot.use_rec`.
"""

use_not = self.tmp_use_not = set([])
use_not = self.tmp_use_not = set()
self.tmp_use_seen = [] # we would like an ordered set
use_prec = self.tmp_use_prec = {}
self.uselib = self.to_list(getattr(self, 'uselib', []))
@@ -297,7 +353,7 @@ def process_use(self):
del use_prec[x]

# topological sort
out = []
out = self.tmp_use_sorted = []
tmp = []
for x in self.tmp_use_seen:
for k in use_prec.values():
@@ -333,14 +389,15 @@ def process_use(self):
if var == 'LIB' or y.tmp_use_stlib or x in names:
self.env.append_value(var, [y.target[y.target.rfind(os.sep) + 1:]])
self.link_task.dep_nodes.extend(y.link_task.outputs)
tmp_path = y.link_task.outputs[0].parent.path_from(self.bld.bldnode)
tmp_path = y.link_task.outputs[0].parent.path_from(self.get_cwd())
self.env.append_unique(var + 'PATH', [tmp_path])
else:
if y.tmp_use_objects:
self.add_objects_from_tgen(y)

if getattr(y, 'export_includes', None):
self.includes.extend(y.to_incnodes(y.export_includes))
# self.includes may come from a global variable #2035
self.includes = self.includes + y.to_incnodes(y.export_includes)

if getattr(y, 'export_defines', None):
self.env.append_value('DEFINES', self.to_list(y.export_defines))
@@ -390,7 +447,7 @@ def get_uselib_vars(self):
:return: the *uselib* variables associated to the *features* attribute (see :py:attr:`waflib.Tools.ccroot.USELIB_VARS`)
:rtype: list of string
"""
_vars = set([])
_vars = set()
for x in self.features:
if x in USELIB_VARS:
_vars |= USELIB_VARS[x]
@@ -405,7 +462,7 @@ def propagate_uselib_vars(self):
def build(bld):
bld.env.AFLAGS_aaa = ['bar']
from waflib.Tools.ccroot import USELIB_VARS
USELIB_VARS['aaa'] = set('AFLAGS')
USELIB_VARS['aaa'] = ['AFLAGS']

tg = bld(features='aaa', aflags='test')

@@ -447,20 +504,20 @@ def apply_implib(self):
name = self.target.name
else:
name = os.path.split(self.target)[1]
implib = self.env['implib_PATTERN'] % name
implib = self.env.implib_PATTERN % name
implib = dll.parent.find_or_declare(implib)
self.env.append_value('LINKFLAGS', self.env['IMPLIB_ST'] % implib.bldpath())
self.env.append_value('LINKFLAGS', self.env.IMPLIB_ST % implib.bldpath())
self.link_task.outputs.append(implib)

if getattr(self, 'defs', None) and self.env.DEST_BINFMT == 'pe':
node = self.path.find_resource(self.defs)
if not node:
raise Errors.WafError('invalid def file %r' % self.defs)
if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
self.env.append_value('LINKFLAGS', '/def:%s' % node.path_from(self.bld.bldnode))
if self.env.def_PATTERN:
self.env.append_value('LINKFLAGS', self.env.def_PATTERN % node.path_from(self.get_cwd()))
self.link_task.dep_nodes.append(node)
else:
#gcc for windows takes *.def file a an input without any special flag
# gcc for windows takes *.def file as input without any special flag
self.link_task.inputs.append(node)

# where to put the import library
@@ -475,10 +532,11 @@ def apply_implib(self):
except AttributeError:
# else, put the library in BINDIR and the import library in LIBDIR
inst_to = '${IMPLIBDIR}'
self.install_task.dest = '${BINDIR}'
self.install_task.install_to = '${BINDIR}'
if not self.env.IMPLIBDIR:
self.env.IMPLIBDIR = self.env.LIBDIR
self.implib_install_task = self.bld.install_files(inst_to, implib, env=self.env, chmod=self.link_task.chmod, task=self.link_task)
self.implib_install_task = self.add_install_files(install_to=inst_to, install_from=implib,
chmod=self.link_task.chmod, task=self.link_task)

# ============ the code above must not know anything about vnum processing on unix platforms =========

@@ -535,34 +593,34 @@ def apply_vnum(self):

# the following task is just to enable execution from the build dir :-/
if self.env.DEST_OS != 'openbsd':
outs = [node.parent.find_or_declare(name3)]
outs = [node.parent.make_node(name3)]
if name2 != name3:
outs.append(node.parent.find_or_declare(name2))
outs.append(node.parent.make_node(name2))
self.create_task('vnum', node, outs)

if getattr(self, 'install_task', None):
self.install_task.hasrun = Task.SKIP_ME
bld = self.bld
path = self.install_task.dest
self.install_task.hasrun = Task.SKIPPED
self.install_task.no_errcheck_out = True
path = self.install_task.install_to
if self.env.DEST_OS == 'openbsd':
libname = self.link_task.outputs[0].name
t1 = bld.install_as('%s%s%s' % (path, os.sep, libname), node, env=self.env, chmod=self.link_task.chmod)
t1 = self.add_install_as(install_to='%s/%s' % (path, libname), install_from=node, chmod=self.link_task.chmod)
self.vnum_install_task = (t1,)
else:
t1 = bld.install_as(path + os.sep + name3, node, env=self.env, chmod=self.link_task.chmod)
t3 = bld.symlink_as(path + os.sep + libname, name3)
t1 = self.add_install_as(install_to=path + os.sep + name3, install_from=node, chmod=self.link_task.chmod)
t3 = self.add_symlink_as(install_to=path + os.sep + libname, install_from=name3)
if name2 != name3:
t2 = bld.symlink_as(path + os.sep + name2, name3)
t2 = self.add_symlink_as(install_to=path + os.sep + name2, install_from=name3)
self.vnum_install_task = (t1, t2, t3)
else:
self.vnum_install_task = (t1, t3)

if '-dynamiclib' in self.env['LINKFLAGS']:
if '-dynamiclib' in self.env.LINKFLAGS:
# this requires after(propagate_uselib_vars)
try:
inst_to = self.install_path
except AttributeError:
inst_to = self.link_task.__class__.inst_to
inst_to = self.link_task.inst_to
if inst_to:
p = Utils.subst_vars(inst_to, self.env)
path = os.path.join(p, name2)
@@ -575,7 +633,6 @@ class vnum(Task.Task):
Create the symbolic links for a versioned shared library. Instances are created by :py:func:`waflib.Tools.ccroot.apply_vnum`
"""
color = 'CYAN'
quient = True
ext_in = ['.bin']
def keyword(self):
return 'Symlinking'
@@ -600,9 +657,6 @@ class fake_shlib(link_task):
for t in self.run_after:
if not t.hasrun:
return Task.ASK_LATER

for x in self.outputs:
x.sig = Utils.h_file(x.abspath())
return Task.SKIP_ME

class fake_stlib(stlink_task):
@@ -613,9 +667,6 @@ class fake_stlib(stlink_task):
for t in self.run_after:
if not t.hasrun:
return Task.ASK_LATER

for x in self.outputs:
x.sig = Utils.h_file(x.abspath())
return Task.SKIP_ME

@conf
@@ -658,7 +709,10 @@ def process_lib(self):
for y in names:
node = x.find_node(y)
if node:
node.sig = Utils.h_file(node.abspath())
try:
Utils.h_file(node.abspath())
except EnvironmentError:
raise ValueError('Could not read %r' % y)
break
else:
continue


+ 1
- 1
waflib/Tools/clang.py View File

@@ -12,7 +12,7 @@ from waflib.Configure import conf
@conf
def find_clang(conf):
"""
Find the program clang and execute it to ensure it really is clang
Finds the program clang and executes it to ensure it really is clang
"""
cc = conf.find_program('clang', var='CC')
conf.get_cc_version(cc, clang=True)


+ 2
- 2
waflib/Tools/clangxx.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy 2009-2010 (ita)
# Thomas Nagy 2009-2018 (ita)

"""
Detect the Clang++ C++ compiler
@@ -12,7 +12,7 @@ from waflib.Configure import conf
@conf
def find_clangxx(conf):
"""
Find the program clang++, and execute it to ensure it really is clang++
Finds the program clang++, and executes it to ensure it really is clang++
"""
cxx = conf.find_program('clang++', var='CXX')
conf.get_cc_version(cxx, clang=True)


+ 15
- 9
waflib/Tools/compiler_c.py View File

@@ -47,10 +47,10 @@ c_compiler = {
'osf1V': ['gcc'],
'gnu': ['gcc', 'clang'],
'java': ['gcc', 'msvc', 'clang', 'icc'],
'default':['gcc', 'clang'],
'default':['clang', 'gcc'],
}
"""
Dict mapping the platform names to Waf tools finding specific C compilers::
Dict mapping platform names to Waf tools finding specific C compilers::

from waflib.Tools.compiler_c import c_compiler
c_compiler['linux'] = ['gcc', 'icc', 'suncc']
@@ -63,10 +63,14 @@ def default_compilers():

def configure(conf):
"""
Try to find a suitable C compiler or raise a :py:class:`waflib.Errors.ConfigurationError`.
Detects a suitable C compiler

:raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
"""
try: test_for_compiler = conf.options.check_c_compiler or default_compilers()
except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_c')")
try:
test_for_compiler = conf.options.check_c_compiler or default_compilers()
except AttributeError:
conf.fatal("Add options(opt): opt.load('compiler_c')")

for compiler in re.split('[ ,]+', test_for_compiler):
conf.env.stash()
@@ -76,19 +80,21 @@ def configure(conf):
except conf.errors.ConfigurationError as e:
conf.env.revert()
conf.end_msg(False)
debug('compiler_c: %r' % e)
debug('compiler_c: %r', e)
else:
if conf.env['CC']:
if conf.env.CC:
conf.end_msg(conf.env.get_flat('CC'))
conf.env['COMPILER_CC'] = compiler
conf.env.COMPILER_CC = compiler
conf.env.commit()
break
conf.env.revert()
conf.end_msg(False)
else:
conf.fatal('could not configure a C compiler!')

def options(opt):
"""
Restrict the compiler detection from the command-line::
This is how to provide compiler preferences on the command-line::

$ waf configure --check-c-compiler=gcc
"""


+ 14
- 8
waflib/Tools/compiler_cxx.py View File

@@ -48,7 +48,7 @@ cxx_compiler = {
'osf1V': ['g++'],
'gnu': ['g++', 'clang++'],
'java': ['g++', 'msvc', 'clang++', 'icpc'],
'default': ['g++', 'clang++']
'default': ['clang++', 'g++']
}
"""
Dict mapping the platform names to Waf tools finding specific C++ compilers::
@@ -64,10 +64,14 @@ def default_compilers():

def configure(conf):
"""
Try to find a suitable C++ compiler or raise a :py:class:`waflib.Errors.ConfigurationError`.
Detects a suitable C++ compiler

:raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
"""
try: test_for_compiler = conf.options.check_cxx_compiler or default_compilers()
except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_cxx')")
try:
test_for_compiler = conf.options.check_cxx_compiler or default_compilers()
except AttributeError:
conf.fatal("Add options(opt): opt.load('compiler_cxx')")

for compiler in re.split('[ ,]+', test_for_compiler):
conf.env.stash()
@@ -77,19 +81,21 @@ def configure(conf):
except conf.errors.ConfigurationError as e:
conf.env.revert()
conf.end_msg(False)
debug('compiler_cxx: %r' % e)
debug('compiler_cxx: %r', e)
else:
if conf.env['CXX']:
if conf.env.CXX:
conf.end_msg(conf.env.get_flat('CXX'))
conf.env['COMPILER_CXX'] = compiler
conf.env.COMPILER_CXX = compiler
conf.env.commit()
break
conf.env.revert()
conf.end_msg(False)
else:
conf.fatal('could not configure a C++ compiler!')

def options(opt):
"""
Restrict the compiler detection from the command-line::
This is how to provide compiler preferences on the command-line::

$ waf configure --check-cxx-compiler=gxx
"""


+ 85
- 0
waflib/Tools/compiler_d.py View File

@@ -0,0 +1,85 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)
# Thomas Nagy, 2016-2018 (ita)

"""
Try to detect a D compiler from the list of supported compilers::

def options(opt):
opt.load('compiler_d')
def configure(cnf):
cnf.load('compiler_d')
def build(bld):
bld.program(source='main.d', target='app')

Only three D compilers are really present at the moment:

* gdc
* dmd, the ldc compiler having a very similar command-line interface
* ldc2
"""

import re
from waflib import Utils, Logs

d_compiler = {
'default' : ['gdc', 'dmd', 'ldc2']
}
"""
Dict mapping the platform names to lists of names of D compilers to try, in order of preference::

from waflib.Tools.compiler_d import d_compiler
d_compiler['default'] = ['gdc', 'dmd', 'ldc2']
"""

def default_compilers():
	"""Return a space-separated string of D compiler names to try for this platform."""
	platform = Utils.unversioned_sys_platform()
	return ' '.join(d_compiler.get(platform, d_compiler['default']))

def configure(conf):
	"""
	Detects a suitable D compiler

	:raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
	"""
	try:
		candidates = conf.options.check_d_compiler or default_compilers()
	except AttributeError:
		conf.fatal("Add options(opt): opt.load('compiler_d')")

	found = False
	for name in re.split('[ ,]+', candidates):
		# snapshot the environment so a failed probe leaves no traces
		conf.env.stash()
		conf.start_msg('Checking for %r (D compiler)' % name)
		try:
			conf.load(name)
		except conf.errors.ConfigurationError as e:
			conf.env.revert()
			conf.end_msg(False)
			Logs.debug('compiler_d: %r', e)
			continue
		if conf.env.D:
			conf.end_msg(conf.env.get_flat('D'))
			conf.env.COMPILER_D = name
			conf.env.commit()
			found = True
			break
		# tool loaded but did not set env.D: discard and try the next one
		conf.env.revert()
		conf.end_msg(False)
	if not found:
		conf.fatal('could not configure a D compiler!')

def options(opt):
	"""
	This is how to provide compiler preferences on the command-line::

		$ waf configure --check-d-compiler=dmd
	"""
	candidates = default_compilers()
	group = opt.add_option_group('Configuration options')
	group.add_option('--check-d-compiler',
		default=None,
		dest='check_d_compiler',
		help='list of D compilers to try [%s]' % candidates)
	# load the option hooks of each candidate tool
	for tool in candidates.split():
		opt.load('%s' % tool)


+ 73
- 0
waflib/Tools/compiler_fc.py View File

@@ -0,0 +1,73 @@
#!/usr/bin/env python
# encoding: utf-8

import re
from waflib import Utils, Logs
from waflib.Tools import fc

fc_compiler = {
'win32' : ['gfortran','ifort'],
'darwin' : ['gfortran', 'g95', 'ifort'],
'linux' : ['gfortran', 'g95', 'ifort'],
'java' : ['gfortran', 'g95', 'ifort'],
'default': ['gfortran'],
'aix' : ['gfortran']
}
"""
Dict mapping the platform names to lists of names of Fortran compilers to try, in order of preference::

from waflib.Tools.compiler_c import c_compiler
c_compiler['linux'] = ['gfortran', 'g95', 'ifort']
"""

def default_compilers():
	"""Return a space-separated string of Fortran compiler names to try for this platform."""
	platform = Utils.unversioned_sys_platform()
	return ' '.join(fc_compiler.get(platform, fc_compiler['default']))

def configure(conf):
	"""
	Detects a suitable Fortran compiler

	:raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
	"""
	try:
		candidates = conf.options.check_fortran_compiler or default_compilers()
	except AttributeError:
		conf.fatal("Add options(opt): opt.load('compiler_fc')")
	for name in re.split('[ ,]+', candidates):
		# snapshot the environment so a failed probe leaves no traces
		conf.env.stash()
		conf.start_msg('Checking for %r (Fortran compiler)' % name)
		try:
			conf.load(name)
		except conf.errors.ConfigurationError as e:
			conf.env.revert()
			conf.end_msg(False)
			Logs.debug('compiler_fortran: %r', e)
			continue
		if conf.env.FC:
			conf.end_msg(conf.env.get_flat('FC'))
			conf.env.COMPILER_FORTRAN = name
			conf.env.commit()
			return
		# tool loaded but did not set env.FC: discard and try the next one
		conf.env.revert()
		conf.end_msg(False)
	conf.fatal('could not configure a Fortran compiler!')

def options(opt):
	"""
	This is how to provide compiler preferences on the command-line::

		$ waf configure --check-fortran-compiler=ifort
	"""
	candidates = default_compilers()
	opt.load_special_tools('fc_*.py')
	group = opt.add_option_group('Configuration options')
	group.add_option('--check-fortran-compiler',
		default=None,
		dest="check_fortran_compiler",
		help='list of Fortran compiler to try [%s]' % candidates)
	# load the option hooks of each candidate tool
	for tool in candidates.split():
		opt.load('%s' % tool)


+ 211
- 0
waflib/Tools/cs.py View File

@@ -0,0 +1,211 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2018 (ita)

"""
C# support. A simple example::

def configure(conf):
conf.load('cs')
def build(bld):
bld(features='cs', source='main.cs', gen='foo')

Note that the configuration may compile C# snippets::

FRAG = '''
namespace Moo {
public class Test { public static int Main(string[] args) { return 0; } }
}'''
def configure(conf):
conf.check(features='cs', fragment=FRAG, compile_filename='test.cs', gen='test.exe',
bintype='exe', csflags=['-pkg:gtk-sharp-2.0'], msg='Checking for Gtksharp support')
"""

from waflib import Utils, Task, Options, Errors
from waflib.TaskGen import before_method, after_method, feature
from waflib.Tools import ccroot
from waflib.Configure import conf

ccroot.USELIB_VARS['cs'] = set(['CSFLAGS', 'ASSEMBLIES', 'RESOURCES'])
ccroot.lib_patterns['csshlib'] = ['%s']

@feature('cs')
@before_method('process_source')
def apply_cs(self):
	"""
	Create a C# task bound to the attribute *cs_task*. There can be only one C# task by task generator.
	"""
	# partition the sources: .cs files feed the C# task, the rest stays in self.source
	nodes = self.to_nodes(self.source)
	cs_nodes = [x for x in nodes if x.name.endswith('.cs')]
	self.source = [x for x in nodes if not x.name.endswith('.cs')]

	bintype = getattr(self, 'bintype', self.gen.endswith('.dll') and 'library' or 'exe')
	tsk = self.create_task('mcs', cs_nodes, self.path.find_or_declare(self.gen))
	self.cs_task = tsk
	tsk.env.CSTYPE = '/target:%s' % bintype
	tsk.env.OUT = '/out:%s' % tsk.outputs[0].abspath()
	self.env.append_value('CSFLAGS', '/platform:%s' % getattr(self, 'platform', 'anycpu'))

	inst_to = getattr(self, 'install_path', bintype == 'exe' and '${BINDIR}' or '${LIBDIR}')
	if inst_to:
		# note: we are making a copy, so the files added to cs_task.outputs won't be installed automatically
		mod = getattr(self, 'chmod', bintype == 'exe' and Utils.O755 or Utils.O644)
		self.install_task = self.add_install_files(install_to=inst_to, install_from=tsk.outputs[:], chmod=mod)

@feature('cs')
@after_method('apply_cs')
def use_cs(self):
	"""
	C# applications honor the **use** keyword::

		def build(bld):
			bld(features='cs', source='My.cs', bintype='library', gen='my.dll', name='mylib')
			bld(features='cs', source='Hi.cs', includes='.', bintype='exe', gen='hi.exe', use='mylib', name='hi')
	"""
	get = self.bld.get_tgen_by_name
	for name in self.to_list(getattr(self, 'use', [])):
		try:
			tg = get(name)
		except Errors.WafError:
			# not a task generator of this build: assume an external assembly reference
			self.env.append_value('CSFLAGS', '/reference:%s' % name)
			continue
		tg.post()

		tsk = getattr(tg, 'cs_task', None) or getattr(tg, 'link_task', None)
		if not tsk:
			self.bld.fatal('cs task has no link task for use %r' % self)
		self.cs_task.dep_nodes.extend(tsk.outputs) # dependency
		self.cs_task.set_run_after(tsk) # order (redundant, the order is inferred from the nodes inputs/outputs)
		self.env.append_value('CSFLAGS', '/reference:%s' % tsk.outputs[0].abspath())

@feature('cs')
@after_method('apply_cs', 'use_cs')
def debug_cs(self):
	"""
	The C# targets may create .mdb or .pdb files::

		def build(bld):
			bld(features='cs', source='My.cs', bintype='library', gen='my.dll', csdebug='full')
			# csdebug is a value in (True, 'full', 'pdbonly')
	"""
	csdebug = getattr(self, 'csdebug', self.env.CSDEBUG)
	if not csdebug:
		return

	# mono emits <name>.mdb next to the assembly, csc-style compilers emit <stem>.pdb
	node = self.cs_task.outputs[0]
	if self.env.CS_NAME == 'mono':
		out = node.parent.find_or_declare(node.name + '.mdb')
	else:
		out = node.change_ext('.pdb')
	self.cs_task.outputs.append(out)

	if getattr(self, 'install_task', None):
		self.pdb_install_task = self.add_install_files(
			install_to=self.install_task.install_to, install_from=out)

	if csdebug == 'pdbonly':
		flags = ['/debug+', '/debug:pdbonly']
	elif csdebug == 'full':
		flags = ['/debug+', '/debug:full']
	else:
		flags = ['/debug-']
	self.env.append_value('CSFLAGS', flags)

@feature('cs')
@after_method('debug_cs')
def doc_cs(self):
	"""
	The C# targets may create .xml documentation files::

		def build(bld):
			bld(features='cs', source='My.cs', bintype='library', gen='my.dll', csdoc=True)
			# csdoc is a boolean value
	"""
	if not getattr(self, 'csdoc', self.env.CSDOC):
		return

	# the xml documentation file sits next to the generated assembly
	assembly = self.cs_task.outputs[0]
	doc_node = assembly.change_ext('.xml')
	self.cs_task.outputs.append(doc_node)

	if getattr(self, 'install_task', None):
		self.doc_install_task = self.add_install_files(
			install_to=self.install_task.install_to, install_from=doc_node)

	self.env.append_value('CSFLAGS', '/doc:%s' % doc_node.abspath())

class mcs(Task.Task):
	"""
	Compiles C# files into an assembly
	"""
	color = 'YELLOW'
	run_str = '${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}'

	def split_argfile(self, cmd):
		"""
		Distributes the arguments between the command line and a response file:
		csc does not accept /noconfig inside an @file, so it is kept inline.
		"""
		tail = cmd[1:]
		inline = [cmd[0]] + [arg for arg in tail if arg.lower() == '/noconfig']
		infile = [self.quote_flag(arg) for arg in tail if arg.lower() != '/noconfig']
		return (inline, infile)

def configure(conf):
	"""
	Finds a C# compiler, sets the variable MCS for the compiler and CS_NAME (mono or csc)
	"""
	# an explicit --with-csc-binary value pre-seeds MCS for find_program
	explicit = getattr(Options.options, 'cscbinary', None)
	if explicit:
		conf.env.MCS = explicit
	conf.find_program(['csc', 'mcs', 'gmcs'], var='MCS')
	conf.env.ASS_ST = '/r:%s'
	conf.env.RES_ST = '/resource:%s'

	# heuristic: a binary whose name contains 'mcs' is the mono compiler
	conf.env.CS_NAME = 'mono' if 'mcs' in str(conf.env.MCS).lower() else 'csc'

def options(opt):
	"""
	Adds the ``--with-csc-binary`` command-line option for the configuration::

		$ waf configure --with-csc-binary=/foo/bar/mcs
	"""
	opt.add_option('--with-csc-binary', type='string', dest='cscbinary')

class fake_csshlib(Task.Task):
	"""
	Task used for reading a foreign .net assembly and adding the dependency on it
	"""
	color = 'YELLOW'
	inst_to = None  # pre-existing assemblies are never installed

	def runnable_status(self):
		# nothing to build: the assembly already exists, this task only
		# participates in dependency tracking
		return Task.SKIP_ME

@conf
def read_csshlib(self, name, paths=None):
	"""
	Reads a foreign .net assembly for the *use* system::

		def build(bld):
			bld.read_csshlib('ManagedLibrary.dll', paths=[bld.env.mylibrarypath])
			bld(features='cs', source='Hi.cs', bintype='exe', gen='hi.exe', use='ManagedLibrary.dll')

	:param name: Name of the library
	:type name: string
	:param paths: Folders in which the library may be found (defaults to an empty list)
	:type paths: list of string
	:return: A task generator having the feature *fake_lib* which will call :py:func:`waflib.Tools.ccroot.process_lib`
	:rtype: :py:class:`waflib.TaskGen.task_gen`
	"""
	# fix: a mutable default argument ([]) is shared across calls; use a
	# None sentinel instead -- the effective default is unchanged
	if paths is None:
		paths = []
	return self(name=name, features='fake_lib', lib_paths=paths, lib_type='csshlib')


+ 7
- 7
waflib/Tools/cxx.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
# Thomas Nagy, 2005-2018 (ita)

"Base for c++ programs and libraries"

@@ -10,31 +10,31 @@ from waflib.Tools.ccroot import link_task, stlink_task

@TaskGen.extension('.cpp','.cc','.cxx','.C','.c++')
def cxx_hook(self, node):
"Bind the c++ file extensions to the creation of a :py:class:`waflib.Tools.cxx.cxx` instance"
"Binds c++ file extensions to create :py:class:`waflib.Tools.cxx.cxx` instances"
return self.create_compiled_task('cxx', node)

if not '.c' in TaskGen.task_gen.mappings:
TaskGen.task_gen.mappings['.c'] = TaskGen.task_gen.mappings['.cpp']

class cxx(Task.Task):
"Compile C++ files into object files"
run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()}'
"Compiles C++ files into object files"
run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
vars = ['CXXDEPS'] # unused variable to depend on, just in case
ext_in = ['.h'] # set the build order easily by using ext_out=['.h']
scan = c_preproc.scan

class cxxprogram(link_task):
"Link object files into a c++ program"
"Links object files into c++ programs"
run_str = '${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
vars = ['LINKDEPS']
ext_out = ['.bin']
inst_to = '${BINDIR}'

class cxxshlib(cxxprogram):
"Link object files into a c++ shared library"
"Links object files into c++ shared libraries"
inst_to = '${LIBDIR}'

class cxxstlib(stlink_task):
"Link object files into a c++ static library"
"Links object files into c++ static libraries"
pass # do not remove


+ 97
- 0
waflib/Tools/d.py View File

@@ -0,0 +1,97 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)
# Thomas Nagy, 2007-2018 (ita)

from waflib import Utils, Task, Errors
from waflib.TaskGen import taskgen_method, feature, extension
from waflib.Tools import d_scan, d_config
from waflib.Tools.ccroot import link_task, stlink_task

class d(Task.Task):
"Compile a d file into an object file"
color = 'GREEN'
run_str = '${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_SRC_F:SRC} ${D_TGT_F:TGT}'
scan = d_scan.scan

class d_with_header(d):
"Compile a d file and generate a header"
run_str = '${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_HDR_F:tgt.outputs[1].bldpath()} ${D_SRC_F:SRC} ${D_TGT_F:tgt.outputs[0].bldpath()}'

class d_header(Task.Task):
"Compile d headers"
color = 'BLUE'
run_str = '${D} ${D_HEADER} ${SRC}'

class dprogram(link_task):
"Link object files into a d program"
run_str = '${D_LINKER} ${LINKFLAGS} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F:TGT} ${RPATH_ST:RPATH} ${DSTLIB_MARKER} ${DSTLIBPATH_ST:STLIBPATH} ${DSTLIB_ST:STLIB} ${DSHLIB_MARKER} ${DLIBPATH_ST:LIBPATH} ${DSHLIB_ST:LIB}'
inst_to = '${BINDIR}'

class dshlib(dprogram):
"Link object files into a d shared library"
inst_to = '${LIBDIR}'

class dstlib(stlink_task):
"Link object files into a d static library"
pass # do not remove

@extension('.d', '.di', '.D')
def d_hook(self, node):
"""
Compile *D* files. To get .di files as well as .o files, set the following::

def build(bld):
bld.program(source='foo.d', target='app', generate_headers=True)

"""
ext = Utils.destos_to_binfmt(self.env.DEST_OS) == 'pe' and 'obj' or 'o'
out = '%s.%d.%s' % (node.name, self.idx, ext)
def create_compiled_task(self, name, node):
task = self.create_task(name, node, node.parent.find_or_declare(out))
try:
self.compiled_tasks.append(task)
except AttributeError:
self.compiled_tasks = [task]
return task

if getattr(self, 'generate_headers', None):
tsk = create_compiled_task(self, 'd_with_header', node)
tsk.outputs.append(node.change_ext(self.env.DHEADER_ext))
else:
tsk = create_compiled_task(self, 'd', node)
return tsk

@taskgen_method
def generate_header(self, filename):
"""
See feature request #104::

def build(bld):
tg = bld.program(source='foo.d', target='app')
tg.generate_header('blah.d')
# is equivalent to:
#tg = bld.program(source='foo.d', target='app', header_lst='blah.d')

:param filename: header to create
:type filename: string
"""
try:
self.header_lst.append([filename, self.install_path])
except AttributeError:
self.header_lst = [[filename, self.install_path]]

@feature('d')
def process_header(self):
"""
Process the attribute 'header_lst' to create the d header compilation tasks::

def build(bld):
bld.program(source='foo.d', target='app', header_lst='blah.d')
"""
for i in getattr(self, 'header_lst', []):
node = self.path.find_resource(i[0])
if not node:
raise Errors.WafError('file %r not found on d obj' % i[0])
self.create_task('d_header', node, node.change_ext('.di'))


+ 64
- 0
waflib/Tools/d_config.py View File

@@ -0,0 +1,64 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2016-2018 (ita)

from waflib import Utils
from waflib.Configure import conf

@conf
def d_platform_flags(self):
"""
Sets the extensions dll/so for d programs and libraries
"""
v = self.env
if not v.DEST_OS:
v.DEST_OS = Utils.unversioned_sys_platform()
binfmt = Utils.destos_to_binfmt(self.env.DEST_OS)
if binfmt == 'pe':
v.dprogram_PATTERN = '%s.exe'
v.dshlib_PATTERN = 'lib%s.dll'
v.dstlib_PATTERN = 'lib%s.a'
elif binfmt == 'mac-o':
v.dprogram_PATTERN = '%s'
v.dshlib_PATTERN = 'lib%s.dylib'
v.dstlib_PATTERN = 'lib%s.a'
else:
v.dprogram_PATTERN = '%s'
v.dshlib_PATTERN = 'lib%s.so'
v.dstlib_PATTERN = 'lib%s.a'

DLIB = '''
version(D_Version2) {
import std.stdio;
int main() {
writefln("phobos2");
return 0;
}
} else {
version(Tango) {
import tango.stdc.stdio;
int main() {
printf("tango");
return 0;
}
} else {
import std.stdio;
int main() {
writefln("phobos1");
return 0;
}
}
}
'''
"""Detection string for the D standard library"""

@conf
def check_dlibrary(self, execute=True):
"""
Detects the kind of standard library that comes with the compiler,
and sets conf.env.DLIBRARY to tango, phobos1 or phobos2
"""
ret = self.check_cc(features='d dprogram', fragment=DLIB, compile_filename='test.d', execute=execute, define_ret=True)
if execute:
self.env.DLIBRARY = ret.strip()


+ 211
- 0
waflib/Tools/d_scan.py View File

@@ -0,0 +1,211 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2016-2018 (ita)

"""
Provide a scanner for finding dependencies on d files
"""

import re
from waflib import Utils

def filter_comments(filename):
"""
:param filename: d file name
:type filename: string
:rtype: list
:return: a list of characters
"""
txt = Utils.readf(filename)
i = 0
buf = []
max = len(txt)
begin = 0
while i < max:
c = txt[i]
if c == '"' or c == "'": # skip a string or character literal
buf.append(txt[begin:i])
delim = c
i += 1
while i < max:
c = txt[i]
if c == delim:
break
elif c == '\\': # skip the character following backslash
i += 1
i += 1
i += 1
begin = i
elif c == '/': # try to replace a comment with whitespace
buf.append(txt[begin:i])
i += 1
if i == max:
break
c = txt[i]
if c == '+': # eat nesting /+ +/ comment
i += 1
nesting = 1
c = None
while i < max:
prev = c
c = txt[i]
if prev == '/' and c == '+':
nesting += 1
c = None
elif prev == '+' and c == '/':
nesting -= 1
if nesting == 0:
break
c = None
i += 1
elif c == '*': # eat /* */ comment
i += 1
c = None
while i < max:
prev = c
c = txt[i]
if prev == '*' and c == '/':
break
i += 1
elif c == '/': # eat // comment
i += 1
while i < max and txt[i] != '\n':
i += 1
else: # no comment
begin = i - 1
continue
i += 1
begin = i
buf.append(' ')
else:
i += 1
buf.append(txt[begin:])
return buf

class d_parser(object):
"""
Parser for d files
"""
def __init__(self, env, incpaths):
#self.code = ''
#self.module = ''
#self.imports = []

self.allnames = []

self.re_module = re.compile("module\s+([^;]+)")
self.re_import = re.compile("import\s+([^;]+)")
self.re_import_bindings = re.compile("([^:]+):(.*)")
self.re_import_alias = re.compile("[^=]+=(.+)")

self.env = env

self.nodes = []
self.names = []

self.incpaths = incpaths

def tryfind(self, filename):
"""
Search for a file matching a module/import directive

:param filename: file to read
:type filename: string
"""
found = 0
for n in self.incpaths:
found = n.find_resource(filename.replace('.', '/') + '.d')
if found:
self.nodes.append(found)
self.waiting.append(found)
break
if not found:
if not filename in self.names:
self.names.append(filename)

def get_strings(self, code):
"""
:param code: d code to parse
:type code: string
:return: the modules that the code uses
:rtype: a list of match objects
"""
#self.imports = []
self.module = ''
lst = []

# get the module name (if present)

mod_name = self.re_module.search(code)
if mod_name:
self.module = re.sub('\s+', '', mod_name.group(1)) # strip all whitespaces

# go through the code, have a look at all import occurrences

# first, lets look at anything beginning with "import" and ending with ";"
import_iterator = self.re_import.finditer(code)
if import_iterator:
for import_match in import_iterator:
import_match_str = re.sub('\s+', '', import_match.group(1)) # strip all whitespaces

# does this end with an import bindings declaration?
# (import bindings always terminate the list of imports)
bindings_match = self.re_import_bindings.match(import_match_str)
if bindings_match:
import_match_str = bindings_match.group(1)
# if so, extract the part before the ":" (since the module declaration(s) is/are located there)

# split the matching string into a bunch of strings, separated by a comma
matches = import_match_str.split(',')

for match in matches:
alias_match = self.re_import_alias.match(match)
if alias_match:
# is this an alias declaration? (alias = module name) if so, extract the module name
match = alias_match.group(1)

lst.append(match)
return lst

def start(self, node):
"""
The parsing starts here

:param node: input file
:type node: :py:class:`waflib.Node.Node`
"""
self.waiting = [node]
# while the stack is not empty, add the dependencies
while self.waiting:
nd = self.waiting.pop(0)
self.iter(nd)

def iter(self, node):
"""
Find all the modules that a file depends on, uses :py:meth:`waflib.Tools.d_scan.d_parser.tryfind` to process dependent files

:param node: input file
:type node: :py:class:`waflib.Node.Node`
"""
path = node.abspath() # obtain the absolute path
code = "".join(filter_comments(path)) # read the file and filter the comments
names = self.get_strings(code) # obtain the import strings
for x in names:
# optimization
if x in self.allnames:
continue
self.allnames.append(x)

# for each name, see if it is like a node or not
self.tryfind(x)

def scan(self):
"look for .d/.di used by a d file"
env = self.env
gruik = d_parser(env, self.generator.includes_nodes)
node = self.inputs[0]
gruik.start(node)
nodes = gruik.nodes
names = gruik.names
return (nodes, names)


+ 70
- 0
waflib/Tools/dbus.py View File

@@ -0,0 +1,70 @@
#!/usr/bin/env python
# encoding: utf-8
# Ali Sabil, 2007

"""
Compiles dbus files with **dbus-binding-tool**

Typical usage::

def options(opt):
opt.load('compiler_c dbus')
def configure(conf):
conf.load('compiler_c dbus')
def build(bld):
tg = bld.program(
includes = '.',
source = bld.path.ant_glob('*.c'),
target = 'gnome-hello')
tg.add_dbus_file('test.xml', 'test_prefix', 'glib-server')
"""

from waflib import Task, Errors
from waflib.TaskGen import taskgen_method, before_method

@taskgen_method
def add_dbus_file(self, filename, prefix, mode):
"""
Adds a dbus file to the list of dbus files to process. Store them in the attribute *dbus_lst*.

:param filename: xml file to compile
:type filename: string
:param prefix: dbus binding tool prefix (--prefix=prefix)
:type prefix: string
:param mode: dbus binding tool mode (--mode=mode)
:type mode: string
"""
if not hasattr(self, 'dbus_lst'):
self.dbus_lst = []
if not 'process_dbus' in self.meths:
self.meths.append('process_dbus')
self.dbus_lst.append([filename, prefix, mode])

@before_method('process_source')
def process_dbus(self):
"""
Processes the dbus files stored in the attribute *dbus_lst* to create :py:class:`waflib.Tools.dbus.dbus_binding_tool` instances.
"""
for filename, prefix, mode in getattr(self, 'dbus_lst', []):
node = self.path.find_resource(filename)
if not node:
raise Errors.WafError('file not found ' + filename)
tsk = self.create_task('dbus_binding_tool', node, node.change_ext('.h'))
tsk.env.DBUS_BINDING_TOOL_PREFIX = prefix
tsk.env.DBUS_BINDING_TOOL_MODE = mode

class dbus_binding_tool(Task.Task):
"""
Compiles a dbus file
"""
color = 'BLUE'
ext_out = ['.h']
run_str = '${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}'
shell = True # temporary workaround for #795

def configure(conf):
"""
Detects the program dbus-binding-tool and sets ``conf.env.DBUS_BINDING_TOOL``
"""
conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL')


+ 80
- 0
waflib/Tools/dmd.py View File

@@ -0,0 +1,80 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)
# Thomas Nagy, 2008-2018 (ita)

import sys
from waflib.Tools import ar, d
from waflib.Configure import conf

@conf
def find_dmd(conf):
"""
Finds the program *dmd*, *dmd2*, or *ldc* and set the variable *D*
"""
conf.find_program(['dmd', 'dmd2', 'ldc'], var='D')

# make sure that we're dealing with dmd1, dmd2, or ldc(1)
out = conf.cmd_and_log(conf.env.D + ['--help'])
if out.find("D Compiler v") == -1:
out = conf.cmd_and_log(conf.env.D + ['-version'])
if out.find("based on DMD v1.") == -1:
conf.fatal("detected compiler is not dmd/ldc")

@conf
def common_flags_ldc(conf):
"""
Sets the D flags required by *ldc*
"""
v = conf.env
v.DFLAGS = ['-d-version=Posix']
v.LINKFLAGS = []
v.DFLAGS_dshlib = ['-relocation-model=pic']

@conf
def common_flags_dmd(conf):
"""
Set the flags required by *dmd* or *dmd2*
"""
v = conf.env

v.D_SRC_F = ['-c']
v.D_TGT_F = '-of%s'

v.D_LINKER = v.D
v.DLNK_SRC_F = ''
v.DLNK_TGT_F = '-of%s'
v.DINC_ST = '-I%s'

v.DSHLIB_MARKER = v.DSTLIB_MARKER = ''
v.DSTLIB_ST = v.DSHLIB_ST = '-L-l%s'
v.DSTLIBPATH_ST = v.DLIBPATH_ST = '-L-L%s'

v.LINKFLAGS_dprogram= ['-quiet']

v.DFLAGS_dshlib = ['-fPIC']
v.LINKFLAGS_dshlib = ['-L-shared']

v.DHEADER_ext = '.di'
v.DFLAGS_d_with_header = ['-H', '-Hf']
v.D_HDR_F = '%s'

def configure(conf):
"""
Configuration for *dmd*, *dmd2*, and *ldc*
"""
conf.find_dmd()

if sys.platform == 'win32':
out = conf.cmd_and_log(conf.env.D + ['--help'])
if out.find('D Compiler v2.') > -1:
conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead')

conf.load('ar')
conf.load('d')
conf.common_flags_dmd()
conf.d_platform_flags()

if str(conf.env.D).find('ldc') > -1:
conf.common_flags_ldc()


+ 51
- 37
waflib/Tools/errcheck.py View File

@@ -3,9 +3,9 @@
# Thomas Nagy, 2011 (ita)

"""
errcheck: highlight common mistakes
Common mistakes highlighting.

There is a performance hit, so this tool is only loaded when running "waf -v"
There is a performance impact, so this tool is only loaded when running ``waf -v``
"""

typos = {
@@ -18,13 +18,14 @@ typos = {
'importpath':'includes',
'installpath':'install_path',
'iscopy':'is_copy',
'uses':'use',
}

meths_typos = ['__call__', 'program', 'shlib', 'stlib', 'objects']

import sys
from waflib import Logs, Build, Node, Task, TaskGen, ConfigSet, Errors, Utils
import waflib.Tools.ccroot
from waflib.Tools import ccroot

def check_same_targets(self):
mp = Utils.defaultdict(list)
@@ -33,6 +34,8 @@ def check_same_targets(self):
def check_task(tsk):
if not isinstance(tsk, Task.Task):
return
if hasattr(tsk, 'no_errcheck_out'):
return

for node in tsk.outputs:
mp[node].append(tsk)
@@ -58,30 +61,34 @@ def check_same_targets(self):
Logs.error(msg)
for x in v:
if Logs.verbose > 1:
Logs.error(' %d. %r' % (1 + v.index(x), x.generator))
Logs.error(' %d. %r', 1 + v.index(x), x.generator)
else:
Logs.error(' %d. %r in %r' % (1 + v.index(x), x.generator.name, getattr(x.generator, 'path', None)))
Logs.error(' %d. %r in %r', 1 + v.index(x), x.generator.name, getattr(x.generator, 'path', None))
Logs.error('If you think that this is an error, set no_errcheck_out on the task instance')

if not dupe:
for (k, v) in uids.items():
if len(v) > 1:
Logs.error('* Several tasks use the same identifier. Please check the information on\n https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid')
tg_details = tsk.generator.name
if Logs.verbose > 2:
tg_details = tsk.generator
for tsk in v:
Logs.error(' - object %r (%r) defined in %r' % (tsk.__class__.__name__, tsk, tsk.generator))
Logs.error(' - object %r (%r) defined in %r', tsk.__class__.__name__, tsk, tg_details)

def check_invalid_constraints(self):
feat = set([])
feat = set()
for x in list(TaskGen.feats.values()):
feat.union(set(x))
for (x, y) in TaskGen.task_gen.prec.items():
feat.add(x)
feat.union(set(y))
ext = set([])
ext = set()
for x in TaskGen.task_gen.mappings.values():
ext.add(x.__name__)
invalid = ext & feat
if invalid:
Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method' % list(invalid))
Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method', list(invalid))

# the build scripts have been read, so we can check for invalid after/before attributes on task classes
for cls in list(Task.classes.values()):
@@ -90,15 +97,15 @@ def check_invalid_constraints(self):

for x in ('before', 'after'):
for y in Utils.to_list(getattr(cls, x, [])):
if not Task.classes.get(y, None):
Logs.error('Erroneous order constraint %r=%r on task class %r' % (x, y, cls.__name__))
if not Task.classes.get(y):
Logs.error('Erroneous order constraint %r=%r on task class %r', x, y, cls.__name__)
if getattr(cls, 'rule', None):
Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")' % cls.__name__)
Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")', cls.__name__)

def replace(m):
"""
We could add properties, but they would not work in some cases:
bld.program(...) requires 'source' in the attributes
Replaces existing BuildContext methods to verify parameter names,
for example ``bld(source=)`` has no ending *s*
"""
oldcall = getattr(Build.BuildContext, m)
def call(self, *k, **kw):
@@ -107,13 +114,13 @@ def replace(m):
if x in kw:
if x == 'iscopy' and 'subst' in getattr(self, 'features', ''):
continue
Logs.error('Fix the typo %r -> %r on %r' % (x, typos[x], ret))
Logs.error('Fix the typo %r -> %r on %r', x, typos[x], ret)
return ret
setattr(Build.BuildContext, m, call)

def enhance_lib():
"""
modify existing classes and methods
Modifies existing classes and methods to enable error verification
"""
for m in meths_typos:
replace(m)
@@ -121,26 +128,36 @@ def enhance_lib():
# catch '..' in ant_glob patterns
def ant_glob(self, *k, **kw):
if k:
lst=Utils.to_list(k[0])
lst = Utils.to_list(k[0])
for pat in lst:
if '..' in pat.split('/'):
Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'" % k[0])
if kw.get('remove', True):
try:
if self.is_child_of(self.ctx.bldnode) and not kw.get('quiet', False):
Logs.error('Using ant_glob on the build folder (%r) is dangerous (quiet=True to disable this warning)' % self)
except AttributeError:
pass
sp = pat.split('/')
if '..' in sp:
Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'", k[0])
if '.' in sp:
Logs.error("In ant_glob pattern %r: '.' means 'one dot', not 'current directory'", k[0])
return self.old_ant_glob(*k, **kw)
Node.Node.old_ant_glob = Node.Node.ant_glob
Node.Node.ant_glob = ant_glob

# catch ant_glob on build folders
def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True, quiet=False):
if remove:
try:
if self.is_child_of(self.ctx.bldnode) and not quiet:
quiet = True
Logs.error('Calling ant_glob on build folders (%r) is dangerous: add quiet=True / remove=False', self)
except AttributeError:
pass
return self.old_ant_iter(accept, maxdepth, pats, dir, src, remove, quiet)
Node.Node.old_ant_iter = Node.Node.ant_iter
Node.Node.ant_iter = ant_iter

# catch conflicting ext_in/ext_out/before/after declarations
old = Task.is_before
def is_before(t1, t2):
ret = old(t1, t2)
if ret and old(t2, t1):
Logs.error('Contradictory order constraints in classes %r %r' % (t1, t2))
Logs.error('Contradictory order constraints in classes %r %r', t1, t2)
return ret
Task.is_before = is_before

@@ -152,7 +169,7 @@ def enhance_lib():
Logs.error('feature shlib -> cshlib, dshlib or cxxshlib')
for x in ('c', 'cxx', 'd', 'fc'):
if not x in lst and lst and lst[0] in [x+y for y in ('program', 'shlib', 'stlib')]:
Logs.error('%r features is probably missing %r' % (self, x))
Logs.error('%r features is probably missing %r', self, x)
TaskGen.feature('*')(check_err_features)

# check for erroneous order constraints
@@ -160,12 +177,12 @@ def enhance_lib():
if not hasattr(self, 'rule') and not 'subst' in Utils.to_list(self.features):
for x in ('before', 'after', 'ext_in', 'ext_out'):
if hasattr(self, x):
Logs.warn('Erroneous order constraint %r on non-rule based task generator %r' % (x, self))
Logs.warn('Erroneous order constraint %r on non-rule based task generator %r', x, self)
else:
for x in ('before', 'after'):
for y in self.to_list(getattr(self, x, [])):
if not Task.classes.get(y, None):
Logs.error('Erroneous order constraint %s=%r on %r (no such class)' % (x, y, self))
if not Task.classes.get(y):
Logs.error('Erroneous order constraint %s=%r on %r (no such class)', x, y, self)
TaskGen.feature('*')(check_err_order)

# check for @extension used with @feature/@before_method/@after_method
@@ -200,24 +217,21 @@ def enhance_lib():
TaskGen.task_gen.use_rec = use_rec

# check for env.append
def getattri(self, name, default=None):
def _getattr(self, name, default=None):
if name == 'append' or name == 'add':
raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique')
elif name == 'prepend':
raise Errors.WafError('env.prepend does not exist: use env.prepend_value')
if name in self.__slots__:
return object.__getattr__(self, name, default)
return super(ConfigSet.ConfigSet, self).__getattr__(name, default)
else:
return self[name]
ConfigSet.ConfigSet.__getattr__ = getattri
ConfigSet.ConfigSet.__getattr__ = _getattr


def options(opt):
"""
Add a few methods
Error verification can be enabled by default (not just on ``waf -v``) by adding to the user script options
"""
enhance_lib()

def configure(conf):
pass


+ 189
- 0
waflib/Tools/fc.py View File

@@ -0,0 +1,189 @@
#! /usr/bin/env python
# encoding: utf-8
# DC 2008
# Thomas Nagy 2016-2018 (ita)

"""
Fortran support
"""

from waflib import Utils, Task, Errors
from waflib.Tools import ccroot, fc_config, fc_scan
from waflib.TaskGen import extension
from waflib.Configure import conf

ccroot.USELIB_VARS['fc'] = set(['FCFLAGS', 'DEFINES', 'INCLUDES', 'FCPPFLAGS'])
ccroot.USELIB_VARS['fcprogram_test'] = ccroot.USELIB_VARS['fcprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
ccroot.USELIB_VARS['fcshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
ccroot.USELIB_VARS['fcstlib'] = set(['ARFLAGS', 'LINKDEPS'])

@extension('.f','.F','.f90','.F90','.for','.FOR','.f95','.F95','.f03','.F03','.f08','.F08')
def fc_hook(self, node):
"Binds the Fortran file extensions to create :py:class:`waflib.Tools.fc.fc` instances"
return self.create_compiled_task('fc', node)

@conf
def modfile(conf, name):
"""
Turns a module name into the right module file name.
Defaults to all lower case.
"""
return {'lower' :name.lower() + '.mod',
'lower.MOD' :name.lower() + '.MOD',
'UPPER.mod' :name.upper() + '.mod',
'UPPER' :name.upper() + '.MOD'}[conf.env.FC_MOD_CAPITALIZATION or 'lower']

def get_fortran_tasks(tsk):
"""
Obtains all fortran tasks from the same build group. Those tasks must not have
the attribute 'nomod' or 'mod_fortran_done'

:return: a list of :py:class:`waflib.Tools.fc.fc` instances
"""
bld = tsk.generator.bld
tasks = bld.get_tasks_group(bld.get_group_idx(tsk.generator))
return [x for x in tasks if isinstance(x, fc) and not getattr(x, 'nomod', None) and not getattr(x, 'mod_fortran_done', None)]

class fc(Task.Task):
"""
Fortran tasks can only run when all fortran tasks in a current task group are ready to be executed
This may cause a deadlock if some fortran task is waiting for something that cannot happen (circular dependency)
Should this ever happen, set the 'nomod=True' on those tasks instances to break the loop
"""
color = 'GREEN'
run_str = '${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()} ${FCPPFLAGS}'
vars = ["FORTRANMODPATHFLAG"]

def scan(self):
"""Fortran dependency scanner"""
tmp = fc_scan.fortran_parser(self.generator.includes_nodes)
tmp.task = self
tmp.start(self.inputs[0])
return (tmp.nodes, tmp.names)

def runnable_status(self):
"""
Sets the mod file outputs and the dependencies on the mod files over all Fortran tasks
executed by the main thread so there are no concurrency issues
"""
if getattr(self, 'mod_fortran_done', None):
return super(fc, self).runnable_status()

# now, if we reach this part it is because this fortran task is the first in the list
bld = self.generator.bld

# obtain the fortran tasks
lst = get_fortran_tasks(self)

# disable this method for other tasks
for tsk in lst:
tsk.mod_fortran_done = True

# wait for all the .f tasks to be ready for execution
# and ensure that the scanners are called at least once
for tsk in lst:
ret = tsk.runnable_status()
if ret == Task.ASK_LATER:
# we have to wait for one of the other fortran tasks to be ready
# this may deadlock if there are dependencies between fortran tasks
# but this should not happen (we are setting them here!)
for x in lst:
x.mod_fortran_done = None

return Task.ASK_LATER

ins = Utils.defaultdict(set)
outs = Utils.defaultdict(set)

# the .mod files to create
for tsk in lst:
key = tsk.uid()
for x in bld.raw_deps[key]:
if x.startswith('MOD@'):
name = bld.modfile(x.replace('MOD@', ''))
node = bld.srcnode.find_or_declare(name)
tsk.set_outputs(node)
outs[node].add(tsk)

# the .mod files to use
for tsk in lst:
key = tsk.uid()
for x in bld.raw_deps[key]:
if x.startswith('USE@'):
name = bld.modfile(x.replace('USE@', ''))
node = bld.srcnode.find_resource(name)
if node and node not in tsk.outputs:
if not node in bld.node_deps[key]:
bld.node_deps[key].append(node)
ins[node].add(tsk)

# if the intersection matches, set the order
for k in ins.keys():
for a in ins[k]:
a.run_after.update(outs[k])
for x in outs[k]:
self.generator.bld.producer.revdeps[x].add(a)

# the scanner cannot output nodes, so we have to set them
# ourselves as task.dep_nodes (additional input nodes)
tmp = []
for t in outs[k]:
tmp.extend(t.outputs)
a.dep_nodes.extend(tmp)
a.dep_nodes.sort(key=lambda x: x.abspath())

# the task objects have changed: clear the signature cache
for tsk in lst:
try:
delattr(tsk, 'cache_sig')
except AttributeError:
pass

return super(fc, self).runnable_status()

class fcprogram(ccroot.link_task):
"""Links Fortran programs"""
color = 'YELLOW'
run_str = '${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB} ${LDFLAGS}'
inst_to = '${BINDIR}'

class fcshlib(fcprogram):
"""Links Fortran libraries"""
inst_to = '${LIBDIR}'

class fcstlib(ccroot.stlink_task):
"""Links Fortran static libraries (uses ar by default)"""
pass # do not remove the pass statement

class fcprogram_test(fcprogram):
"""Custom link task to obtain compiler outputs for Fortran configuration tests"""

def runnable_status(self):
"""This task is always executed"""
ret = super(fcprogram_test, self).runnable_status()
if ret == Task.SKIP_ME:
ret = Task.RUN_ME
return ret

def exec_command(self, cmd, **kw):
"""Stores the compiler std our/err onto the build context, to bld.out + bld.err"""
bld = self.generator.bld

kw['shell'] = isinstance(cmd, str)
kw['stdout'] = kw['stderr'] = Utils.subprocess.PIPE
kw['cwd'] = self.get_cwd()
bld.out = bld.err = ''

bld.to_log('command: %s\n' % cmd)

kw['output'] = 0
try:
(bld.out, bld.err) = bld.cmd_and_log(cmd, **kw)
except Errors.WafError:
return -1

if bld.out:
bld.to_log('out: %s\n' % bld.out)
if bld.err:
bld.to_log('err: %s\n' % bld.err)


+ 488
- 0
waflib/Tools/fc_config.py View File

@@ -0,0 +1,488 @@
#! /usr/bin/env python
# encoding: utf-8
# DC 2008
# Thomas Nagy 2016-2018 (ita)

"""
Fortran configuration helpers
"""

import re, os, sys, shlex
from waflib.Configure import conf
from waflib.TaskGen import feature, before_method

# Minimal program used by most Fortran configuration checks
FC_FRAGMENT = ' program main\n end program main\n'
# Fixed-form variant used by checks that compile a '.f' (fixed-form) file
FC_FRAGMENT2 = ' PROGRAM MAIN\n END\n'

@conf
def fc_flags(conf):
	"""
	Defines common Fortran configuration flags and file patterns
	"""
	env = conf.env

	# compilation
	env.FC_SRC_F = []
	env.FC_TGT_F = ['-c', '-o']
	env.FCINCPATH_ST = '-I%s'
	env.FCDEFINES_ST = '-D%s'

	# linking: fall back to the compiler driver when no linker is configured
	if not env.LINK_FC:
		env.LINK_FC = env.FC
	env.FCLNK_SRC_F = []
	env.FCLNK_TGT_F = ['-o']

	# library lookup flags
	env.FCLIB_ST = '-l%s'
	env.FCLIBPATH_ST = '-L%s'
	env.FCSTLIB_ST = '-l%s'
	env.FCSTLIBPATH_ST = '-L%s'
	env.FCSTLIB_MARKER = '-Wl,-Bstatic'
	env.FCSHLIB_MARKER = '-Wl,-Bdynamic'

	# shared/static library build settings
	env.FCFLAGS_fcshlib = ['-fpic']
	env.LINKFLAGS_fcshlib = ['-shared']
	env.fcshlib_PATTERN = 'lib%s.so'
	env.fcstlib_PATTERN = 'lib%s.a'

	env.SONAME_ST = '-Wl,-h,%s'

@conf
def fc_add_flags(conf):
	"""
	Adds FCPPFLAGS / FCFLAGS / LINKFLAGS / LDFLAGS from os.environ to conf.env
	"""
	conf.add_os_flags('FCPPFLAGS', dup=False)
	conf.add_os_flags('FCFLAGS', dup=False)
	conf.add_os_flags('LINKFLAGS', dup=False)
	conf.add_os_flags('LDFLAGS', dup=False)

@conf
def check_fortran(self, *k, **kw):
	"""
	Compiles a minimal Fortran program to verify that the compiler settings work
	"""
	test_settings = dict(
		fragment = FC_FRAGMENT,
		compile_filename = 'test.f',
		features = 'fc fcprogram',
		msg = 'Compiling a simple fortran app',
	)
	self.check_cc(**test_settings)

@conf
def check_fc(self, *k, **kw):
	"""
	Same as :py:func:`waflib.Tools.c_config.check` but defaults to the *Fortran* programming language
	(this overrides the C defaults in :py:func:`waflib.Tools.c_config.validate_c`)
	"""
	kw['compiler'] = 'fc'
	# fill in the Fortran defaults without clobbering caller-provided values
	kw.setdefault('compile_mode', 'fc')
	kw.setdefault('type', 'fcprogram')
	kw.setdefault('compile_filename', 'test.f90')
	kw.setdefault('code', FC_FRAGMENT)
	return self.check(*k, **kw)

# ------------------------------------------------------------------------
# --- These are the default platform modifiers, refactored here for
# convenience. gfortran and g95 have much overlap.
# ------------------------------------------------------------------------

@conf
def fortran_modifier_darwin(conf):
	"""
	Defines Fortran flags and extensions for OSX systems
	"""
	v = conf.env
	v.FCFLAGS_fcshlib = ['-fPIC']
	v.LINKFLAGS_fcshlib = ['-dynamiclib']
	v.fcshlib_PATTERN = 'lib%s.dylib'
	v.FRAMEWORKPATH_ST = '-F%s'
	v.FRAMEWORK_ST = ['-framework']

	v.LINKFLAGS_fcstlib = []

	# the darwin linker has no -Bstatic/-Bdynamic markers and no -h soname flag
	v.FCSHLIB_MARKER = ''
	v.FCSTLIB_MARKER = ''
	v.SONAME_ST = ''

@conf
def fortran_modifier_win32(conf):
	"""
	Defines Fortran flags for Windows platforms
	"""
	v = conf.env
	v.fcprogram_PATTERN = v.fcprogram_test_PATTERN = '%s.exe'

	v.fcshlib_PATTERN = '%s.dll'
	v.implib_PATTERN = '%s.dll.a'
	v.IMPLIB_ST = '-Wl,--out-implib,%s'

	# -fpic is meaningless on Windows, all code is position independent
	v.FCFLAGS_fcshlib = []

	# Auto-import is enabled by default even without this option,
	# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
	# that the linker emits otherwise.
	v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import'])

@conf
def fortran_modifier_cygwin(conf):
	"""
	Defines Fortran flags for use on cygwin
	"""
	# start from the win32 settings, then adjust the dll naming convention
	fortran_modifier_win32(conf)
	v = conf.env
	v.fcshlib_PATTERN = 'cyg%s.dll'
	v.append_value('LINKFLAGS_fcshlib', ['-Wl,--enable-auto-image-base'])
	v.FCFLAGS_fcshlib = []

# ------------------------------------------------------------------------

@conf
def check_fortran_dummy_main(self, *k, **kw):
	"""
	Determines if a main function is needed by compiling a code snippet with
	the C compiler and linking it with the Fortran compiler (useful on unix-like systems)

	Sets conf.env.FC_MAIN to the symbol name that worked, or to -1 when no
	dummy main is required.
	"""
	if not self.env.CC:
		self.fatal('A c compiler is required for check_fortran_dummy_main')

	# candidate symbol names; the empty string means "no dummy main needed"
	lst = ['MAIN__', '__MAIN', '_MAIN', 'MAIN_', 'MAIN']
	lst.extend([m.lower() for m in lst])
	lst.append('')

	self.start_msg('Detecting whether we need a dummy main')
	for main in lst:
		kw['fortran_main'] = main
		try:
			self.check_cc(
				fragment = 'int %s() { return 0; }\n' % (main or 'test'),
				features = 'c fcprogram',
				mandatory = True
			)
			if not main:
				# linking succeeded without any main symbol
				self.env.FC_MAIN = -1
				self.end_msg('no')
			else:
				self.env.FC_MAIN = main
				self.end_msg('yes %s' % main)
			break
		except self.errors.ConfigurationError:
			pass
	else:
		# for/else: no candidate symbol worked
		self.end_msg('not found')
		self.fatal('could not detect whether fortran requires a dummy main, see the config.log')

# ------------------------------------------------------------------------

# Regular expressions used to scan verbose link output.
# Raw strings are required: '\S' in a plain string is an invalid escape
# sequence (DeprecationWarning, and a SyntaxWarning on modern Python).
GCC_DRIVER_LINE = re.compile(r'^Driving:')
# a static archive path such as /usr/lib/libfoo.a
POSIX_STATIC_EXT = re.compile(r'\S+\.a')
# a -l<library> link flag
POSIX_LIB_FLAGS = re.compile(r'-l\S+')

@conf
def is_link_verbose(self, txt):
	"""Returns True if 'useful' link options can be found in txt"""
	assert isinstance(txt, str)
	# a line is useful when it mentions a static archive or a -l flag,
	# unless it is a gcc 'Driving:' driver line
	return any(
		POSIX_STATIC_EXT.search(line) or POSIX_LIB_FLAGS.search(line)
		for line in txt.splitlines()
		if not GCC_DRIVER_LINE.search(line))

@conf
def check_fortran_verbose_flag(self, *k, **kw):
	"""
	Checks what kind of verbose (-v) flag works, then sets it to env.FC_VERBOSE_FLAG

	:return: the flag that produced useful link output
	:raises: :py:class:`waflib.Errors.ConfigurationError` when no flag works
	"""
	self.start_msg('fortran link verbose flag')
	for x in ('-v', '--verbose', '-verbose', '-V'):
		try:
			self.check_cc(
				features = 'fc fcprogram_test',
				fragment = FC_FRAGMENT2,
				compile_filename = 'test.f',
				linkflags = [x],
				mandatory=True)
		except self.errors.ConfigurationError:
			pass
		else:
			# output is on stderr or stdout (for xlf)
			if self.is_link_verbose(self.test_bld.err) or self.is_link_verbose(self.test_bld.out):
				self.end_msg(x)
				break
	else:
		# for/else: every candidate flag failed or produced no usable output
		self.end_msg('failure')
		self.fatal('Could not obtain the fortran link verbose flag (see config.log)')

	self.env.FC_VERBOSE_FLAG = x
	return x

# ------------------------------------------------------------------------

# Link flags matching any of these patterns are dropped by the parser below.
# NOTE: these are regular expressions, although some were clearly written
# with shell wildcards in mind (e.g. '-lang*' really matches '-lan' + 'g'*).
LINKFLAGS_IGNORED = [r'-lang*', r'-lcrt[a-zA-Z0-9\.]*\.o', r'-lc$', r'-lSystem', r'-libmil', r'-LIST:*', r'-LNO:*']
if os.name != 'nt':
	LINKFLAGS_IGNORED.append(r'-lgcc*')
else:
	LINKFLAGS_IGNORED.extend([r'-lfrt*', r'-luser32', r'-lkernel32', r'-ladvapi32', r'-lmsvcrt', r'-lshell32', r'-lmingw', r'-lmoldname'])
# pre-compiled patterns used by _match_ignore
RLINKFLAGS_IGNORED = list(map(re.compile, LINKFLAGS_IGNORED))

def _match_ignore(line):
	"""Returns True if the flag should be ignored (Fortran verbose flag test)"""
	return any(pattern.match(line) for pattern in RLINKFLAGS_IGNORED)

def parse_fortran_link(lines):
	"""Given the output of verbose link of Fortran compiler, this returns a
	list of flags necessary for linking using the standard linker."""
	flags = []
	for entry in lines:
		# skip gcc driver lines, parse everything else
		if GCC_DRIVER_LINE.match(entry):
			continue
		_parse_flink_line(entry, flags)
	return flags

# options such as '-L' alone, which consume the next token as their argument
SPACE_OPTS = re.compile('^-[LRuYz]$')
# options such as '-L/path' given as a single token
NOSPACE_OPTS = re.compile('^-[RL]')

def _parse_flink_token(lexer, token, tmp_flags):
	"""
	Processes a single token of verbose link output, appending the flags to
	keep to *tmp_flags*, and returns the next token from *lexer* (private).
	"""
	# Here we go (convention for wildcard is shell, not regex !)
	#   1 TODO: we first get some root .a libraries
	#   2 TODO: take everything starting by -bI:*
	#   3 Ignore the following flags: -lang* | -lcrt*.o | -lc |
	#     -lgcc* | -lSystem | -libmil | -LANG:=* | -LIST:* | -LNO:*)
	#   4 take into account -lkernel32
	#   5 For options of the kind -[[LRuYz]], as they take one argument
	#     after, the actual option is the next token
	#   6 For -YP,*: take and replace by -Larg where arg is the old
	#     argument
	#   7 For -[lLR]*: take

	# step 3
	if _match_ignore(token):
		pass
	# step 4
	elif token.startswith('-lkernel32') and sys.platform == 'cygwin':
		tmp_flags.append(token)
	# step 5: the option argument is the next token
	elif SPACE_OPTS.match(token):
		t = lexer.get_token()
		if t.startswith('P,'):
			# step 6: -YP,dir1:dir2 becomes -Ldir1 -Ldir2
			t = t[2:]
		for opt in t.split(os.pathsep):
			tmp_flags.append('-L%s' % opt)
	# step 6
	elif NOSPACE_OPTS.match(token):
		tmp_flags.append(token)
	# step 7
	elif POSIX_LIB_FLAGS.match(token):
		tmp_flags.append(token)
	else:
		# ignore anything not explicitly taken into account
		pass

	# advance to the next token for the caller's loop
	t = lexer.get_token()
	return t

def _parse_flink_line(line, final_flags):
	"""Tokenizes one line of verbose link output and extends *final_flags*
	with the flags worth keeping (private)."""
	lexer = shlex.shlex(line, posix=True)
	lexer.whitespace_split = True

	collected = []
	token = lexer.get_token()
	while token:
		# each call consumes one token and returns the next one
		token = _parse_flink_token(lexer, token, collected)

	final_flags.extend(collected)
	return final_flags

@conf
def check_fortran_clib(self, autoadd=True, *k, **kw):
	"""
	Obtains the flags for linking with the C library
	if this check works, add uselib='CLIB' to your task generators

	:param autoadd: unused, kept for backward compatibility
	:return: the link flags found, or an empty list on non-mandatory failure
	:raises: :py:class:`waflib.Errors.ConfigurationError` on mandatory failure
	"""
	if not self.env.FC_VERBOSE_FLAG:
		self.fatal('env.FC_VERBOSE_FLAG is not set: execute check_fortran_verbose_flag?')

	self.start_msg('Getting fortran runtime link flags')
	try:
		self.check_cc(
			fragment = FC_FRAGMENT2,
			compile_filename = 'test.f',
			features = 'fc fcprogram_test',
			linkflags = [self.env.FC_VERBOSE_FLAG]
		)
	except Exception:
		self.end_msg(False)
		if kw.get('mandatory', True):
			# bugfix: this used to call conf.fatal(), but 'conf' here is the
			# imported decorator, so an AttributeError was raised instead of
			# the intended configuration error
			self.fatal('Could not find the c library flags')
	else:
		# the useful flags are in the verbose output captured on stderr
		out = self.test_bld.err
		flags = parse_fortran_link(out.splitlines())
		self.end_msg('ok (%s)' % ' '.join(flags))
		self.env.LINKFLAGS_CLIB = flags
		return flags
	return []

def getoutput(conf, cmd, stdin=False):
	"""
	Obtains Fortran command outputs

	:param cmd: command to execute (list of strings)
	:param stdin: when True, a single newline is fed to the child's stdin
	:return: tuple (stdout, stderr) of the command
	"""
	from waflib import Errors
	if conf.env.env:
		env = conf.env.env
	else:
		env = dict(os.environ)
		# force the C locale so that version strings can be parsed reliably
		env['LANG'] = 'C'
	input = stdin and '\n'.encode() or None
	try:
		out, err = conf.cmd_and_log(cmd, env=env, output=0, input=input)
	except Errors.WafError as e:
		# A WafError might indicate an error code during the command
		# execution, in this case we still obtain the stderr and stdout,
		# which we can use to find the version string.
		if not (hasattr(e, 'stderr') and hasattr(e, 'stdout')):
			raise e
		else:
			# Ignore the return code and return the original
			# stdout and stderr.
			out = e.stdout
			err = e.stderr
	except Exception:
		conf.fatal('could not determine the compiler version %r' % cmd)
	return (out, err)

# ------------------------------------------------------------------------

ROUTINES_CODE = """\
subroutine foobar()
return
end
subroutine foo_bar()
return
end
"""

MAIN_CODE = """
void %(dummy_func_nounder)s(void);
void %(dummy_func_under)s(void);
int %(main_func_name)s() {
%(dummy_func_nounder)s();
%(dummy_func_under)s();
return 0;
}
"""

@feature('link_main_routines_func')
@before_method('process_source')
def link_main_routines_tg_method(self):
	"""
	The configuration test declares a unique task generator,
	so we create other task generators from there for fortran link tests:
	a Fortran static library plus a C program linked against it
	"""
	def write_test_file(task):
		# dump the code attached to the task generator into the output node
		task.outputs[0].write(task.generator.code)
	bld = self.bld
	bld(rule=write_test_file, target='main.c', code=MAIN_CODE % self.__dict__)
	bld(rule=write_test_file, target='test.f', code=ROUTINES_CODE)
	bld(features='fc fcstlib', source='test.f', target='test')
	bld(features='c fcprogram', source='main.c', target='app', use='test')

def mangling_schemes():
	"""
	Generate (trailing underscore, extra underscore, case) triplets for use
	with mangle_name (used in check_fortran_mangling);
	the order is tuned so that the gfortran convention is tried first
	"""
	for under in ('_', ''):
		for double_under in ('', '_'):
			for case in ('lower', 'upper'):
				yield (under, double_under, case)

def mangle_name(u, du, c, name):
	"""Mangle a name from a triplet (used in check_fortran_mangling)"""
	# apply the case transformation, then the trailing underscore; the extra
	# underscore is only appended when the name itself contains one
	mangled = getattr(name, c)()
	extra = du if '_' in name else ''
	return mangled + u + extra

@conf
def check_fortran_mangling(self, *k, **kw):
	"""
	Detect the mangling scheme, sets FORTRAN_MANGLING to the triplet found

	This test will compile a fortran static library, then link a c app against it

	:return: the mangling triplet (underscore, double underscore, case)
	:raises: :py:class:`waflib.Errors.ConfigurationError` when no scheme matches
	"""
	if not self.env.CC:
		self.fatal('A c compiler is required for link_main_routines')
	if not self.env.FC:
		self.fatal('A fortran compiler is required for link_main_routines')
	if not self.env.FC_MAIN:
		self.fatal('Checking for mangling requires self.env.FC_MAIN (execute "check_fortran_dummy_main" first?)')

	self.start_msg('Getting fortran mangling scheme')
	for (u, du, c) in mangling_schemes():
		try:
			self.check_cc(
				compile_filename = [],
				features        = 'link_main_routines_func',
				msg = 'nomsg',
				errmsg = 'nomsg',
				dummy_func_nounder = mangle_name(u, du, c, 'foobar'),
				dummy_func_under   = mangle_name(u, du, c, 'foo_bar'),
				main_func_name     = self.env.FC_MAIN
			)
		except self.errors.ConfigurationError:
			pass
		else:
			# first scheme that links successfully wins
			self.end_msg("ok ('%s', '%s', '%s-case')" % (u, du, c))
			self.env.FORTRAN_MANGLING = (u, du, c)
			break
	else:
		# for/else: no scheme produced a working link
		self.end_msg(False)
		self.fatal('mangler not found')
	return (u, du, c)

@feature('pyext')
@before_method('propagate_uselib_vars', 'apply_link')
def set_lib_pat(self):
	"""Sets the Fortran shared library pattern to the Python extension pattern"""
	self.env.fcshlib_PATTERN = self.env.pyext_PATTERN

@conf
def detect_openmp(self):
	"""
	Detects openmp flags and sets the OPENMP ``FCFLAGS``/``LINKFLAGS``

	:raises: :py:class:`waflib.Errors.ConfigurationError` when no flag is accepted
	"""
	# candidate flags for the various compilers (gcc, intel, pgi, sun, xlf, ...)
	for x in ('-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp'):
		try:
			self.check_fc(
				msg = 'Checking for OpenMP flag %s' % x,
				fragment = 'program main\n  call omp_get_num_threads()\nend program main',
				fcflags = x,
				linkflags = x,
				uselib_store = 'OPENMP'
			)
		except self.errors.ConfigurationError:
			pass
		else:
			break
	else:
		# for/else: every candidate flag was rejected
		self.fatal('Could not find OpenMP')

@conf
def check_gfortran_o_space(self):
	"""
	Checks whether old gfortran versions need the '-o' link flag split from
	its argument, adjusting env.FCLNK_TGT_F when the split form works
	"""
	if self.env.FC_NAME != 'GFORTRAN' or int(self.env.FC_VERSION[0]) > 4:
		# This is for old compilers and only for gfortran.
		# No idea how other implementations handle this. Be safe and bail out.
		return
	self.env.stash()
	self.env.FCLNK_TGT_F = ['-o', '']
	try:
		self.check_fc(msg='Checking if the -o link must be split from arguments', fragment=FC_FRAGMENT, features='fc fcshlib')
	except self.errors.ConfigurationError:
		# the split form does not work: restore the previous settings
		self.env.revert()
	else:
		self.env.commit()

+ 114
- 0
waflib/Tools/fc_scan.py View File

@@ -0,0 +1,114 @@
#! /usr/bin/env python
# encoding: utf-8
# DC 2008
# Thomas Nagy 2016-2018 (ita)

import re

# Raw strings are required below: '\s' and '\w' in plain strings are invalid
# escape sequences (DeprecationWarning, SyntaxWarning on modern Python).
# INCLUDE 'file' statements, including preprocessor '#include' and vendor variants
INC_REGEX = r"""(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
# USE <module>, including the "USE, INTRINSIC :: <module>" form
USE_REGEX = r"""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
# MODULE <name> definitions, excluding "MODULE PROCEDURE"
MOD_REGEX = r"""(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""

re_inc = re.compile(INC_REGEX, re.I)
re_use = re.compile(USE_REGEX, re.I)
re_mod = re.compile(MOD_REGEX, re.I)

class fortran_parser(object):
	"""
	This parser returns:

	* the nodes corresponding to the module names to produce
	* the nodes corresponding to the include files used
	* the module names used by the fortran files
	"""
	def __init__(self, incpaths):
		self.seen = []
		"""Files already parsed"""

		self.nodes = []
		"""List of :py:class:`waflib.Node.Node` representing the dependencies to return"""

		self.names = []
		"""List of module names to return"""

		self.incpaths = incpaths
		"""List of :py:class:`waflib.Node.Node` representing the include paths"""

	def find_deps(self, node):
		"""
		Parses a Fortran file to obtain the dependencies used/provided

		:param node: fortran file to read
		:type node: :py:class:`waflib.Node.Node`
		:return: lists representing the includes, the modules used, and the modules created by a fortran file
		:rtype: tuple of list of strings
		"""
		txt = node.read()
		incs = []
		uses = []
		mods = []
		for line in txt.splitlines():
			# line by line regexp search? optimize?
			m = re_inc.search(line)
			if m:
				incs.append(m.group(1))
			m = re_use.search(line)
			if m:
				uses.append(m.group(1))
			m = re_mod.search(line)
			if m:
				mods.append(m.group(1))
		return (incs, uses, mods)

	def start(self, node):
		"""
		Start parsing. Use the stack ``self.waiting`` to hold nodes to iterate on

		:param node: fortran file
		:type node: :py:class:`waflib.Node.Node`
		"""
		self.waiting = [node]
		while self.waiting:
			# breadth-first processing of the include files found so far
			nd = self.waiting.pop(0)
			self.iter(nd)

	def iter(self, node):
		"""
		Processes a single file during dependency parsing. Extracts files used
		modules used and modules provided.
		"""
		incs, uses, mods = self.find_deps(node)
		for x in incs:
			if x in self.seen:
				continue
			self.seen.append(x)
			self.tryfind_header(x)

		# module usage and creation are reported as names with distinct prefixes
		# so that the build system can match producers with consumers
		for x in uses:
			name = "USE@%s" % x
			if not name in self.names:
				self.names.append(name)

		for x in mods:
			name = "MOD@%s" % x
			if not name in self.names:
				self.names.append(name)

	def tryfind_header(self, filename):
		"""
		Adds an include file to the list of nodes to process

		:param filename: file name
		:type filename: string
		"""
		found = None
		for n in self.incpaths:
			found = n.find_resource(filename)
			if found:
				self.nodes.append(found)
				self.waiting.append(found)
				break
		if not found:
			# unresolved include: report the bare name so that errcheck
			# or other scanners may handle it
			if not filename in self.names:
				self.names.append(filename)


+ 62
- 0
waflib/Tools/flex.py View File

@@ -0,0 +1,62 @@
#!/usr/bin/env python
# encoding: utf-8
# John O'Meara, 2006
# Thomas Nagy, 2006-2018 (ita)

"""
The **flex** program is a code generator which creates C or C++ files.
The generated files are compiled into object files.
"""

import os, re
from waflib import Task, TaskGen
from waflib.Tools import ccroot

def decide_ext(self, node):
	"""Chooses the generated file extension: C++ when the task generator has the 'cxx' feature"""
	suffix = '.lex.cc' if 'cxx' in self.features else '.lex.c'
	return [suffix]

def flexfun(tsk):
	"""
	Runs flex and writes its captured output into the first output node;
	capturing instead of redirecting works around waf issue #1207.
	"""
	env = tsk.env
	bld = tsk.generator.bld
	wd = bld.variant_dir
	def to_list(xx):
		# accept a plain string as well as a list of strings
		if isinstance(xx, str):
			return [xx]
		return xx
	tsk.last_cmd = lst = []
	lst.extend(to_list(env.FLEX))
	lst.extend(to_list(env.FLEXFLAGS))
	inputs = [a.path_from(tsk.get_cwd()) for a in tsk.inputs]
	if env.FLEX_MSYS:
		# the MSYS flex only accepts forward slashes in paths
		inputs = [x.replace(os.sep, '/') for x in inputs]
	lst.extend(inputs)
	lst = [x for x in lst if x]
	txt = bld.cmd_and_log(lst, cwd=wd, env=env.env or None, quiet=0)
	tsk.outputs[0].write(txt.replace('\r\n', '\n').replace('\r', '\n')) # issue #1207

TaskGen.declare_chain(
name = 'flex',
rule = flexfun, # issue #854
ext_in = '.l',
decider = decide_ext,
)

# To support the following:
# bld(features='c', flexflags='-P/foo')
Task.classes['flex'].vars = ['FLEXFLAGS', 'FLEX']
ccroot.USELIB_VARS['c'].add('FLEXFLAGS')
ccroot.USELIB_VARS['cxx'].add('FLEXFLAGS')

def configure(conf):
	"""
	Detect the *flex* program and set the default flags
	"""
	conf.find_program('flex', var='FLEX')
	conf.env.FLEXFLAGS = ['-t']

	if re.search (r"\\msys\\[0-9.]+\\bin\\flex.exe$", conf.env.FLEX[0]):
		# this is the flex shipped with MSYS
		conf.env.FLEX_MSYS = True


+ 66
- 0
waflib/Tools/g95.py View File

@@ -0,0 +1,66 @@
#! /usr/bin/env python
# encoding: utf-8
# KWS 2010
# Thomas Nagy 2016-2018 (ita)

import re
from waflib import Utils
from waflib.Tools import fc, fc_config, fc_scan, ar
from waflib.Configure import conf

@conf
def find_g95(conf):
	"""
	Finds the g95 compiler, retrieves the version and sets conf.env.FC_NAME
	"""
	# do not shadow the 'fc' module imported at the top of the file
	fc_prog = conf.find_program('g95', var='FC')
	conf.get_g95_version(fc_prog)
	conf.env.FC_NAME = 'G95'

@conf
def g95_flags(conf):
	"""Defines the g95-specific configuration flags"""
	v = conf.env
	v.FCFLAGS_fcshlib = ['-fPIC']
	v.FORTRANMODFLAG = ['-fmod=', ''] # template for module path
	v.FCFLAGS_DEBUG = ['-Werror'] # promote warnings to errors in debug builds

@conf
def g95_modifier_win32(conf):
	"""Defines g95 flags for Windows (delegates to the generic Fortran modifier)"""
	fc_config.fortran_modifier_win32(conf)

@conf
def g95_modifier_cygwin(conf):
	"""Defines g95 flags for cygwin (delegates to the generic Fortran modifier)"""
	fc_config.fortran_modifier_cygwin(conf)

@conf
def g95_modifier_darwin(conf):
	"""Defines g95 flags for OSX (delegates to the generic Fortran modifier)"""
	fc_config.fortran_modifier_darwin(conf)

@conf
def g95_modifier_platform(conf):
	"""Applies the g95 platform-specific settings, when a modifier exists for the target os"""
	dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform()
	modifier = getattr(conf, 'g95_modifier_' + dest_os, None)
	if modifier:
		modifier()

@conf
def get_g95_version(conf, fc):
	"""Gets the g95 version and stores it as a (major, minor) tuple in conf.env.FC_VERSION"""

	version_re = re.compile(r"g95\s*(?P<major>\d*)\.(?P<minor>\d*)").search
	cmd = fc + ['--version']
	out, err = fc_config.getoutput(conf, cmd, stdin=False)
	# the version string may appear on stdout or stderr
	if out:
		match = version_re(out)
	else:
		match = version_re(err)
	if not match:
		conf.fatal('cannot determine g95 version')
	k = match.groupdict()
	conf.env.FC_VERSION = (k['major'], k['minor'])

def configure(conf):
	"""Detects g95 and sets up the Fortran configuration for it"""
	conf.find_g95()
	conf.find_ar()
	conf.fc_flags()
	conf.fc_add_flags()
	conf.g95_flags()
	conf.g95_modifier_platform()


+ 18
- 0
waflib/Tools/gas.py View File

@@ -0,0 +1,18 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2008-2018 (ita)

"Detect as/gas/gcc for compiling assembly files"

import waflib.Tools.asm # - leave this
from waflib.Tools import ar

def configure(conf):
	"""
	Find the programs gas/as/gcc and set the variable *AS*
	"""
	# gcc can drive the assembler, so it is an acceptable fallback
	conf.find_program(['gas', 'gcc'], var='AS')
	conf.env.AS_TGT_F = ['-c', '-o']
	conf.env.ASLNK_TGT_F = ['-o']
	conf.find_ar()
	conf.load('asm')

+ 55
- 58
waflib/Tools/gcc.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009

@@ -27,54 +27,51 @@ def gcc_common_flags(conf):
"""
v = conf.env

v['CC_SRC_F'] = []
v['CC_TGT_F'] = ['-c', '-o']
v.CC_SRC_F = []
v.CC_TGT_F = ['-c', '-o']

# linker
if not v['LINK_CC']: v['LINK_CC'] = v['CC']
v['CCLNK_SRC_F'] = []
v['CCLNK_TGT_F'] = ['-o']
v['CPPPATH_ST'] = '-I%s'
v['DEFINES_ST'] = '-D%s'
if not v.LINK_CC:
v.LINK_CC = v.CC

v['LIB_ST'] = '-l%s' # template for adding libs
v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
v['STLIB_ST'] = '-l%s'
v['STLIBPATH_ST'] = '-L%s'
v['RPATH_ST'] = '-Wl,-rpath,%s'
v.CCLNK_SRC_F = []
v.CCLNK_TGT_F = ['-o']
v.CPPPATH_ST = '-I%s'
v.DEFINES_ST = '-D%s'

v['SONAME_ST'] = '-Wl,-h,%s'
v['SHLIB_MARKER'] = '-Wl,-Bdynamic'
v['STLIB_MARKER'] = '-Wl,-Bstatic'
v.LIB_ST = '-l%s' # template for adding libs
v.LIBPATH_ST = '-L%s' # template for adding libpaths
v.STLIB_ST = '-l%s'
v.STLIBPATH_ST = '-L%s'
v.RPATH_ST = '-Wl,-rpath,%s'

# program
v['cprogram_PATTERN'] = '%s'
v.SONAME_ST = '-Wl,-h,%s'
v.SHLIB_MARKER = '-Wl,-Bdynamic'
v.STLIB_MARKER = '-Wl,-Bstatic'

# shared librar
v['CFLAGS_cshlib'] = ['-fPIC']
v['LINKFLAGS_cshlib'] = ['-shared']
v['cshlib_PATTERN'] = 'lib%s.so'
v.cprogram_PATTERN = '%s'

# static lib
v['LINKFLAGS_cstlib'] = ['-Wl,-Bstatic']
v['cstlib_PATTERN'] = 'lib%s.a'
v.CFLAGS_cshlib = ['-fPIC']
v.LINKFLAGS_cshlib = ['-shared']
v.cshlib_PATTERN = 'lib%s.so'

# osx stuff
v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
v['CFLAGS_MACBUNDLE'] = ['-fPIC']
v['macbundle_PATTERN'] = '%s.bundle'
v.LINKFLAGS_cstlib = ['-Wl,-Bstatic']
v.cstlib_PATTERN = 'lib%s.a'

v.LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup']
v.CFLAGS_MACBUNDLE = ['-fPIC']
v.macbundle_PATTERN = '%s.bundle'

@conf
def gcc_modifier_win32(conf):
"""Configuration flags for executing gcc on Windows"""
v = conf.env
v['cprogram_PATTERN'] = '%s.exe'
v.cprogram_PATTERN = '%s.exe'

v['cshlib_PATTERN'] = '%s.dll'
v['implib_PATTERN'] = 'lib%s.dll.a'
v['IMPLIB_ST'] = '-Wl,--out-implib,%s'
v.cshlib_PATTERN = '%s.dll'
v.implib_PATTERN = '%s.dll.a'
v.IMPLIB_ST = '-Wl,--out-implib,%s'

v['CFLAGS_cshlib'] = []
v.CFLAGS_cshlib = []

# Auto-import is enabled by default even without this option,
# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
@@ -86,42 +83,42 @@ def gcc_modifier_cygwin(conf):
"""Configuration flags for executing gcc on Cygwin"""
gcc_modifier_win32(conf)
v = conf.env
v['cshlib_PATTERN'] = 'cyg%s.dll'
v.cshlib_PATTERN = 'cyg%s.dll'
v.append_value('LINKFLAGS_cshlib', ['-Wl,--enable-auto-image-base'])
v['CFLAGS_cshlib'] = []
v.CFLAGS_cshlib = []

@conf
def gcc_modifier_darwin(conf):
"""Configuration flags for executing gcc on MacOS"""
v = conf.env
v['CFLAGS_cshlib'] = ['-fPIC']
v['LINKFLAGS_cshlib'] = ['-dynamiclib']
v['cshlib_PATTERN'] = 'lib%s.dylib'
v['FRAMEWORKPATH_ST'] = '-F%s'
v['FRAMEWORK_ST'] = ['-framework']
v['ARCH_ST'] = ['-arch']
v.CFLAGS_cshlib = ['-fPIC']
v.LINKFLAGS_cshlib = ['-dynamiclib']
v.cshlib_PATTERN = 'lib%s.dylib'
v.FRAMEWORKPATH_ST = '-F%s'
v.FRAMEWORK_ST = ['-framework']
v.ARCH_ST = ['-arch']

v['LINKFLAGS_cstlib'] = []
v.LINKFLAGS_cstlib = []

v['SHLIB_MARKER'] = []
v['STLIB_MARKER'] = []
v['SONAME_ST'] = []
v.SHLIB_MARKER = []
v.STLIB_MARKER = []
v.SONAME_ST = []

@conf
def gcc_modifier_aix(conf):
"""Configuration flags for executing gcc on AIX"""
v = conf.env
v['LINKFLAGS_cprogram'] = ['-Wl,-brtl']
v['LINKFLAGS_cshlib'] = ['-shared','-Wl,-brtl,-bexpfull']
v['SHLIB_MARKER'] = []
v.LINKFLAGS_cprogram = ['-Wl,-brtl']
v.LINKFLAGS_cshlib = ['-shared','-Wl,-brtl,-bexpfull']
v.SHLIB_MARKER = []

@conf
def gcc_modifier_hpux(conf):
v = conf.env
v['SHLIB_MARKER'] = []
v['STLIB_MARKER'] = []
v['CFLAGS_cshlib'] = ['-fPIC','-DPIC']
v['cshlib_PATTERN'] = 'lib%s.sl'
v.SHLIB_MARKER = []
v.STLIB_MARKER = []
v.CFLAGS_cshlib = ['-fPIC','-DPIC']
v.cshlib_PATTERN = 'lib%s.sl'

@conf
def gcc_modifier_openbsd(conf):
@@ -130,9 +127,9 @@ def gcc_modifier_openbsd(conf):
@conf
def gcc_modifier_osf1V(conf):
v = conf.env
v['SHLIB_MARKER'] = []
v['STLIB_MARKER'] = []
v['SONAME_ST'] = []
v.SHLIB_MARKER = []
v.STLIB_MARKER = []
v.SONAME_ST = []

@conf
def gcc_modifier_platform(conf):
@@ -155,5 +152,5 @@ def configure(conf):
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
conf.check_gcc_o_space()


+ 55
- 0
waflib/Tools/gdc.py View File

@@ -0,0 +1,55 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)

from waflib.Tools import ar, d
from waflib.Configure import conf

@conf
def find_gdc(conf):
	"""
	Finds the program gdc and set the variable *D*
	"""
	conf.find_program('gdc', var='D')

	# sanity check: another D compiler on the PATH must not be mistaken for gdc
	version_output = conf.cmd_and_log(conf.env.D + ['--version'])
	if 'gdc' not in version_output:
		conf.fatal("detected compiler is not gdc")

@conf
def common_flags_gdc(conf):
	"""
	Sets the flags required by *gdc*
	"""
	v = conf.env

	v.DFLAGS = []

	# compilation
	v.D_SRC_F = ['-c']
	v.D_TGT_F = '-o%s'

	# linking (the compiler driver is used as the linker)
	v.D_LINKER = v.D
	v.DLNK_SRC_F = ''
	v.DLNK_TGT_F = '-o%s'
	v.DINC_ST = '-I%s'

	# library flags
	v.DSHLIB_MARKER = v.DSTLIB_MARKER = ''
	v.DSTLIB_ST = v.DSHLIB_ST = '-l%s'
	v.DSTLIBPATH_ST = v.DLIBPATH_ST = '-L%s'

	v.LINKFLAGS_dshlib = ['-shared']

	# D interface files (headers)
	v.DHEADER_ext = '.di'
	v.DFLAGS_d_with_header = '-fintfc'
	v.D_HDR_F = '-fintfc-file=%s'

def configure(conf):
	"""
	Configuration for gdc
	"""
	conf.find_gdc()
	conf.load('ar')
	conf.load('d')
	conf.common_flags_gdc()
	conf.d_platform_flags()


+ 93
- 0
waflib/Tools/gfortran.py View File

@@ -0,0 +1,93 @@
#! /usr/bin/env python
# encoding: utf-8
# DC 2008
# Thomas Nagy 2016-2018 (ita)

import re
from waflib import Utils
from waflib.Tools import fc, fc_config, fc_scan, ar
from waflib.Configure import conf

@conf
def find_gfortran(conf):
	"""Find the gfortran program (will look in the environment variable 'FC')"""
	# do not shadow the 'fc' module imported at the top of the file
	fc_prog = conf.find_program(['gfortran','g77'], var='FC')
	# (fallback to g77 for systems, where no gfortran is available)
	conf.get_gfortran_version(fc_prog)
	conf.env.FC_NAME = 'GFORTRAN'

@conf
def gfortran_flags(conf):
	"""Defines the gfortran-specific configuration flags"""
	v = conf.env
	v.FCFLAGS_fcshlib = ['-fPIC']
	v.FORTRANMODFLAG = ['-J', ''] # template for module path
	v.FCFLAGS_DEBUG = ['-Werror'] # promote warnings to errors in debug builds

@conf
def gfortran_modifier_win32(conf):
	"""Defines gfortran flags for Windows (delegates to the generic Fortran modifier)"""
	fc_config.fortran_modifier_win32(conf)

@conf
def gfortran_modifier_cygwin(conf):
	"""Defines gfortran flags for cygwin (delegates to the generic Fortran modifier)"""
	fc_config.fortran_modifier_cygwin(conf)

@conf
def gfortran_modifier_darwin(conf):
	"""Defines gfortran flags for OSX (delegates to the generic Fortran modifier)"""
	fc_config.fortran_modifier_darwin(conf)

@conf
def gfortran_modifier_platform(conf):
	"""Applies the gfortran platform-specific settings, when a modifier exists for the target os"""
	dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform()
	modifier = getattr(conf, 'gfortran_modifier_' + dest_os, None)
	if modifier:
		modifier()

@conf
def get_gfortran_version(conf, fc):
	"""
	Verifies that the compiler really is gfortran and stores the version
	as a tuple of strings in conf.env.FC_VERSION

	:param fc: compiler command (list of strings)
	:raises: :py:class:`waflib.Errors.ConfigurationError` when the compiler cannot be identified
	"""
	import shlex

	# ensure this is actually gfortran, not an imposter.
	version_re = re.compile(r"GNU\s*Fortran", re.I).search
	cmd = fc + ['--version']
	out, err = fc_config.getoutput(conf, cmd, stdin=False)
	# the banner may be printed on stdout or stderr
	match = version_re(out) if out else version_re(err)
	if not match:
		conf.fatal('Could not determine the compiler type')

	# --- now get more detailed info through the preprocessor defines,
	#     see c_config.get_cc_version
	cmd = fc + ['-dM', '-E', '-']
	out, err = fc_config.getoutput(conf, cmd, stdin=True)

	if out.find('__GNUC__') < 0:
		conf.fatal('Could not determine the compiler type')

	# parse '#define KEY VALUE' lines into a dict
	# (the unused isD/isT helpers that used to live here were dead code)
	defines = {}
	for line in out.splitlines():
		lst = shlex.split(line)
		if len(lst) > 2:
			defines[lst[1]] = lst[2]

	conf.env.FC_VERSION = (defines['__GNUC__'], defines['__GNUC_MINOR__'], defines['__GNUC_PATCHLEVEL__'])

def configure(conf):
	"""Detects gfortran and sets up the Fortran configuration for it"""
	conf.find_gfortran()
	conf.find_ar()
	conf.fc_flags()
	conf.fc_add_flags()
	conf.gfortran_flags()
	conf.gfortran_modifier_platform()
	conf.check_gfortran_o_space()

+ 489
- 0
waflib/Tools/glib2.py View File

@@ -0,0 +1,489 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2018 (ita)

"""
Support for GLib2 tools:

* marshal
* enums
* gsettings
* gresource
"""

import os
import functools
from waflib import Context, Task, Utils, Options, Errors, Logs
from waflib.TaskGen import taskgen_method, before_method, feature, extension
from waflib.Configure import conf

################## marshal files

@taskgen_method
def add_marshal_file(self, filename, prefix):
	"""
	Adds a file to the list of marshal files to process. Store them in the attribute *marshal_list*.

	:param filename: xml file to compile
	:type filename: string
	:param prefix: marshal prefix (--prefix=prefix)
	:type prefix: string
	"""
	try:
		self.marshal_list.append((filename, prefix))
	except AttributeError:
		# first call: create the list and schedule the processing method
		self.marshal_list = [(filename, prefix)]
		self.meths.append('process_marshal')

@before_method('process_source')
def process_marshal(self):
	"""
	Processes the marshal files stored in the attribute *marshal_list* to create :py:class:`waflib.Tools.glib2.glib_genmarshal` instances.
	Adds the c file created to the list of source to process.
	"""
	for f, prefix in getattr(self, 'marshal_list', []):
		node = self.path.find_resource(f)

		if not node:
			raise Errors.WafError('file not found %r' % f)

		# one task produces both the header and the c file
		h_node = node.change_ext('.h')
		c_node = node.change_ext('.c')

		task = self.create_task('glib_genmarshal', node, [h_node, c_node])
		task.env.GLIB_GENMARSHAL_PREFIX = prefix
	# compile the last generated c file along with the regular sources
	# (this method only runs when marshal_list is non-empty)
	self.source = self.to_nodes(getattr(self, 'source', []))
	self.source.append(c_node)

class glib_genmarshal(Task.Task):
	"""
	Generates a marshal header and implementation by running glib-genmarshal
	twice (--header, then --body) on the same input file
	"""
	vars = ['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL']
	color = 'BLUE'
	ext_out = ['.h']
	def run(self):
		bld = self.generator.bld

		get = self.env.get_flat
		# first pass: write the header to outputs[0]
		cmd1 = "%s %s --prefix=%s --header > %s" % (
			get('GLIB_GENMARSHAL'),
			self.inputs[0].srcpath(),
			get('GLIB_GENMARSHAL_PREFIX'),
			self.outputs[0].abspath()
		)

		ret = bld.exec_command(cmd1)
		if ret:
			return ret

		# the c file starts by including the generated header
		c = '''#include "%s"\n''' % self.outputs[0].name
		self.outputs[1].write(c)

		# second pass: append the marshal bodies to outputs[1]
		cmd2 = "%s %s --prefix=%s --body >> %s" % (
			get('GLIB_GENMARSHAL'),
			self.inputs[0].srcpath(),
			get('GLIB_GENMARSHAL_PREFIX'),
			self.outputs[1].abspath()
		)
		return bld.exec_command(cmd2)

########################## glib-mkenums

@taskgen_method
def add_enums_from_template(self, source='', target='', template='', comments=''):
	"""
	Adds a file to the list of enum files to process. Stores them in the attribute *enums_list*.

	:param source: enum file to process
	:type source: string
	:param target: target file
	:type target: string
	:param template: template file
	:type template: string
	:param comments: comments
	:type comments: string
	"""
	if not hasattr(self, 'enums_list'):
		self.enums_list = []
	self.meths.append('process_enums')
	# the per-section keys are empty here: the template file drives the output
	self.enums_list.append({'source': source,
		'target': target,
		'template': template,
		'file-head': '',
		'file-prod': '',
		'file-tail': '',
		'enum-prod': '',
		'value-head': '',
		'value-prod': '',
		'value-tail': '',
		'comments': comments})

@taskgen_method
def add_enums(self, source='', target='',
		file_head='', file_prod='', file_tail='', enum_prod='',
		value_head='', value_prod='', value_tail='', comments=''):
	"""
	Adds a file to the list of enum files to process. Stores them in the attribute *enums_list*.

	:param source: enum file to process
	:type source: string
	:param target: target file
	:type target: string
	:param file_head: mkenums --fhead production rule
	:param file_prod: mkenums --fprod production rule
	:param file_tail: mkenums --ftail production rule
	:param enum_prod: mkenums --eprod production rule
	:param value_head: mkenums --vhead production rule
	:param value_prod: mkenums --vprod production rule
	:param value_tail: mkenums --vtail production rule
	:param comments: comments
	:type comments: string
	"""
	if not hasattr(self, 'enums_list'):
		self.enums_list = []
	self.meths.append('process_enums')
	self.enums_list.append({'source': source,
		'template': '',
		'target': target,
		'file-head': file_head,
		'file-prod': file_prod,
		'file-tail': file_tail,
		'enum-prod': enum_prod,
		'value-head': value_head,
		'value-prod': value_prod,
		'value-tail': value_tail,
		'comments': comments})

@before_method('process_source')
def process_enums(self):
	"""
	Processes the enum files stored in the attribute *enum_list* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances.
	"""
	for enum in getattr(self, 'enums_list', []):
		task = self.create_task('glib_mkenums')
		env = task.env

		inputs = []

		# process the source
		source_list = self.to_list(enum['source'])
		if not source_list:
			raise Errors.WafError('missing source ' + str(enum))
		source_list = [self.path.find_resource(k) for k in source_list]
		inputs += source_list
		env.GLIB_MKENUMS_SOURCE = [k.abspath() for k in source_list]

		# find the target
		if not enum['target']:
			raise Errors.WafError('missing target ' + str(enum))
		tgt_node = self.path.find_or_declare(enum['target'])
		if tgt_node.name.endswith('.c'):
			# generated c files must be compiled along with the sources
			self.source.append(tgt_node)
		env.GLIB_MKENUMS_TARGET = tgt_node.abspath()


		options = []

		if enum['template']: # template, if provided
			template_node = self.path.find_resource(enum['template'])
			options.append('--template %s' % (template_node.abspath()))
			inputs.append(template_node)
		# map the stored production rules to the glib-mkenums options
		params = {'file-head' : '--fhead',
				'file-prod' : '--fprod',
				'file-tail' : '--ftail',
				'enum-prod' : '--eprod',
				'value-head' : '--vhead',
				'value-prod' : '--vprod',
				'value-tail' : '--vtail',
				'comments': '--comments'}
		for param, option in params.items():
			if enum[param]:
				options.append('%s %r' % (option, enum[param]))

		env.GLIB_MKENUMS_OPTIONS = ' '.join(options)

		# update the task instance
		task.set_inputs(inputs)
		task.set_outputs(tgt_node)

class glib_mkenums(Task.Task):
	"""
	Processes enum files with glib-mkenums; the tool writes to stdout,
	so the command redirects into the target file.
	"""
	run_str = '${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}'
	color = 'PINK'
	# generated headers must be produced before compilation tasks that consume them
	ext_out = ['.h']

######################################### gsettings

@taskgen_method
def add_settings_schemas(self, filename_list):
	"""
	Adds settings files to process to *settings_schema_files*.
	A single filename may be passed instead of a list.

	:param filename_list: files
	:type filename_list: list of string
	"""
	if not isinstance(filename_list, list):
		filename_list = [filename_list]

	try:
		self.settings_schema_files.extend(filename_list)
	except AttributeError:
		# first call on this task generator: create the accumulator
		self.settings_schema_files = list(filename_list)

@taskgen_method
def add_settings_enums(self, namespace, filename_list):
	"""
	Called only once by task generator to set the enums namespace.

	:param namespace: namespace
	:type namespace: string
	:param filename_list: enum files to process (a single filename is also accepted)
	:type filename_list: file list
	:raises Errors.WafError: when called a second time on the same task generator
	"""
	if hasattr(self, 'settings_enum_namespace'):
		raise Errors.WafError("Tried to add gsettings enums to %r more than once" % self.name)
	self.settings_enum_namespace = namespace

	self.settings_enum_files = filename_list if isinstance(filename_list, list) else [filename_list]

@feature('glib2')
def process_settings(self):
	"""
	Processes the schema files in *settings_schema_files* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances. The
	same files are validated through :py:class:`waflib.Tools.glib2.glib_validate_schema` tasks.

	Also installs the schemas and registers a post-build callback that runs
	glib-compile-schemas on the installation directory.
	"""
	enums_tgt_node = []
	install_files = []

	settings_schema_files = getattr(self, 'settings_schema_files', [])
	if settings_schema_files and not self.env.GLIB_COMPILE_SCHEMAS:
		raise Errors.WafError ("Unable to process GSettings schemas - glib-compile-schemas was not found during configure")

	# 1. process gsettings_enum_files (generate .enums.xml)
	#
	if hasattr(self, 'settings_enum_files'):
		enums_task = self.create_task('glib_mkenums')

		source_list = self.settings_enum_files
		source_list = [self.path.find_resource(k) for k in source_list]
		enums_task.set_inputs(source_list)
		enums_task.env.GLIB_MKENUMS_SOURCE = [k.abspath() for k in source_list]

		# the output file is named after the namespace set by add_settings_enums
		target = self.settings_enum_namespace + '.enums.xml'
		tgt_node = self.path.find_or_declare(target)
		enums_task.set_outputs(tgt_node)
		enums_task.env.GLIB_MKENUMS_TARGET = tgt_node.abspath()
		enums_tgt_node = [tgt_node]

		install_files.append(tgt_node)

		options = '--comments "<!-- @comment@ -->" --fhead "<schemalist>" --vhead " <@type@ id=\\"%s.@EnumName@\\">" --vprod " <value nick=\\"@valuenick@\\" value=\\"@valuenum@\\"/>" --vtail " </@type@>" --ftail "</schemalist>" ' % (self.settings_enum_namespace)
		enums_task.env.GLIB_MKENUMS_OPTIONS = options

	# 2. process gsettings_schema_files (validate .gschema.xml files)
	#
	for schema in settings_schema_files:
		schema_task = self.create_task ('glib_validate_schema')

		schema_node = self.path.find_resource(schema)
		if not schema_node:
			raise Errors.WafError("Cannot find the schema file %r" % schema)
		install_files.append(schema_node)
		# the generated enums xml (if any) is validated together with the schema
		source_list = enums_tgt_node + [schema_node]

		schema_task.set_inputs (source_list)
		schema_task.env.GLIB_COMPILE_SCHEMAS_OPTIONS = [("--schema-file=" + k.abspath()) for k in source_list]

		# a stamp file records a successful validation run
		target_node = schema_node.change_ext('.xml.valid')
		schema_task.set_outputs (target_node)
		schema_task.env.GLIB_VALIDATE_SCHEMA_OUTPUT = target_node.abspath()

	# 3. schemas install task
	def compile_schemas_callback(bld):
		# post-build: refresh the on-disk schema cache for every registered directory
		if not bld.is_install:
			return
		compile_schemas = Utils.to_list(bld.env.GLIB_COMPILE_SCHEMAS)
		destdir = Options.options.destdir
		paths = bld._compile_schemas_registered
		if destdir:
			# re-root the schema paths under the staging directory
			paths = (os.path.join(destdir, path.lstrip(os.sep)) for path in paths)
		for path in paths:
			Logs.pprint('YELLOW', 'Updating GSettings schema cache %r' % path)
			if self.bld.exec_command(compile_schemas + [path]):
				# best-effort: a failed cache update is reported but does not abort the build
				Logs.warn('Could not update GSettings schema cache %r' % path)

	if self.bld.is_install:
		schemadir = self.env.GSETTINGSSCHEMADIR
		if not schemadir:
			raise Errors.WafError ('GSETTINGSSCHEMADIR not defined (should have been set up automatically during configure)')

		if install_files:
			self.add_install_files(install_to=schemadir, install_from=install_files)
			registered_schemas = getattr(self.bld, '_compile_schemas_registered', None)
			if not registered_schemas:
				# register the post-build callback only once per build context
				registered_schemas = self.bld._compile_schemas_registered = set()
				self.bld.add_post_fun(compile_schemas_callback)
			registered_schemas.add(schemadir)

class glib_validate_schema(Task.Task):
	"""
	Validates schema files with glib-compile-schemas --dry-run; on success an
	empty stamp file (GLIB_VALIDATE_SCHEMA_OUTPUT) is touched so the task
	is considered up to date.
	"""
	run_str = 'rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}'
	color = 'PINK'

################## gresource

@extension('.gresource.xml')
def process_gresource_source(self, node):
	"""
	Creates tasks that turn ``.gresource.xml`` files to C code.

	Skipped when the task generator uses the 'gresource' feature, which
	produces binary bundles instead of C sources.
	"""
	if not self.env.GLIB_COMPILE_RESOURCES:
		raise Errors.WafError("Unable to process GResource file - glib-compile-resources was not found during configure")

	if 'gresource' in self.features:
		return

	header_node = node.change_ext('_xml.h')
	source_node = node.change_ext('_xml.c')
	self.create_task('glib_gresource_source', node, [header_node, source_node])
	# the generated .c file is compiled along with the rest of the sources
	self.source.append(source_node)

@feature('gresource')
def process_gresource_bundle(self):
	"""
	Creates tasks to turn ``.gresource`` files from ``.gresource.xml`` files::

		def build(bld):
			bld(
				features='gresource',
				source=['resources1.gresource.xml', 'resources2.gresource.xml'],
				install_path='${LIBDIR}/${PACKAGE}'
			)

	:param source: XML files to process
	:type source: list of string
	:param install_path: installation path
	:type install_path: string
	"""
	# the install destination is the same for every bundle of this generator
	inst_to = getattr(self, 'install_path', None)
	for src in self.to_list(self.source):
		node = self.path.find_resource(src)

		# 'foo.gresource.xml' -> 'foo.gresource'
		bundle_task = self.create_task('glib_gresource_bundle', node, node.change_ext(''))
		if inst_to:
			self.add_install_files(install_to=inst_to, install_from=bundle_task.outputs)

class glib_gresource_base(Task.Task):
	"""
	Base class for gresource based tasks
	"""
	color = 'BLUE'
	# both the source tree and the build tree are given as --sourcedir so that
	# resources referenced by the .gresource.xml can live in either one
	base_cmd = '${GLIB_COMPILE_RESOURCES} --sourcedir=${SRC[0].parent.srcpath()} --sourcedir=${SRC[0].bld_dir()}'

	def scan(self):
		"""
		Scans gresource dependencies through ``glib-compile-resources --generate-dependencies command``

		:return: (nodes found in the build tree, names that could not be resolved)
		:rtype: tuple of lists
		"""
		bld = self.generator.bld
		kw = {}
		kw['cwd'] = self.get_cwd()
		kw['quiet'] = Context.BOTH

		cmd = Utils.subst_vars('${GLIB_COMPILE_RESOURCES} --sourcedir=%s --sourcedir=%s --generate-dependencies %s' % (
			self.inputs[0].parent.srcpath(),
			self.inputs[0].bld_dir(),
			self.inputs[0].bldpath()
		), self.env)

		output = bld.cmd_and_log(cmd, **kw)

		nodes = []
		names = []
		# one dependency path per output line; unresolvable entries are kept as raw names
		for dep in output.splitlines():
			if dep:
				node = bld.bldnode.find_node(dep)
				if node:
					nodes.append(node)
				else:
					names.append(dep)

		return (nodes, names)

class glib_gresource_source(glib_gresource_base):
	"""
	Task to generate C source code (.h and .c files) from a gresource.xml file
	"""
	vars = ['GLIB_COMPILE_RESOURCES']
	# two compiled command functions: one for the header, one for the source
	fun_h = Task.compile_fun_shell(glib_gresource_base.base_cmd + ' --target=${TGT[0].abspath()} --generate-header ${SRC}')
	fun_c = Task.compile_fun_shell(glib_gresource_base.base_cmd + ' --target=${TGT[1].abspath()} --generate-source ${SRC}')
	ext_out = ['.h']

	def run(self):
		# generate the header, then the source; returns the first failing exit status, if any
		return self.fun_h[0](self) or self.fun_c[0](self)

class glib_gresource_bundle(glib_gresource_base):
	"""
	Task to generate a .gresource binary file from a gresource.xml file
	"""
	run_str = glib_gresource_base.base_cmd + ' --target=${TGT} ${SRC}'
	shell = True # temporary workaround for #795

@conf
def find_glib_genmarshal(conf):
	# sets conf.env.GLIB_GENMARSHAL; fails the configuration if the tool is missing
	conf.find_program('glib-genmarshal', var='GLIB_GENMARSHAL')

@conf
def find_glib_mkenums(conf):
	# glib-mkenums is a perl script, so a perl interpreter is required first
	if not conf.env.PERL:
		conf.find_program('perl', var='PERL')
	conf.find_program('glib-mkenums', interpreter='PERL', var='GLIB_MKENUMS')

@conf
def find_glib_compile_schemas(conf):
	# when cross-compiling, gsettings.m4 locates the program with the following:
	# pkg-config --variable glib_compile_schemas gio-2.0
	conf.find_program('glib-compile-schemas', var='GLIB_COMPILE_SCHEMAS')

	def getstr(varname):
		# command-line value if present, else the configured env value, else ''
		return getattr(Options.options, varname, getattr(conf.env, varname, ''))

	schemadir = getstr('GSETTINGSSCHEMADIR')
	if not schemadir:
		# fall back to DATADIR/glib-2.0/schemas, defaulting DATADIR to PREFIX/share
		datadir = getstr('DATADIR') or os.path.join(conf.env.PREFIX, 'share')
		schemadir = os.path.join(datadir, 'glib-2.0', 'schemas')

	conf.env.GSETTINGSSCHEMADIR = schemadir

@conf
def find_glib_compile_resources(conf):
	# sets conf.env.GLIB_COMPILE_RESOURCES; fails the configuration if the tool is missing
	conf.find_program('glib-compile-resources', var='GLIB_COMPILE_RESOURCES')

def configure(conf):
	"""
	Finds the following programs:

	* *glib-genmarshal* and set *GLIB_GENMARSHAL*
	* *glib-mkenums* and set *GLIB_MKENUMS*
	* *glib-compile-schemas* and set *GLIB_COMPILE_SCHEMAS* (not mandatory)
	* *glib-compile-resources* and set *GLIB_COMPILE_RESOURCES* (not mandatory)
	"""
	conf.find_glib_genmarshal()
	conf.find_glib_mkenums()
	# optional tools: the build-time features raise their own errors when missing
	conf.find_glib_compile_schemas(mandatory=False)
	conf.find_glib_compile_resources(mandatory=False)

def options(opt):
	"""
	Adds the ``--gsettingsschemadir`` command-line option
	"""
	gr = opt.add_option_group('Installation directories')
	# empty default: find_glib_compile_schemas falls back to DATADIR/glib-2.0/schemas
	gr.add_option('--gsettingsschemadir', help='GSettings schema location [DATADIR/glib-2.0/schemas]', default='', dest='GSETTINGSSCHEMADIR')


+ 131
- 0
waflib/Tools/gnu_dirs.py View File

@@ -0,0 +1,131 @@
#!/usr/bin/env python
# encoding: utf-8
# Ali Sabil, 2007

"""
Sets various standard variables such as INCLUDEDIR. SBINDIR and others. To use this module just call::

opt.load('gnu_dirs')

and::

conf.load('gnu_dirs')

Add options for the standard GNU directories, this tool will add the options
found in autotools, and will update the environment with the following
installation variables:

============== ========================================= =======================
Variable Description Default Value
============== ========================================= =======================
PREFIX installation prefix /usr/local
EXEC_PREFIX installation prefix for binaries PREFIX
BINDIR user commands EXEC_PREFIX/bin
SBINDIR system binaries EXEC_PREFIX/sbin
LIBEXECDIR program-specific binaries EXEC_PREFIX/libexec
SYSCONFDIR host-specific configuration PREFIX/etc
SHAREDSTATEDIR architecture-independent variable data PREFIX/com
LOCALSTATEDIR variable data PREFIX/var
LIBDIR object code libraries EXEC_PREFIX/lib
INCLUDEDIR header files PREFIX/include
OLDINCLUDEDIR header files for non-GCC compilers /usr/include
DATAROOTDIR architecture-independent data root PREFIX/share
DATADIR architecture-independent data DATAROOTDIR
INFODIR GNU "info" documentation DATAROOTDIR/info
LOCALEDIR locale-dependent data DATAROOTDIR/locale
MANDIR manual pages DATAROOTDIR/man
DOCDIR documentation root DATAROOTDIR/doc/APPNAME
HTMLDIR HTML documentation DOCDIR
DVIDIR DVI documentation DOCDIR
PDFDIR PDF documentation DOCDIR
PSDIR PostScript documentation DOCDIR
============== ========================================= =======================
"""

import os, re
from waflib import Utils, Options, Context

# Table of (option name, description, default value) used to build both the
# command-line options and the conf.env variables; defaults may reference other
# variables with ${VAR}. The %s is replaced by '64' where Utils.lib64() detects
# a lib64 library layout, and by '' otherwise.
gnuopts = '''
bindir, user commands, ${EXEC_PREFIX}/bin
sbindir, system binaries, ${EXEC_PREFIX}/sbin
libexecdir, program-specific binaries, ${EXEC_PREFIX}/libexec
sysconfdir, host-specific configuration, ${PREFIX}/etc
sharedstatedir, architecture-independent variable data, ${PREFIX}/com
localstatedir, variable data, ${PREFIX}/var
libdir, object code libraries, ${EXEC_PREFIX}/lib%s
includedir, header files, ${PREFIX}/include
oldincludedir, header files for non-GCC compilers, /usr/include
datarootdir, architecture-independent data root, ${PREFIX}/share
datadir, architecture-independent data, ${DATAROOTDIR}
infodir, GNU "info" documentation, ${DATAROOTDIR}/info
localedir, locale-dependent data, ${DATAROOTDIR}/locale
mandir, manual pages, ${DATAROOTDIR}/man
docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
htmldir, HTML documentation, ${DOCDIR}
dvidir, DVI documentation, ${DOCDIR}
pdfdir, PDF documentation, ${DOCDIR}
psdir, PostScript documentation, ${DOCDIR}
''' % Utils.lib64()

# parsed into [name, description, default] triples, skipping blank lines
_options = [x.split(', ') for x in gnuopts.splitlines() if x]

def configure(conf):
	"""
	Reads the command-line options to set lots of variables in *conf.env*. The variables
	BINDIR and LIBDIR will be overwritten.

	:raises conf.errors.WafError: if some directory variables cannot be resolved
		after all substitution passes
	"""
	def get_param(varname, default):
		# a non-empty command-line value wins, else use the provided default
		return getattr(Options.options, varname, '') or default

	env = conf.env
	env.LIBDIR = env.BINDIR = []
	env.EXEC_PREFIX = get_param('EXEC_PREFIX', env.PREFIX)
	env.PACKAGE = getattr(Context.g_module, 'APPNAME', None) or env.PACKAGE

	# Defaults may reference one another (e.g. ${DATAROOTDIR}/man), so keep
	# substituting until everything resolves or no further pass can help.
	# NOTE: renamed the loop counter from 'iter' and the description field from
	# 'help' so builtins are no longer shadowed.
	complete = False
	attempts = 0
	while not complete and attempts < len(_options) + 1:
		attempts += 1
		complete = True
		for name, _, default in _options:
			name = name.upper()
			if not env[name]:
				try:
					env[name] = Utils.subst_vars(get_param(name, default).replace('/', os.sep), env)
				except TypeError:
					# an unresolved ${VAR} remained in the value; retry next pass
					complete = False

	if not complete:
		lst = [x for x, _, _ in _options if not env[x.upper()]]
		raise conf.errors.WafError('Variable substitution failure %r' % lst)

def options(opt):
	"""
	Adds lots of command-line options, for example::

		--exec-prefix: EXEC_PREFIX
	"""
	inst_dir = opt.add_option_group('Installation prefix',
		'By default, "waf install" will put the files in "/usr/local/bin",'
		' "/usr/local/lib" etc. An installation prefix other than "/usr/local"'
		' can be given using "--prefix", for example "--prefix=$HOME"')

	# move the generic --prefix/--destdir options into this group
	for k in ('--prefix', '--destdir'):
		option = opt.parser.get_option(k)
		if option:
			opt.parser.remove_option(k)
			inst_dir.add_option(option)

	inst_dir.add_option('--exec-prefix',
		help = 'installation prefix for binaries [PREFIX]',
		default = '',
		dest = 'EXEC_PREFIX')

	dirs_options = opt.add_option_group('Installation directories')

	# one option per directory variable; '${VAR}' is shown as 'VAR' in the help text.
	# NOTE: the description field is named 'descr' so the builtin 'help' is not shadowed.
	for name, descr, default in _options:
		option_name = '--' + name
		str_help = '%s [%s]' % (descr, re.sub(r'\$\{([^}]+)\}', r'\1', default))
		dirs_options.add_option(option_name, help=str_help, default='', dest=name.upper())


+ 56
- 58
waflib/Tools/gxx.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009

@@ -14,7 +14,7 @@ from waflib.Configure import conf
@conf
def find_gxx(conf):
"""
Find the program g++, and if present, try to detect its version number
Finds the program g++, and if present, try to detect its version number
"""
cxx = conf.find_program(['g++', 'c++'], var='CXX')
conf.get_cc_version(cxx, gcc=True)
@@ -27,54 +27,51 @@ def gxx_common_flags(conf):
"""
v = conf.env

v['CXX_SRC_F'] = []
v['CXX_TGT_F'] = ['-c', '-o']
v.CXX_SRC_F = []
v.CXX_TGT_F = ['-c', '-o']

# linker
if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
v['CXXLNK_SRC_F'] = []
v['CXXLNK_TGT_F'] = ['-o']
v['CPPPATH_ST'] = '-I%s'
v['DEFINES_ST'] = '-D%s'
if not v.LINK_CXX:
v.LINK_CXX = v.CXX

v['LIB_ST'] = '-l%s' # template for adding libs
v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
v['STLIB_ST'] = '-l%s'
v['STLIBPATH_ST'] = '-L%s'
v['RPATH_ST'] = '-Wl,-rpath,%s'
v.CXXLNK_SRC_F = []
v.CXXLNK_TGT_F = ['-o']
v.CPPPATH_ST = '-I%s'
v.DEFINES_ST = '-D%s'

v['SONAME_ST'] = '-Wl,-h,%s'
v['SHLIB_MARKER'] = '-Wl,-Bdynamic'
v['STLIB_MARKER'] = '-Wl,-Bstatic'
v.LIB_ST = '-l%s' # template for adding libs
v.LIBPATH_ST = '-L%s' # template for adding libpaths
v.STLIB_ST = '-l%s'
v.STLIBPATH_ST = '-L%s'
v.RPATH_ST = '-Wl,-rpath,%s'

# program
v['cxxprogram_PATTERN'] = '%s'
v.SONAME_ST = '-Wl,-h,%s'
v.SHLIB_MARKER = '-Wl,-Bdynamic'
v.STLIB_MARKER = '-Wl,-Bstatic'

# shared library
v['CXXFLAGS_cxxshlib'] = ['-fPIC']
v['LINKFLAGS_cxxshlib'] = ['-shared']
v['cxxshlib_PATTERN'] = 'lib%s.so'
v.cxxprogram_PATTERN = '%s'

# static lib
v['LINKFLAGS_cxxstlib'] = ['-Wl,-Bstatic']
v['cxxstlib_PATTERN'] = 'lib%s.a'
v.CXXFLAGS_cxxshlib = ['-fPIC']
v.LINKFLAGS_cxxshlib = ['-shared']
v.cxxshlib_PATTERN = 'lib%s.so'

# osx stuff
v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
v['CXXFLAGS_MACBUNDLE'] = ['-fPIC']
v['macbundle_PATTERN'] = '%s.bundle'
v.LINKFLAGS_cxxstlib = ['-Wl,-Bstatic']
v.cxxstlib_PATTERN = 'lib%s.a'

v.LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup']
v.CXXFLAGS_MACBUNDLE = ['-fPIC']
v.macbundle_PATTERN = '%s.bundle'

@conf
def gxx_modifier_win32(conf):
"""Configuration flags for executing gcc on Windows"""
v = conf.env
v['cxxprogram_PATTERN'] = '%s.exe'
v.cxxprogram_PATTERN = '%s.exe'

v['cxxshlib_PATTERN'] = '%s.dll'
v['implib_PATTERN'] = 'lib%s.dll.a'
v['IMPLIB_ST'] = '-Wl,--out-implib,%s'
v.cxxshlib_PATTERN = '%s.dll'
v.implib_PATTERN = '%s.dll.a'
v.IMPLIB_ST = '-Wl,--out-implib,%s'

v['CXXFLAGS_cxxshlib'] = []
v.CXXFLAGS_cxxshlib = []

# Auto-import is enabled by default even without this option,
# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
@@ -86,43 +83,43 @@ def gxx_modifier_cygwin(conf):
"""Configuration flags for executing g++ on Cygwin"""
gxx_modifier_win32(conf)
v = conf.env
v['cxxshlib_PATTERN'] = 'cyg%s.dll'
v.cxxshlib_PATTERN = 'cyg%s.dll'
v.append_value('LINKFLAGS_cxxshlib', ['-Wl,--enable-auto-image-base'])
v['CXXFLAGS_cxxshlib'] = []
v.CXXFLAGS_cxxshlib = []

@conf
def gxx_modifier_darwin(conf):
"""Configuration flags for executing g++ on MacOS"""
v = conf.env
v['CXXFLAGS_cxxshlib'] = ['-fPIC']
v['LINKFLAGS_cxxshlib'] = ['-dynamiclib']
v['cxxshlib_PATTERN'] = 'lib%s.dylib'
v['FRAMEWORKPATH_ST'] = '-F%s'
v['FRAMEWORK_ST'] = ['-framework']
v['ARCH_ST'] = ['-arch']
v.CXXFLAGS_cxxshlib = ['-fPIC']
v.LINKFLAGS_cxxshlib = ['-dynamiclib']
v.cxxshlib_PATTERN = 'lib%s.dylib'
v.FRAMEWORKPATH_ST = '-F%s'
v.FRAMEWORK_ST = ['-framework']
v.ARCH_ST = ['-arch']

v['LINKFLAGS_cxxstlib'] = []
v.LINKFLAGS_cxxstlib = []

v['SHLIB_MARKER'] = []
v['STLIB_MARKER'] = []
v['SONAME_ST'] = []
v.SHLIB_MARKER = []
v.STLIB_MARKER = []
v.SONAME_ST = []

@conf
def gxx_modifier_aix(conf):
"""Configuration flags for executing g++ on AIX"""
v = conf.env
v['LINKFLAGS_cxxprogram']= ['-Wl,-brtl']
v.LINKFLAGS_cxxprogram= ['-Wl,-brtl']

v['LINKFLAGS_cxxshlib'] = ['-shared', '-Wl,-brtl,-bexpfull']
v['SHLIB_MARKER'] = []
v.LINKFLAGS_cxxshlib = ['-shared', '-Wl,-brtl,-bexpfull']
v.SHLIB_MARKER = []

@conf
def gxx_modifier_hpux(conf):
v = conf.env
v['SHLIB_MARKER'] = []
v['STLIB_MARKER'] = []
v['CFLAGS_cxxshlib'] = ['-fPIC','-DPIC']
v['cxxshlib_PATTERN'] = 'lib%s.sl'
v.SHLIB_MARKER = []
v.STLIB_MARKER = []
v.CFLAGS_cxxshlib = ['-fPIC','-DPIC']
v.cxxshlib_PATTERN = 'lib%s.sl'

@conf
def gxx_modifier_openbsd(conf):
@@ -131,9 +128,9 @@ def gxx_modifier_openbsd(conf):
@conf
def gcc_modifier_osf1V(conf):
v = conf.env
v['SHLIB_MARKER'] = []
v['STLIB_MARKER'] = []
v['SONAME_ST'] = []
v.SHLIB_MARKER = []
v.STLIB_MARKER = []
v.SONAME_ST = []

@conf
def gxx_modifier_platform(conf):
@@ -156,4 +153,5 @@ def configure(conf):
conf.cxx_load_tools()
conf.cxx_add_flags()
conf.link_add_flags()
conf.check_gcc_o_space('cxx')


+ 3
- 6
waflib/Tools/icc.py View File

@@ -1,10 +1,10 @@
#!/usr/bin/env python
# encoding: utf-8
# Stian Selnes 2008
# Thomas Nagy 2009-2010 (ita)
# Thomas Nagy 2009-2018 (ita)

"""
Detect the Intel C compiler
Detects the Intel C compiler
"""

import sys
@@ -14,11 +14,8 @@ from waflib.Configure import conf
@conf
def find_icc(conf):
"""
Find the program icc and execute it to ensure it really is icc
Finds the program icc and execute it to ensure it really is icc
"""
if sys.platform == 'cygwin':
conf.fatal('The Intel compiler does not work on Cygwin')

cc = conf.find_program(['icc', 'ICL'], var='CC')
conf.get_cc_version(cc, icc=True)
conf.env.CC_NAME = 'icc'


+ 3
- 6
waflib/Tools/icpc.py View File

@@ -1,9 +1,9 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy 2009-2010 (ita)
# Thomas Nagy 2009-2018 (ita)

"""
Detect the Intel C++ compiler
Detects the Intel C++ compiler
"""

import sys
@@ -13,11 +13,8 @@ from waflib.Configure import conf
@conf
def find_icpc(conf):
"""
Find the program icpc, and execute it to ensure it really is icpc
Finds the program icpc, and execute it to ensure it really is icpc
"""
if sys.platform == 'cygwin':
conf.fatal('The Intel compiler does not work on Cygwin')

cxx = conf.find_program('icpc', var='CXX')
conf.get_cc_version(cxx, icc=True)
conf.env.CXX_NAME = 'icc'


+ 413
- 0
waflib/Tools/ifort.py View File

@@ -0,0 +1,413 @@
#! /usr/bin/env python
# encoding: utf-8
# DC 2008
# Thomas Nagy 2016-2018 (ita)

import os, re, traceback
from waflib import Utils, Logs, Errors
from waflib.Tools import fc, fc_config, fc_scan, ar, ccroot
from waflib.Configure import conf
from waflib.TaskGen import after_method, feature

@conf
def find_ifort(conf):
	# locates the ifort binary, detects its version and records the toolchain name
	fc = conf.find_program('ifort', var='FC')
	conf.get_ifort_version(fc)
	conf.env.FC_NAME = 'IFORT'

@conf
def ifort_modifier_win32(self):
	"""
	Configuration flags and file patterns for ifort on Windows (MSVC-style
	command-line conventions).
	"""
	v = self.env
	v.IFORT_WIN32 = True
	v.FCSTLIB_MARKER = ''
	v.FCSHLIB_MARKER = ''

	v.FCLIB_ST = v.FCSTLIB_ST = '%s.lib'
	v.FCLIBPATH_ST = v.STLIBPATH_ST = '/LIBPATH:%s'
	v.FCINCPATH_ST = '/I%s'
	v.FCDEFINES_ST = '/D%s'

	v.fcprogram_PATTERN = v.fcprogram_test_PATTERN = '%s.exe'
	v.fcshlib_PATTERN = '%s.dll'
	v.fcstlib_PATTERN = v.implib_PATTERN = '%s.lib'

	v.FCLNK_TGT_F = '/out:'
	v.FC_TGT_F = ['/c', '/o', '']
	v.FCFLAGS_fcshlib = ''
	v.LINKFLAGS_fcshlib = '/DLL'
	v.AR_TGT_F = '/out:'
	v.IMPLIB_ST = '/IMPLIB:%s'

	v.append_value('LINKFLAGS', '/subsystem:console')
	# manifest embedding, see apply_manifest_ifort
	if v.IFORT_MANIFEST:
		v.append_value('LINKFLAGS', ['/MANIFEST'])

@conf
def ifort_modifier_darwin(conf):
	# ifort on macOS shares the generic Fortran Darwin settings
	fc_config.fortran_modifier_darwin(conf)

@conf
def ifort_modifier_platform(conf):
	"""
	Applies the platform-specific configuration method (ifort_modifier_<platform>)
	when one exists for the destination OS.
	"""
	platform = conf.env.DEST_OS or Utils.unversioned_sys_platform()
	modifier = getattr(conf, 'ifort_modifier_' + platform, None)
	if modifier:
		modifier()

@conf
def get_ifort_version(conf, fc):
	"""
	Detects the compiler version and sets ``conf.env.FC_VERSION``
	as a (major, minor) tuple of strings.

	:param fc: the compiler command as a list
	:raises conf.fatal: if the version cannot be parsed from the output
	"""
	version_re = re.compile(r"\bIntel\b.*\bVersion\s*(?P<major>\d*)\.(?P<minor>\d*)",re.I).search
	if Utils.is_win32:
		cmd = fc
	else:
		# on other platforms '-logo' makes ifort print its banner
		cmd = fc + ['-logo']

	out, err = fc_config.getoutput(conf, cmd, stdin=False)
	# the banner may appear on either stream
	match = version_re(out) or version_re(err)
	if not match:
		conf.fatal('cannot determine ifort version.')
	k = match.groupdict()
	conf.env.FC_VERSION = (k['major'], k['minor'])

def configure(conf):
	"""
	Detects the Intel Fortran compilers
	"""
	if Utils.is_win32:
		compiler, version, path, includes, libdirs, arch = conf.detect_ifort()
		v = conf.env
		v.DEST_CPU = arch
		v.PATH = path
		v.INCLUDES = includes
		v.LIBPATH = libdirs
		v.MSVC_COMPILER = compiler
		try:
			v.MSVC_VERSION = float(version)
		except ValueError:
			# presumably a version string with a non-numeric suffix; drop the
			# last 3 characters and retry - TODO confirm the exact format
			v.MSVC_VERSION = float(version[:-3])

		conf.find_ifort_win32()
		conf.ifort_modifier_win32()
	else:
		conf.find_ifort()
		# xiar is Intel's archiver wrapper
		conf.find_program('xiar', var='AR')
		conf.find_ar()
		conf.fc_flags()
		conf.fc_add_flags()
		conf.ifort_modifier_platform()


all_ifort_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')]
"""List of icl platforms"""

@conf
def gather_ifort_versions(conf, versions):
	"""
	List compiler versions by looking up registry keys.

	Fills *versions* with entries of the form ``'intel NN' -> {target: target_compiler}``.
	Returns silently when no Intel Fortran registry key exists.

	:param versions: dict to fill in place
	"""
	# raw string: '\.' in a plain literal raises an invalid-escape warning on
	# modern Python (the matched pattern itself is unchanged)
	version_pattern = re.compile(r'^...?.?\....?.?')
	try:
		all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\Fortran')
	except OSError:
		try:
			all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\Fortran')
		except OSError:
			return
	index = 0
	while 1:
		try:
			version = Utils.winreg.EnumKey(all_versions, index)
		except OSError:
			break
		index += 1
		if not version_pattern.match(version):
			continue
		targets = {}
		# older registry layout: per-target subkeys such as EM64T_NATIVE
		for target, arch in all_ifort_platforms:
			if target == 'intel64':
				targetDir = 'EM64T_NATIVE'
			else:
				targetDir = target
			try:
				Utils.winreg.OpenKey(all_versions, version + '\\' + targetDir)
				icl_version = Utils.winreg.OpenKey(all_versions, version)
				path, _ = Utils.winreg.QueryValueEx(icl_version, 'ProductDir')
			except OSError:
				pass
			else:
				batch_file = os.path.join(path, 'bin', 'ifortvars.bat')
				if os.path.isfile(batch_file):
					targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)

		# newer layout: the target name is the subkey itself
		for target, arch in all_ifort_platforms:
			try:
				icl_version = Utils.winreg.OpenKey(all_versions, version + '\\' + target)
				path, _ = Utils.winreg.QueryValueEx(icl_version, 'ProductDir')
			except OSError:
				continue
			else:
				batch_file = os.path.join(path, 'bin', 'ifortvars.bat')
				if os.path.isfile(batch_file):
					targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
		major = version[0:2]
		versions['intel ' + major] = targets

@conf
def setup_ifort(conf, versiondict):
	"""
	Checks installed compilers and targets and returns the first combination from the user's
	options, env, or the global supported lists that checks.

	:param versiondict: dict(platform -> dict(architecture -> configuration))
	:type versiondict: dict(string -> dict(string -> target_compiler)
	:return: the compiler, revision, path, include dirs, library paths and target architecture
	:rtype: tuple of strings
	:raises conf.fatal: when no usable compiler/architecture pair is found
	"""
	platforms = Utils.to_list(conf.env.MSVC_TARGETS) or [i for i,j in all_ifort_platforms]
	# newest versions first unless the user pinned MSVC_VERSIONS
	desired_versions = conf.env.MSVC_VERSIONS or list(reversed(list(versiondict.keys())))
	for version in desired_versions:
		try:
			targets = versiondict[version]
		except KeyError:
			continue
		for arch in platforms:
			try:
				cfg = targets[arch]
			except KeyError:
				continue
			# lazy check: evaluate() runs the vars batch file only once per config
			cfg.evaluate()
			if cfg.is_valid:
				compiler,revision = version.rsplit(' ', 1)
				return compiler,revision,cfg.bindirs,cfg.incdirs,cfg.libdirs,cfg.cpu
	conf.fatal('ifort: Impossible to find a valid architecture for building %r - %r' % (desired_versions, list(versiondict.keys())))

@conf
def get_ifort_version_win32(conf, compiler, version, target, vcvars):
	"""
	Runs the compiler's environment batch file (ifortvars.bat) and extracts
	the PATH/INCLUDE/LIB values it sets, then verifies the compiler can run.

	:return: (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR) lists of directories
	:raises conf.fatal: when the environment cannot be extracted or the compiler cannot run
	"""
	# FIXME hack
	try:
		conf.msvc_cnt += 1
	except AttributeError:
		conf.msvc_cnt = 1
	# a throwaway batch file echoes the environment after calling vcvars
	batfile = conf.bldnode.make_node('waf-print-msvc-%d.bat' % conf.msvc_cnt)
	batfile.write("""@echo off
set INCLUDE=
set LIB=
call "%s" %s
echo PATH=%%PATH%%
echo INCLUDE=%%INCLUDE%%
echo LIB=%%LIB%%;%%LIBPATH%%
""" % (vcvars,target))
	sout = conf.cmd_and_log(['cmd.exe', '/E:on', '/V:on', '/C', batfile.abspath()])
	batfile.delete()
	lines = sout.splitlines()

	if not lines[0]:
		lines.pop(0)

	MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None
	for line in lines:
		if line.startswith('PATH='):
			path = line[5:]
			MSVC_PATH = path.split(';')
		elif line.startswith('INCLUDE='):
			MSVC_INCDIR = [i for i in line[8:].split(';') if i]
		elif line.startswith('LIB='):
			MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
	if None in (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR):
		conf.fatal('ifort: Could not find a valid architecture for building (get_ifort_version_win32)')

	# Check if the compiler is usable at all.
	# The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
	env = dict(os.environ)
	env.update(PATH = path)
	compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
	fc = conf.find_program(compiler_name, path_list=MSVC_PATH)

	# delete CL if exists. because it could contain parameters which can change cl's behaviour rather catastrophically.
	if 'CL' in env:
		del(env['CL'])

	try:
		conf.cmd_and_log(fc + ['/help'], env=env)
	except UnicodeError:
		st = traceback.format_exc()
		if conf.logger:
			conf.logger.error(st)
		conf.fatal('ifort: Unicode error - check the code page?')
	except Exception as e:
		Logs.debug('ifort: get_ifort_version: %r %r %r -> failure %s', compiler, version, target, str(e))
		conf.fatal('ifort: cannot run the compiler in get_ifort_version (run with -v to display errors)')
	else:
		Logs.debug('ifort: get_ifort_version: %r %r %r -> OK', compiler, version, target)
	finally:
		# the probe must not leave a compiler binding behind
		conf.env[compiler_name] = ''

	return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)

class target_compiler(object):
	"""
	Wraps a compiler configuration; call evaluate() to determine
	whether the configuration is usable.
	"""
	def __init__(self, ctx, compiler, cpu, version, bat_target, bat, callback=None):
		"""
		:param ctx: configuration context to use to eventually get the version environment
		:param compiler: compiler name
		:param cpu: target cpu
		:param version: compiler version number
		:param bat_target: ?
		:param bat: path to the batch file to run
		:param callback: optional function to take the realized environment variables tup and map it (e.g. to combine other constant paths)
		"""
		self.conf = ctx
		self.name = None
		self.is_valid = False
		# evaluate() is idempotent; is_done guards against re-running the batch file
		self.is_done = False

		self.compiler = compiler
		self.cpu = cpu
		self.version = version
		self.bat_target = bat_target
		self.bat = bat
		self.callback = callback

	def evaluate(self):
		# runs the vars batch file once and caches bindirs/incdirs/libdirs
		if self.is_done:
			return
		self.is_done = True
		try:
			vs = self.conf.get_ifort_version_win32(self.compiler, self.version, self.bat_target, self.bat)
		except Errors.ConfigurationError:
			self.is_valid = False
			return
		if self.callback:
			vs = self.callback(self, vs)
		self.is_valid = True
		(self.bindirs, self.incdirs, self.libdirs) = vs

	def __str__(self):
		return str((self.bindirs, self.incdirs, self.libdirs))

	def __repr__(self):
		return repr((self.bindirs, self.incdirs, self.libdirs))

@conf
def detect_ifort(self):
	# gathers the installed versions and picks the first usable one
	return self.setup_ifort(self.get_ifort_versions(False))

@conf
def get_ifort_versions(self, eval_and_save=True):
	"""
	:return: platforms to compiler configurations
	:rtype: dict
	"""
	# NOTE(review): eval_and_save is currently unused by this implementation
	dct = {}
	self.gather_ifort_versions(dct)
	return dct

def _get_prog_names(self, compiler):
if compiler=='intel':
compiler_name = 'ifort'
linker_name = 'XILINK'
lib_name = 'XILIB'
else:
# assumes CL.exe
compiler_name = 'CL'
linker_name = 'LINK'
lib_name = 'LIB'
return compiler_name, linker_name, lib_name

@conf
def find_ifort_win32(conf):
	"""
	Binds the ifort toolchain programs (compiler, linker, archiver, manifest tool)
	on Windows, using the PATH/compiler/version values stored by configure().

	:raises conf.fatal: when the located compiler does not identify itself
	"""
	# the autodetection is supposed to be performed before entering in this method
	v = conf.env
	path = v.PATH
	compiler = v.MSVC_COMPILER
	version = v.MSVC_VERSION

	compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
	# manifest embedding applies to intel >= 11 only
	v.IFORT_MANIFEST = (compiler == 'intel' and version >= 11)

	# compiler
	fc = conf.find_program(compiler_name, var='FC', path_list=path)

	# before setting anything, check if the compiler is really intel fortran
	env = dict(conf.environ)
	if path:
		env.update(PATH = ';'.join(path))
	if not conf.cmd_and_log(fc + ['/nologo', '/help'], env=env):
		# fixed garbled error message ('not intel fortran compiler could not be identified')
		conf.fatal('Intel Fortran compiler could not be identified')

	v.FC_NAME = 'IFORT'

	if not v.LINK_FC:
		conf.find_program(linker_name, var='LINK_FC', path_list=path, mandatory=True)

	if not v.AR:
		conf.find_program(lib_name, path_list=path, var='AR', mandatory=True)
		v.ARFLAGS = ['/nologo']

	# manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later
	if v.IFORT_MANIFEST:
		conf.find_program('MT', path_list=path, var='MT')
		v.MTFLAGS = ['/nologo']

	try:
		conf.load('winres')
	except Errors.WafError:
		Logs.warn('Resource compiler not found. Compiling resource file is disabled')

#######################################################################################################
##### conf above, build below

@after_method('apply_link')
@feature('fc')
def apply_flags_ifort(self):
	"""
	Adds additional flags implied by msvc, such as subsystems and pdb files::

		def build(bld):
			bld.stlib(source='main.c', target='bar', subsystem='gruik')
	"""
	# only relevant for ifort on Windows, and only when something is linked
	if not self.env.IFORT_WIN32 or not getattr(self, 'link_task', None):
		return

	is_static = isinstance(self.link_task, ccroot.stlink_task)

	subsystem = getattr(self, 'subsystem', '')
	if subsystem:
		subsystem = '/subsystem:%s' % subsystem
		# static archives take the flag through ARFLAGS instead of LINKFLAGS
		flags = is_static and 'ARFLAGS' or 'LINKFLAGS'
		self.env.append_value(flags, subsystem)

	if not is_static:
		for f in self.env.LINKFLAGS:
			d = f.lower()
			# any '/debug' or '-debug' flag implies a .pdb output next to the binary
			if d[1:] == 'debug':
				pdbnode = self.link_task.outputs[0].change_ext('.pdb')
				self.link_task.outputs.append(pdbnode)

				if getattr(self, 'install_task', None):
					self.pdb_install_task = self.add_install_files(install_to=self.install_task.install_to, install_from=pdbnode)

				break

@feature('fcprogram', 'fcshlib', 'fcprogram_test')
@after_method('apply_link')
def apply_manifest_ifort(self):
	"""
	Enables manifest embedding in Fortran DLLs when using ifort on Windows
	See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx
	"""
	if self.env.IFORT_WIN32 and getattr(self, 'link_task', None):
		# it seems ifort.exe cannot be called for linking
		self.link_task.env.FC = self.env.LINK_FC

	if self.env.IFORT_WIN32 and self.env.IFORT_MANIFEST and getattr(self, 'link_task', None):
		# declare the .manifest as an extra output so it is tracked and embedded
		out_node = self.link_task.outputs[0]
		man_node = out_node.parent.find_or_declare(out_node.name + '.manifest')
		self.link_task.outputs.append(man_node)
		self.env.DO_MANIFEST = True


+ 231
- 0
waflib/Tools/intltool.py View File

@@ -0,0 +1,231 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2018 (ita)

"""
Support for translation tools such as msgfmt and intltool

Usage::

def configure(conf):
conf.load('gnu_dirs intltool')

def build(bld):
# process the .po files into .gmo files, and install them in LOCALEDIR
bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}")

# process an input file, substituting the translations from the po dir
bld(
features = "intltool_in",
podir = "../po",
style = "desktop",
flags = ["-u"],
source = 'kupfer.desktop.in',
install_path = "${DATADIR}/applications",
)

Usage of the :py:mod:`waflib.Tools.gnu_dirs` is recommended, but not obligatory.
"""

from __future__ import with_statement

import os, re
from waflib import Context, Task, Utils, Logs
import waflib.Tools.ccroot
from waflib.TaskGen import feature, before_method, taskgen_method
from waflib.Logs import error
from waflib.Configure import conf

_style_flags = {
'ba': '-b',
'desktop': '-d',
'keys': '-k',
'quoted': '--quoted-style',
'quotedxml': '--quotedxml-style',
'rfc822deb': '-r',
'schemas': '-s',
'xml': '-x',
}

@taskgen_method
def ensure_localedir(self):
    """
    Expands LOCALEDIR from DATAROOTDIR/locale if possible, or falls back to PREFIX/share/locale
    """
    env = self.env
    # the gnu_dirs tool provides options to define these directories
    if env.LOCALEDIR:
        return
    if env.DATAROOTDIR:
        env.LOCALEDIR = os.path.join(env.DATAROOTDIR, 'locale')
    else:
        env.LOCALEDIR = os.path.join(env.PREFIX, 'share', 'locale')

@before_method('process_source')
@feature('intltool_in')
def apply_intltool_in_f(self):
    """
    Creates tasks to translate files by intltool-merge::

        def build(bld):
            bld(
                features  = "intltool_in",
                podir     = "../po",
                style     = "desktop",
                flags     = ["-u"],
                source    = 'kupfer.desktop.in',
                install_path = "${DATADIR}/applications",
            )

    :param podir: location of the .po files
    :type podir: string
    :param source: source files to process
    :type source: list of string
    :param style: the intltool-merge mode of operation, can be one of the following values:
      ``ba``, ``desktop``, ``keys``, ``quoted``, ``quotedxml``, ``rfc822deb``, ``schemas`` and ``xml``.
      See the ``intltool-merge`` man page for more information about supported modes of operation.
    :type style: string
    :param flags: compilation flags ("-quc" by default)
    :type flags: list of string
    :param install_path: installation path
    :type install_path: string
    """
    # this feature replaces the default source processing entirely
    try:
        self.meths.remove('process_source')
    except ValueError:
        pass

    self.ensure_localedir()

    podir = getattr(self, 'podir', '.')
    podirnode = self.path.find_dir(podir)
    if not podirnode:
        error("could not find the podir %r" % podir)
        return

    # the cache file lives in the build directory next to the po files
    cache = getattr(self, 'intlcache', '.intlcache')
    self.env.INTLCACHE = [os.path.join(str(self.path.get_bld()), podir, cache)]
    self.env.INTLPODIR = podirnode.bldpath()
    self.env.append_value('INTLFLAGS', getattr(self, 'flags', self.env.INTLFLAGS_DEFAULT))

    # -c would conflict with the INTLCACHE handling above
    if '-c' in self.env.INTLFLAGS:
        self.bld.fatal('Redundant -c flag in intltool task %r' % self)

    style = getattr(self, 'style', None)
    if style:
        try:
            style_flag = _style_flags[style]
        except KeyError:
            self.bld.fatal('intltool_in style "%s" is not valid' % style)

        self.env.append_unique('INTLFLAGS', [style_flag])

    for i in self.to_list(self.source):
        node = self.path.find_resource(i)

        # the output file drops the trailing '.in' extension
        task = self.create_task('intltool', node, node.change_ext(''))
        inst = getattr(self, 'install_path', None)
        if inst:
            self.add_install_files(install_to=inst, install_from=task.outputs)

@feature('intltool_po')
def apply_intltool_po(self):
    """
    Creates tasks to process po files::

        def build(bld):
            bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}")

    The relevant task generator arguments are:

    :param podir: directory of the .po files
    :type podir: string
    :param appname: name of the application
    :type appname: string
    :param install_path: installation directory
    :type install_path: string

    The file LINGUAS must be present in the directory pointed by *podir* and list the translation files to process.
    """
    # this feature replaces the default source processing entirely
    try:
        self.meths.remove('process_source')
    except ValueError:
        pass

    self.ensure_localedir()

    appname = getattr(self, 'appname', getattr(Context.g_module, Context.APPNAME, 'set_your_app_name'))
    podir = getattr(self, 'podir', '.')
    inst = getattr(self, 'install_path', '${LOCALEDIR}')

    linguas = self.path.find_node(os.path.join(podir, 'LINGUAS'))
    if linguas:
        # scan LINGUAS file for locales to process
        with open(linguas.abspath()) as f:
            langs = []
            for line in f.readlines():
                # ignore lines containing comments
                if not line.startswith('#'):
                    langs += line.split()
            re_linguas = re.compile('[-a-zA-Z_@.]+')
            for lang in langs:
                # Make sure that we only process lines which contain locales
                if re_linguas.match(lang):
                    node = self.path.find_resource(os.path.join(podir, re_linguas.match(lang).group() + '.po'))
                    task = self.create_task('po', node, node.change_ext('.mo'))

                    if inst:
                        # install to <inst>/<lang>/LC_MESSAGES/<appname>.mo
                        filename = task.outputs[0].name
                        (langname, ext) = os.path.splitext(filename)
                        inst_file = inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + appname + '.mo'
                        self.add_install_as(install_to=inst_file, install_from=task.outputs[0],
                            chmod=getattr(self, 'chmod', Utils.O644))

    else:
        Logs.pprint('RED', "Error no LINGUAS file found in po directory")

class po(Task.Task):
    """
    Compiles .po files into .gmo files
    """
    # msgfmt does the actual compilation
    run_str = '${MSGFMT} -o ${TGT} ${SRC}'
    color   = 'BLUE'

class intltool(Task.Task):
    """
    Calls intltool-merge to update translation files
    """
    # INTLCACHE is formatted through INTLCACHE_ST (see find_intltool_merge)
    run_str = '${INTLTOOL} ${INTLFLAGS} ${INTLCACHE_ST:INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}'
    color   = 'BLUE'

@conf
def find_msgfmt(conf):
    """Locates the msgfmt program and stores it in ``conf.env.MSGFMT``."""
    conf.find_program('msgfmt', var='MSGFMT')

@conf
def find_intltool_merge(conf):
    """Locates intltool-merge (a perl script) and sets the default intltool flags."""
    env = conf.env
    # intltool-merge is executed through perl, so find perl first if needed
    if not env.PERL:
        conf.find_program('perl', var='PERL')
    env.INTLCACHE_ST = '--cache=%s'
    env.INTLFLAGS_DEFAULT = ['-q', '-u']
    conf.find_program('intltool-merge', interpreter='PERL', var='INTLTOOL')

def configure(conf):
    """
    Detects the program *msgfmt* and set *conf.env.MSGFMT*.
    Detects the program *intltool-merge* and set *conf.env.INTLTOOL*.
    It is possible to set INTLTOOL in the environment, but it must not have spaces in it::

        $ INTLTOOL="/path/to/the program/intltool" waf configure

    If a C/C++ compiler is present, execute a compilation test to find the header *locale.h*.
    """
    conf.find_msgfmt()
    conf.find_intltool_merge()
    # only possible when a compiler was loaded before this tool
    if conf.env.CC or conf.env.CXX:
        conf.check(header_name='locale.h')


+ 30
- 24
waflib/Tools/irixcc.py View File

@@ -1,11 +1,12 @@
#! /usr/bin/env python
# encoding: utf-8
# imported from samba

"""
compiler definition for irix/MIPSpro cc compiler
based on suncc.py from waf
Compiler definition for irix/MIPSpro cc compiler
"""

from waflib import Errors
from waflib.Tools import ccroot, ar
from waflib.Configure import conf

@@ -13,41 +14,46 @@ from waflib.Configure import conf
def find_irixcc(conf):
    """
    Looks for the MIPSpro cc compiler (env.CC or $CC, falling back to a PATH
    search), verifies that it answers ``-version``, and sets ``conf.env.CC``
    and ``conf.env.CC_NAME``.

    NOTE(review): the scraped view interleaved the pre- and post-patch diff
    lines (both ``v['CC']`` and ``v.CC`` variants); this is the updated form only.
    """
    v = conf.env
    cc = None
    if v.CC:
        cc = v.CC
    elif 'CC' in conf.environ:
        cc = conf.environ['CC']
    if not cc:
        cc = conf.find_program('cc', var='CC')
    if not cc:
        conf.fatal('irixcc was not found')

    # sanity check: the compiler must at least answer '-version'
    try:
        conf.cmd_and_log(cc + ['-version'])
    except Errors.WafError:
        conf.fatal('%r -version could not be executed' % cc)

    v.CC = cc
    v.CC_NAME = 'irix'

@conf
def irixcc_common_flags(conf):
    """
    Sets the flag templates (include/define/lib patterns, link flags and
    file-name patterns) required for the MIPSpro cc compiler.

    NOTE(review): the scraped view interleaved the pre- and post-patch diff
    lines (both ``v['X']`` and ``v.X`` assignments); this is the updated form only.
    """
    v = conf.env

    v.CC_SRC_F = ''
    v.CC_TGT_F = ['-c', '-o']
    v.CPPPATH_ST = '-I%s'
    v.DEFINES_ST = '-D%s'

    # linker
    if not v.LINK_CC:
        v.LINK_CC = v.CC

    v.CCLNK_SRC_F = ''
    v.CCLNK_TGT_F = ['-o']

    v.LIB_ST = '-l%s' # template for adding libs
    v.LIBPATH_ST = '-L%s' # template for adding libpaths
    v.STLIB_ST = '-l%s'
    v.STLIBPATH_ST = '-L%s'

    v.cprogram_PATTERN = '%s'
    v.cshlib_PATTERN = 'lib%s.so'
    v.cstlib_PATTERN = 'lib%s.a'

def configure(conf):
conf.find_irixcc()


+ 464
- 0
waflib/Tools/javaw.py View File

@@ -0,0 +1,464 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2018 (ita)

"""
Java support

Javac is one of the few compilers that behaves very badly:

#. it outputs files where it wants to (-d is only for the package root)

#. it recompiles files silently behind your back

#. it outputs an undefined amount of files (inner classes)

Remember that the compilation can be performed using Jython[1] rather than regular Python. Instead of
running one of the following commands::

./waf configure
python waf configure

You would have to run::

java -jar /path/to/jython.jar waf configure

[1] http://www.jython.org/
"""

import os, shutil
from waflib import Task, Utils, Errors, Node
from waflib.Configure import conf
from waflib.TaskGen import feature, before_method, after_method

from waflib.Tools import ccroot
ccroot.USELIB_VARS['javac'] = set(['CLASSPATH', 'JAVACFLAGS'])

SOURCE_RE = '**/*.java'
JAR_RE = '**/*'

class_check_source = '''
public class Test {
public static void main(String[] argv) {
Class lib;
if (argv.length < 1) {
System.err.println("Missing argument");
System.exit(77);
}
try {
lib = Class.forName(argv[0]);
} catch (ClassNotFoundException e) {
System.err.println("ClassNotFoundException");
System.exit(1);
}
lib = null;
System.exit(0);
}
}
'''

@feature('javac')
@before_method('process_source')
def apply_java(self):
    """
    Create a javac task for compiling *.java files*. There can be
    only one javac task by task generator.
    """
    Utils.def_attrs(self, jarname='', classpath='',
        sourcepath='.', srcdir='.',
        jar_mf_attributes={}, jar_mf_classpath=[])

    # resolve the output directory (defaults to the build dir of this path)
    outdir = getattr(self, 'outdir', None)
    if outdir:
        if not isinstance(outdir, Node.Node):
            outdir = self.path.get_bld().make_node(self.outdir)
    else:
        outdir = self.path.get_bld()
    outdir.mkdir()
    self.outdir = outdir
    self.env.OUTDIR = outdir.abspath()

    self.javac_task = tsk = self.create_task('javac')
    tmp = []

    # normalize srcdir into a list of directory nodes
    srcdir = getattr(self, 'srcdir', '')
    if isinstance(srcdir, Node.Node):
        srcdir = [srcdir]
    for x in Utils.to_list(srcdir):
        if isinstance(x, Node.Node):
            y = x
        else:
            y = self.path.find_dir(x)
            if not y:
                self.bld.fatal('Could not find the folder %s from %s' % (x, self.path))
        tmp.append(y)

    tsk.srcdir = tmp

    # optional source-compatibility level, e.g. compat='1.6'
    if getattr(self, 'compat', None):
        tsk.env.append_value('JAVACFLAGS', ['-source', str(self.compat)])

    if hasattr(self, 'sourcepath'):
        fold = [isinstance(x, Node.Node) and x or self.path.find_dir(x) for x in self.to_list(self.sourcepath)]
        names = os.pathsep.join([x.srcpath() for x in fold])
    else:
        names = [x.srcpath() for x in tsk.srcdir]

    if names:
        tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names])

@feature('javac')
@before_method('propagate_uselib_vars')
@after_method('apply_java')
def use_javac_files(self):
    """
    Processes the *use* attribute referring to other java compilations
    """
    classpath_entries = []
    self.uselib = self.to_list(getattr(self, 'uselib', []))
    get = self.bld.get_tgen_by_name
    for name in self.to_list(getattr(self, 'use', [])):
        try:
            other = get(name)
        except Errors.WafError:
            # not a task generator name: assume it is a uselib variable
            self.uselib.append(name)
            continue
        other.post()
        if hasattr(other, 'jar_task'):
            # depend on the produced jar and add it to the classpath
            classpath_entries.append(other.jar_task.outputs[0].abspath())
            self.javac_task.set_run_after(other.jar_task)
        else:
            # no jar: just enforce the build order on all of its tasks
            for t in other.tasks:
                self.javac_task.set_run_after(t)
    self.env.append_value('CLASSPATH', classpath_entries)

@feature('javac')
@after_method('apply_java', 'propagate_uselib_vars', 'use_javac_files')
def set_classpath(self):
    """
    Sets the CLASSPATH value on the *javac* task previously created.
    """
    extra = getattr(self, 'classpath', None)
    if extra:
        self.env.append_unique('CLASSPATH', extra)
    # flatten the classpath list once; the trailing separator is intentional
    joined = os.pathsep.join(self.env.CLASSPATH) + os.pathsep
    for tsk in self.tasks:
        tsk.env.CLASSPATH = joined

@feature('jar')
@after_method('apply_java', 'use_javac_files')
@before_method('process_source')
def jar_files(self):
    """
    Creates a jar task (one maximum per task generator)
    """
    destfile = getattr(self, 'destfile', 'test.jar')
    jaropts = getattr(self, 'jaropts', [])
    manifest = getattr(self, 'manifest', None)

    # resolve the directory whose contents will be archived
    basedir = getattr(self, 'basedir', None)
    if basedir:
        if not isinstance(self.basedir, Node.Node):
            basedir = self.path.get_bld().make_node(basedir)
    else:
        basedir = self.path.get_bld()
    if not basedir:
        self.bld.fatal('Could not find the basedir %r for %r' % (self.basedir, self))

    self.jar_task = tsk = self.create_task('jar_create')
    if manifest:
        # 'm' flag: pass the manifest file first on the command line
        jarcreate = getattr(self, 'jarcreate', 'cfm')
        if not isinstance(manifest,Node.Node):
            node = self.path.find_resource(manifest)
        else:
            node = manifest
        if not node:
            self.bld.fatal('invalid manifest file %r for %r' % (manifest, self))
        tsk.dep_nodes.append(node)
        jaropts.insert(0, node.abspath())
    else:
        jarcreate = getattr(self, 'jarcreate', 'cf')
    if not isinstance(destfile, Node.Node):
        destfile = self.path.find_or_declare(destfile)
    if not destfile:
        self.bld.fatal('invalid destfile %r for %r' % (destfile, self))
    tsk.set_outputs(destfile)
    tsk.basedir = basedir

    # jar -C <basedir> . : archive everything below basedir
    jaropts.append('-C')
    jaropts.append(basedir.bldpath())
    jaropts.append('.')

    tsk.env.JAROPTS = jaropts
    tsk.env.JARCREATE = jarcreate

    # ensure compilation happens before archiving
    if getattr(self, 'javac_task', None):
        tsk.set_run_after(self.javac_task)

@feature('jar')
@after_method('jar_files')
def use_jar_files(self):
    """
    Processes the *use* attribute to set the build order on the
    tasks created by another task generator.
    """
    self.uselib = self.to_list(getattr(self, 'uselib', []))
    for name in self.to_list(getattr(self, 'use', [])):
        try:
            other = self.bld.get_tgen_by_name(name)
        except Errors.WafError:
            # unknown generator name: treat it as a uselib variable
            self.uselib.append(name)
        else:
            other.post()
            self.jar_task.run_after.update(other.tasks)

class JTask(Task.Task):
    """
    Base class for java and jar tasks; provides functionality to run long commands
    """
    def split_argfile(self, cmd):
        # jar and javac do not accept -J flags inside an @argfile, so those
        # stay on the command line while everything else goes into the file
        rest = cmd[1:]
        inline = [cmd[0]] + [arg for arg in rest if arg.startswith('-J')]
        infile = [self.quote_flag(arg) for arg in rest if not arg.startswith('-J')]
        return (inline, infile)

class jar_create(JTask):
    """
    Creates a jar file
    """
    color   = 'GREEN'
    run_str = '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}'

    def runnable_status(self):
        """
        Wait for dependent tasks to be executed, then read the
        files to update the list of inputs.
        """
        for t in self.run_after:
            if not t.hasrun:
                return Task.ASK_LATER
        if not self.inputs:
            # collect everything below basedir except the jar itself
            try:
                self.inputs = [x for x in self.basedir.ant_glob(JAR_RE, remove=False) if id(x) != id(self.outputs[0])]
            except Exception:
                raise Errors.WafError('Could not find the basedir %r for %r' % (self.basedir, self))
        return super(jar_create, self).runnable_status()

class javac(JTask):
    """
    Compiles java files
    """
    color = 'BLUE'
    run_str = '${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}'
    vars = ['CLASSPATH', 'JAVACFLAGS', 'JAVAC', 'OUTDIR']
    """
    The javac task will be executed again if the variables CLASSPATH, JAVACFLAGS, JAVAC or OUTDIR change.
    """
    def uid(self):
        """Identify java tasks by input&output folder"""
        # inputs are discovered late (see runnable_status), so the uid is
        # based on the directories rather than on the input file list
        lst = [self.__class__.__name__, self.generator.outdir.abspath()]
        for x in self.srcdir:
            lst.append(x.abspath())
        return Utils.h_list(lst)

    def runnable_status(self):
        """
        Waits for dependent tasks to be complete, then read the file system to find the input nodes.
        """
        for t in self.run_after:
            if not t.hasrun:
                return Task.ASK_LATER

        if not self.inputs:
            # scan the source folders for .java files (generated code included)
            self.inputs = []
            for x in self.srcdir:
                if x.exists():
                    self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False))
        return super(javac, self).runnable_status()

    def post_run(self):
        """
        List class files created
        """
        # record the produced .class files (inner classes included) under this task's signature
        for node in self.generator.outdir.ant_glob('**/*.class'):
            self.generator.bld.node_sigs[node] = self.uid()
        self.generator.bld.task_sigs[self.uid()] = self.cache_sig

@feature('javadoc')
@after_method('process_rule')
def create_javadoc(self):
    """
    Creates a javadoc task (feature 'javadoc')
    """
    task = self.create_task('javadoc')
    task.classpath = getattr(self, 'classpath', [])
    self.javadoc_package = Utils.to_list(self.javadoc_package)
    # normalize the output location into a node
    out = self.javadoc_output
    if not isinstance(out, Node.Node):
        self.javadoc_output = self.bld.path.find_or_declare(out)

class javadoc(Task.Task):
    """
    Builds java documentation
    """
    color = 'BLUE'

    def __str__(self):
        return '%s: %s -> %s\n' % (self.__class__.__name__, self.generator.srcdir, self.generator.javadoc_output)

    def run(self):
        env = self.env
        bld = self.generator.bld
        wd = bld.bldnode

        #add src node + bld node (for generated java code)
        srcpath = self.generator.path.abspath() + os.sep + self.generator.srcdir
        srcpath += os.pathsep
        srcpath += self.generator.path.get_bld().abspath() + os.sep + self.generator.srcdir

        # NOTE(review): env.CLASSPATH is concatenated with separator strings and
        # then joined; presumably it is a string here — confirm against callers
        classpath = env.CLASSPATH
        classpath += os.pathsep
        classpath += os.pathsep.join(self.classpath)
        classpath = "".join(classpath)

        # build the javadoc command line; empty entries are filtered below
        self.last_cmd = lst = []
        lst.extend(Utils.to_list(env.JAVADOC))
        lst.extend(['-d', self.generator.javadoc_output.abspath()])
        lst.extend(['-sourcepath', srcpath])
        lst.extend(['-classpath', classpath])
        lst.extend(['-subpackages'])
        lst.extend(self.generator.javadoc_package)
        lst = [x for x in lst if x]

        self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None, quiet=0)

    def post_run(self):
        # record all generated files under this task's signature
        nodes = self.generator.javadoc_output.ant_glob('**')
        for node in nodes:
            self.generator.bld.node_sigs[node] = self.uid()
        self.generator.bld.task_sigs[self.uid()] = self.cache_sig

def configure(self):
    """
    Detects the javac, java and jar programs
    """
    # If JAVA_PATH is set, we prepend it to the path list
    java_path = self.environ['PATH'].split(os.pathsep)
    v = self.env

    # JAVA_HOME/bin takes precedence over the regular PATH
    if 'JAVA_HOME' in self.environ:
        java_path = [os.path.join(self.environ['JAVA_HOME'], 'bin')] + java_path
        self.env.JAVA_HOME = [self.environ['JAVA_HOME']]

    # javadoc is detected too, but only jar and javac are mandatory below
    for x in 'javac java jar javadoc'.split():
        self.find_program(x, var=x.upper(), path_list=java_path)

    if 'CLASSPATH' in self.environ:
        v.CLASSPATH = self.environ['CLASSPATH']

    if not v.JAR:
        self.fatal('jar is required for making java packages')
    if not v.JAVAC:
        self.fatal('javac is required for compiling java classes')

    v.JARCREATE = 'cf' # can use cvf
    v.JAVACFLAGS = []

@conf
def check_java_class(self, classname, with_classpath=None):
    """
    Checks if the specified java class exists

    :param classname: class to check, like java.util.HashMap
    :type classname: string
    :param with_classpath: additional classpath to give
    :type with_classpath: string
    """
    javatestdir = '.waf-javatest'

    # assemble the classpath: test dir + configured CLASSPATH + extra path
    classpath = javatestdir
    if self.env.CLASSPATH:
        classpath += os.pathsep + self.env.CLASSPATH
    if isinstance(with_classpath, str):
        classpath += os.pathsep + with_classpath

    # start from a clean scratch directory
    shutil.rmtree(javatestdir, True)
    os.mkdir(javatestdir)

    Utils.writef(os.path.join(javatestdir, 'Test.java'), class_check_source)

    # Compile the source
    self.exec_command(self.env.JAVAC + [os.path.join(javatestdir, 'Test.java')], shell=False)

    # Try to run the app
    cmd = self.env.JAVA + ['-cp', classpath, 'Test', classname]
    self.to_log("%s\n" % str(cmd))
    found = self.exec_command(cmd, shell=False)

    # NOTE(review): 'found' looks like the process exit status, so 0 (falsy)
    # means the class was found — hence the 'not found' below; confirm
    self.msg('Checking for java class %s' % classname, not found)

    shutil.rmtree(javatestdir, True)

    return found

@conf
def check_jni_headers(conf):
    """
    Checks for jni headers and libraries. On success the conf.env variables xxx_JAVA are added for use in C/C++ targets::

        def options(opt):
            opt.load('compiler_c')

        def configure(conf):
            conf.load('compiler_c java')
            conf.check_jni_headers()

        def build(bld):
            bld.shlib(source='a.c', target='app', use='JAVA')
    """
    if not conf.env.CC_NAME and not conf.env.CXX_NAME:
        conf.fatal('load a compiler first (gcc, g++, ..)')

    if not conf.env.JAVA_HOME:
        conf.fatal('set JAVA_HOME in the system environment')

    # jni requires the jvm
    javaHome = conf.env.JAVA_HOME[0]

    # locate the JNI headers (renamed from 'dir' to avoid shadowing the builtin)
    incnode = conf.root.find_dir(conf.env.JAVA_HOME[0] + '/include')
    if incnode is None:
        incnode = conf.root.find_dir(conf.env.JAVA_HOME[0] + '/../Headers') # think different?!
    if incnode is None:
        conf.fatal('JAVA_HOME does not seem to be set properly')

    headers = incnode.ant_glob('**/(jni|jni_md).h')
    incDirs = [x.parent.abspath() for x in headers]

    homenode = conf.root.find_dir(conf.env.JAVA_HOME[0])
    shlibs = homenode.ant_glob('**/*jvm.(so|dll|dylib)')
    libDirs = [x.parent.abspath() for x in shlibs] or [javaHome]

    # On windows, we need both the .dll and .lib to link. On my JDK, they are
    # in different directories...
    implibs = homenode.ant_glob('**/*jvm.(lib)')
    if implibs:
        libDirs = [[x, y.parent.abspath()] for x in libDirs for y in implibs]

    if conf.env.DEST_OS == 'freebsd':
        conf.env.append_unique('LINKFLAGS_JAVA', '-pthread')
    # try each candidate directory until one links; for-else reports failure
    for d in libDirs:
        try:
            conf.check(header_name='jni.h', define_name='HAVE_JNI_H', lib='jvm',
                libpath=d, includes=incDirs, uselib_store='JAVA', uselib='JAVA')
        except Exception:
            # a failed conf.check only means "try the next directory"
            pass
        else:
            break
    else:
        conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs)


+ 56
- 0
waflib/Tools/ldc2.py View File

@@ -0,0 +1,56 @@
#!/usr/bin/env python
# encoding: utf-8
# Alex Rønne Petersen, 2012 (alexrp/Zor)

from waflib.Tools import ar, d
from waflib.Configure import conf

@conf
def find_ldc2(conf):
    """
    Finds the program *ldc2* and set the variable *D*
    """
    conf.find_program(['ldc2'], var='D')
    # sanity check: ldc2 reports the DMD front-end it is based on
    version_text = conf.cmd_and_log(conf.env.D + ['-version'])
    if 'based on DMD v2.' not in version_text:
        conf.fatal('detected compiler is not ldc2')

@conf
def common_flags_ldc2(conf):
    """
    Sets the D flags required by *ldc2*
    """
    v = conf.env

    v.D_SRC_F = ['-c']
    v.D_TGT_F = '-of%s'

    v.D_LINKER = v.D
    v.DLNK_SRC_F = ''
    v.DLNK_TGT_F = '-of%s'
    v.DINC_ST = '-I%s'

    # linker flags are forwarded through the compiler with the -L prefix
    v.DSHLIB_MARKER = v.DSTLIB_MARKER = ''
    v.DSTLIB_ST = v.DSHLIB_ST = '-L-l%s'
    v.DSTLIBPATH_ST = v.DLIBPATH_ST = '-L-L%s'

    v.LINKFLAGS_dshlib = ['-L-shared']

    # D interface-file generation
    v.DHEADER_ext = '.di'
    v.DFLAGS_d_with_header = ['-H', '-Hf']
    v.D_HDR_F = '%s'

    v.LINKFLAGS = []
    v.DFLAGS_dshlib = ['-relocation-model=pic']

def configure(conf):
    """
    Configuration for *ldc2*
    """
    conf.find_ldc2()
    for tool in ('ar', 'd'):
        conf.load(tool)
    conf.common_flags_ldc2()
    conf.d_platform_flags()


+ 38
- 0
waflib/Tools/lua.py View File

@@ -0,0 +1,38 @@
#!/usr/bin/env python
# encoding: utf-8
# Sebastian Schlingmann, 2008
# Thomas Nagy, 2008-2018 (ita)

"""
Lua support.

Compile *.lua* files into *.luac*::

def configure(conf):
conf.load('lua')
conf.env.LUADIR = '/usr/local/share/myapp/scripts/'
def build(bld):
bld(source='foo.lua')
"""

from waflib.TaskGen import extension
from waflib import Task

@extension('.lua')
def add_lua(self, node):
    """Creates a luac task for a .lua source and schedules installation if requested."""
    task = self.create_task('luac', node, node.change_ext('.luac'))
    # default install destination: ${LUADIR} when configured, otherwise none
    default_dest = '${LUADIR}' if self.env.LUADIR else None
    destination = getattr(self, 'install_path', default_dest)
    if destination:
        self.add_install_files(install_to=destination, install_from=task.outputs)
    return task

class luac(Task.Task):
    # byte-compiles (and strips, -s) a lua file
    run_str = '${LUAC} -s -o ${TGT} ${SRC}'
    color   = 'PINK'

def configure(conf):
    """Locates the luac compiler and stores it in ``conf.env.LUAC``."""
    conf.find_program('luac', var='LUAC')


+ 39
- 0
waflib/Tools/md5_tstamp.py View File

@@ -0,0 +1,39 @@
#! /usr/bin/env python
# encoding: utf-8

"""
Re-calculate md5 hashes of files only when the file times or the file
size have changed.

The hashes can also reflect either the file contents (STRONGEST=True) or the
file time and file size.

The performance benefits of this module are usually insignificant.
"""

import os, stat
from waflib import Utils, Build, Node

STRONGEST = True

# persist the hash cache on the build context across waf runs
Build.SAVED_ATTRS.append('hashes_md5_tstamp')
def h_file(self):
    filename = self.abspath()
    st = os.stat(filename)

    # reuse the cached hash when the modification time has not changed
    cache = self.ctx.hashes_md5_tstamp
    if filename in cache and cache[filename][0] == st.st_mtime:
        return cache[filename][1]

    if STRONGEST:
        # strong mode: hash the actual file contents
        ret = Utils.h_file(filename)
    else:
        if stat.S_ISDIR(st[stat.ST_MODE]):
            raise IOError('Not a file')
        # weak mode: hash derived from mtime and size only
        ret = Utils.md5(str((st.st_mtime, st.st_size)).encode()).digest()

    cache[filename] = (st.st_mtime, ret)
    return ret
h_file.__doc__ = Node.Node.h_file.__doc__
# monkey-patch the Node class so all hashing goes through the cache above
Node.Node.h_file = h_file


+ 357
- 512
waflib/Tools/msvc.py
File diff suppressed because it is too large
View File


+ 26
- 0
waflib/Tools/nasm.py View File

@@ -0,0 +1,26 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2008-2018 (ita)

"""
Nasm tool (asm processing)
"""

import os
import waflib.Tools.asm # leave this
from waflib.TaskGen import feature

@feature('asm')
def apply_nasm_vars(self):
    """provided for compatibility"""
    extra_flags = self.to_list(getattr(self, 'nasm_flags', []))
    self.env.append_value('ASFLAGS', extra_flags)

def configure(conf):
    """
    Detect nasm/yasm and set the variable *AS*
    """
    conf.find_program(['nasm', 'yasm'], var='AS')
    env = conf.env
    env.AS_TGT_F = ['-o']
    env.ASLNK_TGT_F = ['-o']
    conf.load('asm')
    # nasm include paths need a trailing separator
    env.ASMPATH_ST = '-I%s' + os.sep

+ 24
- 0
waflib/Tools/nobuild.py View File

@@ -0,0 +1,24 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2015 (ita)

"""
Override the build commands to write empty files.
This is useful for profiling and evaluating the Python overhead.

To use::

def build(bld):
...
bld.load('nobuild')

"""

from waflib import Task
def build(bld):
def run(self):
for x in self.outputs:
x.write('')
for (name, cls) in Task.classes.items():
cls.run = run


+ 156
- 0
waflib/Tools/perl.py View File

@@ -0,0 +1,156 @@
#!/usr/bin/env python
# encoding: utf-8
# andersg at 0x63.nu 2007
# Thomas Nagy 2016-2018 (ita)

"""
Support for Perl extensions. A C/C++ compiler is required::

def options(opt):
opt.load('compiler_c perl')
def configure(conf):
conf.load('compiler_c perl')
conf.check_perl_version((5,6,0))
conf.check_perl_ext_devel()
conf.check_perl_module('Cairo')
conf.check_perl_module('Devel::PPPort 4.89')
def build(bld):
bld(
features = 'c cshlib perlext',
source = 'Mytest.xs',
target = 'Mytest',
install_path = '${ARCHDIR_PERL}/auto')
bld.install_files('${ARCHDIR_PERL}', 'Mytest.pm')
"""

import os
from waflib import Task, Options, Utils, Errors
from waflib.Configure import conf
from waflib.TaskGen import extension, feature, before_method

@before_method('apply_incpaths', 'apply_link', 'propagate_uselib_vars')
@feature('perlext')
def init_perlext(self):
    """
    Change the values of *cshlib_PATTERN* and *cxxshlib_PATTERN* to remove the
    *lib* prefix from library names.
    """
    self.uselib = self.to_list(getattr(self, 'uselib', []))
    if 'PERLEXT' not in self.uselib:
        self.uselib.append('PERLEXT')
    # perl extensions are named Foo.so, not libFoo.so
    pattern = self.env.perlext_PATTERN
    self.env.cshlib_PATTERN = pattern
    self.env.cxxshlib_PATTERN = pattern

@extension('.xs')
def xsubpp_file(self, node):
    """
    Create :py:class:`waflib.Tools.perl.xsubpp` tasks to process *.xs* files
    """
    c_node = node.change_ext('.c')
    self.create_task('xsubpp', node, c_node)
    # the generated C file is compiled like any other source file
    self.source.append(c_node)

class xsubpp(Task.Task):
    """
    Process *.xs* files
    """
    run_str = '${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}'
    color   = 'BLUE'
    # the generated C file is a header-producing step for dependent tasks
    ext_out = ['.h']

@conf
def check_perl_version(self, minver=None):
    """
    Check if Perl is installed, and set the variable PERL.
    minver is supposed to be a tuple
    """
    res = True
    if minver:
        cver = '.'.join(map(str,minver))
    else:
        cver = ''

    self.start_msg('Checking for minimum perl version %s' % cver)

    # --with-perl-binary overrides the PATH search
    perl = self.find_program('perl', var='PERL', value=getattr(Options.options, 'perlbinary', None))
    # $^V formatted with %vd yields a dotted version string, e.g. 5.26.1
    version = self.cmd_and_log(perl + ["-e", 'printf \"%vd\", $^V'])
    if not version:
        res = False
        version = "Unknown"
    elif not minver is None:
        ver = tuple(map(int, version.split(".")))
        if ver < minver:
            res = False

    self.end_msg(version, color=res and 'GREEN' or 'YELLOW')
    return res

@conf
def check_perl_module(self, module):
    """
    Check if specified perlmodule is installed.

    The minimum version can be specified by specifying it after modulename
    like this::

        def configure(conf):
            conf.check_perl_module("Some::Module 2.92")
    """
    self.start_msg('perl module %s' % module)
    try:
        # 'use Module [version]' fails when the module is missing or too old
        output = self.cmd_and_log(self.env.PERL + ['-e', 'use %s' % module])
    except Errors.WafError:
        self.end_msg(False)
        return None
    self.end_msg(output or True)
    return output

@conf
def check_perl_ext_devel(self):
    """
    Check for configuration needed to build perl extensions.

    Sets different xxx_PERLEXT variables in the environment.

    Also sets the ARCHDIR_PERL variable useful as installation path,
    which can be overridden by ``--with-perl-archdir`` option.
    """

    env = self.env
    perl = env.PERL
    if not perl:
        self.fatal('find perl first')

    # helpers querying perl's own Config module
    def cmd_perl_config(s):
        return perl + ['-MConfig', '-e', 'print \"%s\"' % s]
    def cfg_str(cfg):
        return self.cmd_and_log(cmd_perl_config(cfg))
    def cfg_lst(cfg):
        return Utils.to_list(cfg_str(cfg))
    def find_xsubpp():
        # prefer the xsubpp shipped with perl itself, fall back to PATH
        for var in ('privlib', 'vendorlib'):
            xsubpp = cfg_lst('$Config{%s}/ExtUtils/xsubpp$Config{exe_ext}' % var)
            if xsubpp and os.path.isfile(xsubpp[0]):
                return xsubpp
        return self.find_program('xsubpp')

    env.LINKFLAGS_PERLEXT = cfg_lst('$Config{lddlflags}')
    env.INCLUDES_PERLEXT = cfg_lst('$Config{archlib}/CORE')
    env.CFLAGS_PERLEXT = cfg_lst('$Config{ccflags} $Config{cccdlflags}')
    env.EXTUTILS_TYPEMAP = cfg_lst('$Config{privlib}/ExtUtils/typemap')
    env.XSUBPP = find_xsubpp()

    # installation directory: --with-perl-archdir wins over $Config{sitearch}
    if not getattr(Options.options, 'perlarchdir', None):
        env.ARCHDIR_PERL = cfg_str('$Config{sitearch}')
    else:
        env.ARCHDIR_PERL = getattr(Options.options, 'perlarchdir')

    env.perlext_PATTERN = '%s.' + cfg_str('$Config{dlext}')

def options(opt):
    """
    Add the ``--with-perl-archdir`` and ``--with-perl-binary`` command-line options.
    """
    add = opt.add_option
    add('--with-perl-binary', type='string', dest='perlbinary', default=None,
        help='Specify alternate perl binary')
    add('--with-perl-archdir', type='string', dest='perlarchdir', default=None,
        help='Specify directory where to install arch specific files')


+ 627
- 0
waflib/Tools/python.py View File

@@ -0,0 +1,627 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2007-2015 (ita)
# Gustavo Carneiro (gjc), 2007

"""
Support for Python, detect the headers and libraries and provide
*use* variables to link C/C++ programs against them::

def options(opt):
opt.load('compiler_c python')
def configure(conf):
conf.load('compiler_c python')
conf.check_python_version((2,4,2))
conf.check_python_headers()
def build(bld):
bld.program(features='pyembed', source='a.c', target='myprog')
bld.shlib(features='pyext', source='b.c', target='mylib')
"""

import os, sys
from waflib import Errors, Logs, Node, Options, Task, Utils
from waflib.TaskGen import extension, before_method, after_method, feature
from waflib.Configure import conf

FRAG = '''
#include <Python.h>
#ifdef __cplusplus
extern "C" {
#endif
void Py_Initialize(void);
void Py_Finalize(void);
#ifdef __cplusplus
}
#endif
int main(int argc, char **argv)
{
(void)argc; (void)argv;
Py_Initialize();
Py_Finalize();
return 0;
}
'''
"""
Piece of C/C++ code used in :py:func:`waflib.Tools.python.check_python_headers`
"""

INST = '''
import sys, py_compile
py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3], True)
'''
"""
Piece of Python code used in :py:class:`waflib.Tools.python.pyo` and :py:class:`waflib.Tools.python.pyc` for byte-compiling python files
"""

DISTUTILS_IMP = ['from distutils.sysconfig import get_config_var, get_python_lib']

@before_method('process_source')
@feature('py')
def feature_py(self):
    """
    Create tasks to byte-compile .py files and install them, if requested
    """
    self.install_path = getattr(self, 'install_path', '${PYTHONDIR}')
    install_from = getattr(self, 'install_from', None)
    if install_from and not isinstance(install_from, Node.Node):
        install_from = self.path.find_dir(install_from)
    self.install_from = install_from

    ver = self.env.PYTHON_VERSION
    if not ver:
        self.bld.fatal('Installing python files requires PYTHON_VERSION, try conf.check_python_version')

    # '3.2' -> 32; python >= 3.2 byte-compiles into __pycache__ (PEP 3147)
    if int(ver.replace('.', '')) > 31:
        self.install_32 = True

@extension('.py')
def process_py(self, node):
	"""
	Add signature of .py file, so it will be byte-compiled when necessary
	"""
	# install_path is set by feature_py(); without features='py' it is missing
	assert(hasattr(self, 'install_path')), 'add features="py"'

	# where to install the python file
	if self.install_path:
		if self.install_from:
			# keep the directory layout relative to install_from
			self.add_install_files(install_to=self.install_path, install_from=node, cwd=self.install_from, relative_trick=True)
		else:
			self.add_install_files(install_to=self.install_path, install_from=node, relative_trick=True)

	# which byte-compiled variants to produce; PYC/PYO are set in configure()
	lst = []
	if self.env.PYC:
		lst.append('pyc')
	if self.env.PYO:
		lst.append('pyo')

	# pyd: final installed path of the source, recorded inside the byte-code
	# object (passed to py_compile by the pyc/pyo tasks below)
	if self.install_path:
		if self.install_from:
			pyd = Utils.subst_vars("%s/%s" % (self.install_path, node.path_from(self.install_from)), self.env)
		else:
			pyd = Utils.subst_vars("%s/%s" % (self.install_path, node.path_from(self.path)), self.env)
	else:
		pyd = node.abspath()

	for ext in lst:
		if self.env.PYTAG and not self.env.NOPYCACHE:
			# __pycache__ installation for python 3.2 - PEP 3147
			name = node.name[:-3]
			pyobj = node.parent.get_bld().make_node('__pycache__').make_node("%s.%s.%s" % (name, self.env.PYTAG, ext))
			pyobj.parent.mkdir()
		else:
			pyobj = node.change_ext(".%s" % ext)

		# 'ext' doubles as the task class name ('pyc' or 'pyo')
		tsk = self.create_task(ext, node, pyobj)
		tsk.pyd = pyd

		if self.install_path:
			self.add_install_files(install_to=os.path.dirname(pyd), install_from=pyobj, cwd=node.parent.get_bld(), relative_trick=True)

class pyc(Task.Task):
	"""
	Byte-compiling python files
	"""
	color = 'PINK'
	def __str__(self):
		# display the output node relative to the launch directory
		out = self.outputs[0]
		return out.path_from(out.ctx.launch_node())
	def run(self):
		# run the configured interpreter with the INST snippet:
		# py_compile.compile(src, dst, installed_path, doraise)
		interp = Utils.subst_vars('${PYTHON}', self.env)
		cmd = [interp, '-c', INST, self.inputs[0].abspath(), self.outputs[0].abspath(), self.pyd]
		return self.generator.bld.exec_command(cmd)

class pyo(Task.Task):
	"""
	Byte-compiling python files
	"""
	color = 'PINK'
	def __str__(self):
		# display the output node relative to the launch directory
		out = self.outputs[0]
		return out.path_from(out.ctx.launch_node())
	def run(self):
		# same as pyc, but with the optimization flag (PYFLAGS_OPT, typically -O)
		interp = Utils.subst_vars('${PYTHON}', self.env)
		opt = Utils.subst_vars('${PYFLAGS_OPT}', self.env)
		cmd = [interp, opt, '-c', INST, self.inputs[0].abspath(), self.outputs[0].abspath(), self.pyd]
		return self.generator.bld.exec_command(cmd)

@feature('pyext')
@before_method('propagate_uselib_vars', 'apply_link')
@after_method('apply_bundle')
def init_pyext(self):
	"""
	Change the values of *cshlib_PATTERN* and *cxxshlib_PATTERN* to remove the
	*lib* prefix from library names.
	"""
	uselib = self.to_list(getattr(self, 'uselib', []))
	if 'PYEXT' not in uselib:
		uselib.append('PYEXT')
	self.uselib = uselib

	# override shlib_PATTERN set by the osx module
	pattern = self.env.pyext_PATTERN
	self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.macbundle_PATTERN = pattern
	self.env.fcshlib_PATTERN = self.env.dshlib_PATTERN = pattern

	# default the installation directory unless the user set install_path
	try:
		if not self.install_path:
			return
	except AttributeError:
		self.install_path = '${PYTHONARCHDIR}'

@feature('pyext')
@before_method('apply_link', 'apply_bundle')
def set_bundle(self):
	"""Mac-specific pyext extension that enables bundles from c_osx.py"""
	# guard clause: only darwin builds use mac bundles
	if Utils.unversioned_sys_platform() != 'darwin':
		return
	self.mac_bundle = True

@before_method('propagate_uselib_vars')
@feature('pyembed')
def init_pyembed(self):
	"""
	Add the PYEMBED variable.
	"""
	# ensure the PYEMBED use-variables (flags detected at configure time) apply
	lst = self.to_list(getattr(self, 'uselib', []))
	if 'PYEMBED' not in lst:
		lst.append('PYEMBED')
	self.uselib = lst

@conf
def get_python_variables(self, variables, imports=None):
	"""
	Spawn a new python process to dump configuration variables

	:param variables: variables to print
	:type variables: list of string
	:param imports: one import by element
	:type imports: list of string
	:return: the variable values
	:rtype: list of string
	"""
	if not imports:
		try:
			# per-project override of the import preamble, if any
			imports = self.python_imports
		except AttributeError:
			imports = DISTUTILS_IMP

	program = list(imports) # copy
	program.append('')
	# one print(repr(...)) per requested variable; parsed back below
	for v in variables:
		program.append("print(repr(%s))" % v)
	os_env = dict(os.environ)
	try:
		del os_env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool
	except KeyError:
		pass

	try:
		out = self.cmd_and_log(self.env.PYTHON + ['-c', '\n'.join(program)], env=os_env)
	except Errors.WafError:
		self.fatal('The distutils module is unusable: install "python-devel"?')
	self.to_log(out)
	return_values = []
	for s in out.splitlines():
		s = s.strip()
		if not s:
			continue
		if s == 'None':
			return_values.append(None)
		elif (s[0] == "'" and s[-1] == "'") or (s[0] == '"' and s[-1] == '"'):
			# eval() only sees repr() output produced by the child interpreter above
			return_values.append(eval(s))
		elif s[0].isdigit():
			return_values.append(int(s))
		# any other line stops the scan; remaining output is silently dropped
		else: break
	return return_values

@conf
def test_pyembed(self, mode, msg='Testing pyembed configuration'):
	"""Try building the FRAG test program with the pyembed feature ('c' or 'cxx' mode)."""
	features = '%s %sprogram pyembed' % (mode, mode)
	self.check(header_name='Python.h', define_name='HAVE_PYEMBED', msg=msg,
		features=features, fragment=FRAG,
		errmsg='Could not build a python embedded interpreter')

@conf
def test_pyext(self, mode, msg='Testing pyext configuration'):
	"""Try building the FRAG test fragment as a python extension ('c' or 'cxx' mode)."""
	features = '%s %sshlib pyext' % (mode, mode)
	self.check(header_name='Python.h', define_name='HAVE_PYEXT', msg=msg,
		features=features, fragment=FRAG,
		errmsg='Could not build python extensions')

@conf
def python_cross_compile(self, features='pyembed pyext'):
	"""
	For cross-compilation purposes, it is possible to bypass the normal detection and set the flags that you want:
	PYTHON_VERSION='3.4' PYTAG='cpython34' pyext_PATTERN="%s.so" PYTHON_LDFLAGS='-lpthread -ldl' waf configure

	The following variables are used:
	PYTHON_VERSION required
	PYTAG required
	PYTHON_LDFLAGS required
	pyext_PATTERN required
	PYTHON_PYEXT_LDFLAGS
	PYTHON_PYEMBED_LDFLAGS

	:return: True if cross-compile variables were found and applied, else False
	"""
	features = Utils.to_list(features)
	# no cross-compile env vars at all -> caller should run the normal detection
	if not ('PYTHON_LDFLAGS' in self.environ or 'PYTHON_PYEXT_LDFLAGS' in self.environ or 'PYTHON_PYEMBED_LDFLAGS' in self.environ):
		return False

	# once one cross var is present, the remaining ones become mandatory
	for x in 'PYTHON_VERSION PYTAG pyext_PATTERN'.split():
		if not x in self.environ:
			self.fatal('Please set %s in the os environment' % x)
		else:
			self.env[x] = self.environ[x]

	# pick the compiler mode matching the loaded toolchain
	xx = self.env.CXX_NAME and 'cxx' or 'c'
	if 'pyext' in features:
		# specific pyext flags win over the generic PYTHON_LDFLAGS
		flags = self.environ.get('PYTHON_PYEXT_LDFLAGS', self.environ.get('PYTHON_LDFLAGS'))
		if flags is None:
			self.fatal('No flags provided through PYTHON_PYEXT_LDFLAGS as required')
		else:
			self.parse_flags(flags, 'PYEXT')
		self.test_pyext(xx)
	if 'pyembed' in features:
		flags = self.environ.get('PYTHON_PYEMBED_LDFLAGS', self.environ.get('PYTHON_LDFLAGS'))
		if flags is None:
			self.fatal('No flags provided through PYTHON_PYEMBED_LDFLAGS as required')
		else:
			self.parse_flags(flags, 'PYEMBED')
		self.test_pyembed(xx)
	return True

@conf
def check_python_headers(conf, features='pyembed pyext'):
	"""
	Check for headers and libraries necessary to extend or embed python by using the module *distutils*.
	On success the environment variables xxx_PYEXT and xxx_PYEMBED are added:

	* PYEXT: for compiling python extensions
	* PYEMBED: for embedding a python interpreter

	Detection order: cross-compile env vars, then pythonX.Y-config, then
	a distutils-based fallback (mainly for win32).
	"""
	features = Utils.to_list(features)
	assert ('pyembed' in features) or ('pyext' in features), "check_python_headers features must include 'pyembed' and/or 'pyext'"
	env = conf.env
	if not env.CC_NAME and not env.CXX_NAME:
		conf.fatal('load a compiler first (gcc, g++, ..)')

	# bypass all the code below for cross-compilation
	if conf.python_cross_compile(features):
		return

	if not env.PYTHON_VERSION:
		conf.check_python_version()

	pybin = env.PYTHON
	if not pybin:
		conf.fatal('Could not find the python executable')

	# so we actually do all this for compatibility reasons and for obtaining pyext_PATTERN below
	v = 'prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split()
	try:
		lst = conf.get_python_variables(["get_config_var('%s') or ''" % x for x in v])
	except RuntimeError:
		conf.fatal("Python development headers not found (-v for details).")

	vals = ['%s = %r' % (x, y) for (x, y) in zip(v, lst)]
	conf.to_log("Configuration returned from %r:\n%s\n" % (pybin, '\n'.join(vals)))

	dct = dict(zip(v, lst))
	x = 'MACOSX_DEPLOYMENT_TARGET'
	if dct[x]:
		# propagate the deployment target to child processes (see the OSX tool)
		env[x] = conf.environ[x] = dct[x]
	env.pyext_PATTERN = '%s' + dct['SO'] # not a mistake


	# Try to get pythonX.Y-config
	num = '.'.join(env.PYTHON_VERSION.split('.')[:2])
	conf.find_program([''.join(pybin) + '-config', 'python%s-config' % num, 'python-config-%s' % num, 'python%sm-config' % num], var='PYTHON_CONFIG', msg="python-config", mandatory=False)

	if env.PYTHON_CONFIG:
		# python2.6-config requires 3 runs
		all_flags = [['--cflags', '--libs', '--ldflags']]
		if sys.hexversion < 0x2070000:
			all_flags = [[k] for k in all_flags[0]]

		xx = env.CXX_NAME and 'cxx' or 'c'

		if 'pyembed' in features:
			for flags in all_flags:
				conf.check_cfg(msg='Asking python-config for pyembed %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=flags)

			try:
				conf.test_pyembed(xx)
			except conf.errors.ConfigurationError:
				# python bug 7352
				if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']:
					env.append_unique('LIBPATH_PYEMBED', [dct['LIBDIR']])
					conf.test_pyembed(xx)
				else:
					raise

		if 'pyext' in features:
			for flags in all_flags:
				conf.check_cfg(msg='Asking python-config for pyext %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEXT', args=flags)

			try:
				conf.test_pyext(xx)
			except conf.errors.ConfigurationError:
				# python bug 7352
				if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']:
					env.append_unique('LIBPATH_PYEXT', [dct['LIBDIR']])
					conf.test_pyext(xx)
				else:
					raise

		conf.define('HAVE_PYTHON_H', 1)
		return

	# No python-config, do something else on windows systems
	all_flags = dct['LDFLAGS'] + ' ' + dct['CFLAGS']
	conf.parse_flags(all_flags, 'PYEMBED')

	all_flags = dct['LDFLAGS'] + ' ' + dct['LDSHARED'] + ' ' + dct['CFLAGS']
	conf.parse_flags(all_flags, 'PYEXT')

	result = None
	if not dct["LDVERSION"]:
		dct["LDVERSION"] = env.PYTHON_VERSION

	# further simplification will be complicated
	# try each plausible library name against each plausible library path
	for name in ('python' + dct['LDVERSION'], 'python' + env.PYTHON_VERSION + 'm', 'python' + env.PYTHON_VERSION.replace('.', '')):

		# LIBPATH_PYEMBED is already set; see if it works.
		if not result and env.LIBPATH_PYEMBED:
			path = env.LIBPATH_PYEMBED
			conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n" % path)
			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBPATH_PYEMBED' % name)

		if not result and dct['LIBDIR']:
			path = [dct['LIBDIR']]
			conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n" % path)
			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBDIR' % name)

		if not result and dct['LIBPL']:
			path = [dct['LIBPL']]
			conf.to_log("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in python_LIBPL' % name)

		if not result:
			path = [os.path.join(dct['prefix'], "libs")]
			conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in $prefix/libs' % name)

		if result:
			break # do not forget to set LIBPATH_PYEMBED

	if result:
		# 'path' and 'name' are the last values tried in the loop above
		env.LIBPATH_PYEMBED = path
		env.append_value('LIB_PYEMBED', [name])
	else:
		# non-fatal: embedding may still work via flags parsed earlier
		conf.to_log("\n\n### LIB NOT FOUND\n")

	# under certain conditions, python extensions must link to
	# python libraries, not just python embedding programs.
	if Utils.is_win32 or dct['Py_ENABLE_SHARED']:
		env.LIBPATH_PYEXT = env.LIBPATH_PYEMBED
		env.LIB_PYEXT = env.LIB_PYEMBED

	conf.to_log("Include path for Python extensions (found via distutils module): %r\n" % (dct['INCLUDEPY'],))
	env.INCLUDES_PYEXT = [dct['INCLUDEPY']]
	env.INCLUDES_PYEMBED = [dct['INCLUDEPY']]

	# Code using the Python API needs to be compiled with -fno-strict-aliasing
	if env.CC_NAME == 'gcc':
		env.append_value('CFLAGS_PYEMBED', ['-fno-strict-aliasing'])
		env.append_value('CFLAGS_PYEXT', ['-fno-strict-aliasing'])
	if env.CXX_NAME == 'gcc':
		env.append_value('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing'])
		env.append_value('CXXFLAGS_PYEXT', ['-fno-strict-aliasing'])

	if env.CC_NAME == "msvc":
		# mirror the flags distutils would use when building extensions
		from distutils.msvccompiler import MSVCCompiler
		dist_compiler = MSVCCompiler()
		dist_compiler.initialize()
		env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options)
		env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options)
		env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared)

	# See if it compiles
	conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', uselib='PYEMBED', fragment=FRAG, errmsg='Distutils not installed? Broken python installation? Get python-config now!')

@conf
def check_python_version(conf, minver=None):
	"""
	Check if the python interpreter is found matching a given minimum version.
	minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver.

	If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR' (eg. '2.4')
	of the actual python version found, and PYTHONDIR and PYTHONARCHDIR
	are defined, pointing to the site-packages directories appropriate for
	this python version, where modules/packages/extensions should be
	installed.

	:param minver: minimum version
	:type minver: tuple of int
	"""
	assert minver is None or isinstance(minver, tuple)
	pybin = conf.env.PYTHON
	if not pybin:
		conf.fatal('could not find the python executable')

	# Get python version string
	cmd = pybin + ['-c', 'import sys\nfor x in sys.version_info: print(str(x))']
	Logs.debug('python: Running python command %r', cmd)
	lines = conf.cmd_and_log(cmd).split()
	assert len(lines) == 5, "found %r lines, expected 5: %r" % (len(lines), lines)
	# mirrors sys.version_info: (major, minor, micro, releaselevel, serial)
	pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))

	# Compare python version with the minimum required
	result = (minver is None) or (pyver_tuple >= minver)

	if result:
		# define useful environment variables
		pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
		conf.env.PYTHON_VERSION = pyver

		if 'PYTHONDIR' in conf.env:
			# Check if --pythondir was specified
			pydir = conf.env.PYTHONDIR
		elif 'PYTHONDIR' in conf.environ:
			# Check environment for PYTHONDIR
			pydir = conf.environ['PYTHONDIR']
		else:
			# Finally, try to guess
			if Utils.is_win32:
				(python_LIBDEST, pydir) = conf.get_python_variables(
					["get_config_var('LIBDEST') or ''",
					"get_python_lib(standard_lib=0) or ''"])
			else:
				python_LIBDEST = None
				(pydir,) = conf.get_python_variables( ["get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
			if python_LIBDEST is None:
				# derive a fallback from LIBDIR/PREFIX
				if conf.env.LIBDIR:
					python_LIBDEST = os.path.join(conf.env.LIBDIR, 'python' + pyver)
				else:
					python_LIBDEST = os.path.join(conf.env.PREFIX, 'lib', 'python' + pyver)

		if 'PYTHONARCHDIR' in conf.env:
			# Check if --pythonarchdir was specified
			pyarchdir = conf.env.PYTHONARCHDIR
		elif 'PYTHONARCHDIR' in conf.environ:
			# Check environment for PYTHONARCHDIR
			pyarchdir = conf.environ['PYTHONARCHDIR']
		else:
			# Finally, try to guess
			(pyarchdir, ) = conf.get_python_variables( ["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
			if not pyarchdir:
				pyarchdir = pydir

		if hasattr(conf, 'define'): # conf.define is added by the C tool, so may not exist
			conf.define('PYTHONDIR', pydir)
			conf.define('PYTHONARCHDIR', pyarchdir)

		conf.env.PYTHONDIR = pydir
		conf.env.PYTHONARCHDIR = pyarchdir

	# Feedback
	pyver_full = '.'.join(map(str, pyver_tuple[:3]))
	if minver is None:
		conf.msg('Checking for python version', pyver_full)
	else:
		minver_str = '.'.join(map(str, minver))
		conf.msg('Checking for python version >= %s' % (minver_str,), pyver_full, color=result and 'GREEN' or 'YELLOW')

	if not result:
		conf.fatal('The python version is too old, expecting %r' % (minver,))

PYTHON_MODULE_TEMPLATE = '''
import %s as current_module
version = getattr(current_module, '__version__', None)
if version is not None:
print(str(version))
else:
print('unknown version')
'''

@conf
def check_python_module(conf, module_name, condition=''):
	"""
	Check if the selected python interpreter can import the given python module::

		def configure(conf):
			conf.check_python_module('pygccxml')
			conf.check_python_module('re', condition="ver > num(2, 0, 4) and ver <= num(3, 0, 0)")

	:param module_name: module
	:type module_name: string
	:param condition: optional version expression evaluated with 'ver' and 'num' in scope
	:type condition: string
	"""
	msg = "Checking for python module %r" % module_name
	if condition:
		msg = '%s (%s)' % (msg, condition)
	conf.start_msg(msg)
	try:
		# run the target interpreter; prints the module __version__ or 'unknown version'
		ret = conf.cmd_and_log(conf.env.PYTHON + ['-c', PYTHON_MODULE_TEMPLATE % module_name])
	except Errors.WafError:
		conf.end_msg(False)
		conf.fatal('Could not find the python module %r' % module_name)

	ret = ret.strip()
	if condition:
		conf.end_msg(ret)
		if ret == 'unknown version':
			conf.fatal('Could not check the %s version' % module_name)

		from distutils.version import LooseVersion
		def num(*k):
			# accept either num(2, 0, 4) or num('2.0.4')
			if isinstance(k[0], int):
				return LooseVersion('.'.join([str(x) for x in k]))
			else:
				return LooseVersion(k[0])
		d = {'num': num, 'ver': LooseVersion(ret)}
		# condition comes from the project wscript (trusted input)
		ev = eval(condition, {}, d)
		if not ev:
			conf.fatal('The %s version does not satisfy the requirements' % module_name)
	else:
		if ret == 'unknown version':
			conf.end_msg(True)
		else:
			conf.end_msg(ret)

def configure(conf):
	"""
	Detect the python interpreter
	"""
	v = conf.env
	# honor command-line overrides declared in options()
	if getattr(Options.options, 'pythondir', None):
		v.PYTHONDIR = Options.options.pythondir
	if getattr(Options.options, 'pythonarchdir', None):
		v.PYTHONARCHDIR = Options.options.pythonarchdir
	if getattr(Options.options, 'nopycache', None):
		v.NOPYCACHE=Options.options.nopycache

	if not v.PYTHON:
		# default to --python, else the interpreter running waf itself
		v.PYTHON = [getattr(Options.options, 'python', None) or sys.executable]
	v.PYTHON = Utils.to_list(v.PYTHON)
	conf.find_program('python', var='PYTHON')

	v.PYFLAGS = ''
	v.PYFLAGS_OPT = '-O'

	# whether to install byte-compiled (.pyc) and optimized (.pyo) files
	v.PYC = getattr(Options.options, 'pyc', 1)
	v.PYO = getattr(Options.options, 'pyo', 1)

	try:
		# ABI tag (eg. 'cpython-36') used for PEP 3147 __pycache__ file names
		# NOTE(review): the 'imp' module is removed in python >= 3.12 — the
		# except clause below silently leaves PYTAG unset in that case
		v.PYTAG = conf.cmd_and_log(conf.env.PYTHON + ['-c', "import imp;print(imp.get_tag())"]).strip()
	except Errors.WafError:
		pass

def options(opt):
	"""
	Add python-specific options
	"""
	grp = opt.add_option_group("Python Options")

	grp.add_option('--nopyc', dest='pyc', action='store_false', default=1,
		help='Do not install bytecode compiled .pyc files (configuration) [Default:install]')
	grp.add_option('--nopyo', dest='pyo', action='store_false', default=1,
		help='Do not install optimised compiled .pyo files (configuration) [Default:install]')
	grp.add_option('--nopycache', dest='nopycache', action='store_true',
		help='Do not use __pycache__ directory to install objects [Default:auto]')
	grp.add_option('--python', dest="python",
		help='python binary to be used [Default: %s]' % sys.executable)
	grp.add_option('--pythondir', dest='pythondir',
		help='Installation path for python modules (py, platform-independent .py and .pyc files)')
	grp.add_option('--pythonarchdir', dest='pythonarchdir',
		help='Installation path for python extension (pyext, platform-dependent .so or .dylib files)')


+ 796
- 0
waflib/Tools/qt5.py View File

@@ -0,0 +1,796 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2018 (ita)

"""
This tool helps with finding Qt5 tools and libraries,
and also provides syntactic sugar for using Qt5 tools.

The following snippet illustrates the tool usage::

def options(opt):
opt.load('compiler_cxx qt5')

def configure(conf):
conf.load('compiler_cxx qt5')

def build(bld):
bld(
features = 'qt5 cxx cxxprogram',
uselib = 'QT5CORE QT5GUI QT5OPENGL QT5SVG',
source = 'main.cpp textures.qrc aboutDialog.ui',
target = 'window',
)

Here, the UI description and resource files will be processed
to generate code.

Usage
=====

Load the "qt5" tool.

You also need to edit your sources accordingly:

- the normal way of doing things is to have your C++ files
include the .moc file.
This is regarded as the best practice (and provides much faster
compilations).
It also implies that the include paths have been set properly.

- to have the include paths added automatically, use the following::

from waflib.TaskGen import feature, before_method, after_method
@feature('cxx')
@after_method('process_source')
@before_method('apply_incpaths')
def add_includes_paths(self):
incs = set(self.to_list(getattr(self, 'includes', '')))
for x in self.compiled_tasks:
incs.add(x.inputs[0].parent.path_from(self.path))
self.includes = sorted(incs)

Note: another tool provides Qt processing that does not require
.moc includes, see 'playground/slow_qt/'.

A few options (--qt{dir,bin,...}) and environment variables
(QT5_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool,
tool path selection, etc; please read the source for more info.

The detection uses pkg-config on Linux by default. To force static library detection use:
QT5_XCOMPILE=1 QT5_FORCE_STATIC=1 waf configure
"""

from __future__ import with_statement

try:
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
except ImportError:
has_xml = False
ContentHandler = object
else:
has_xml = True

import os, sys, re
from waflib.Tools import cxx
from waflib import Task, Utils, Options, Errors, Context
from waflib.TaskGen import feature, after_method, extension, before_method
from waflib.Configure import conf
from waflib import Logs

MOC_H = ['.h', '.hpp', '.hxx', '.hh']
"""
File extensions associated to .moc files
"""

EXT_RCC = ['.qrc']
"""
File extension for the resource (.qrc) files
"""

EXT_UI = ['.ui']
"""
File extension for the user interface (.ui) files
"""

EXT_QT5 = ['.cpp', '.cc', '.cxx', '.C']
"""
File extensions of C++ files that may require a .moc processing
"""

class qxx(Task.classes['cxx']):
	"""
	Each C++ file can have zero or several .moc files to create.
	They are known only when the files are scanned (preprocessor)
	To avoid scanning the c++ files each time (parsing C/C++), the results
	are retrieved from the task cache (bld.node_deps/bld.raw_deps).
	The moc tasks are also created *dynamically* during the build.
	"""

	def __init__(self, *k, **kw):
		Task.Task.__init__(self, *k, **kw)
		# set once the moc tasks for this file have been created
		self.moc_done = 0

	def runnable_status(self):
		"""
		Compute the task signature to make sure the scanner was executed. Create the
		moc tasks by using :py:meth:`waflib.Tools.qt5.qxx.add_moc_tasks` (if necessary),
		then postpone the task execution (there is no need to recompute the task signature).
		"""
		if self.moc_done:
			return Task.Task.runnable_status(self)
		else:
			# wait until the predecessors are done before scanning for .moc deps
			for t in self.run_after:
				if not t.hasrun:
					return Task.ASK_LATER
			self.add_moc_tasks()
			return Task.Task.runnable_status(self)

	def create_moc_task(self, h_node, m_node):
		"""
		If several libraries use the same classes, it is possible that moc will run several times (Issue 1318)
		It is not possible to change the file names, but we can assume that the moc transformation will be identical,
		and the moc tasks can be shared in a global cache.

		:param h_node: header to process
		:param m_node: .moc output node
		:return: the (possibly shared) moc task
		"""
		try:
			moc_cache = self.generator.bld.moc_cache
		except AttributeError:
			moc_cache = self.generator.bld.moc_cache = {}

		try:
			# reuse a moc task created for the same header by another target
			return moc_cache[h_node]
		except KeyError:
			tsk = moc_cache[h_node] = Task.classes['moc'](env=self.env, generator=self.generator)
			tsk.set_inputs(h_node)
			tsk.set_outputs(m_node)
			tsk.env.append_unique('MOC_FLAGS', '-i')

			if self.generator:
				self.generator.tasks.append(tsk)

			# direct injection in the build phase (safe because called from the main thread)
			gen = self.generator.bld.producer
			gen.outstanding.append(tsk)
			gen.total += 1

			return tsk

	# NOTE: a stray duplicated "else: delattr(self, 'cache_sig')" fragment that
	# previously sat between the two methods (and broke the class syntax) was
	# removed; the real occurrence lives in add_moc_tasks() below.

	def add_moc_tasks(self):
		"""
		Creates moc tasks by looking in the list of file dependencies ``bld.raw_deps[self.uid()]``
		"""
		node = self.inputs[0]
		bld = self.generator.bld

		try:
			# compute the signature once to know if there is a moc file to create
			self.signature()
		except KeyError:
			# the moc file may be referenced somewhere else
			pass
		else:
			# remove the signature, it must be recomputed with the moc task
			delattr(self, 'cache_sig')

		include_nodes = [node.parent] + self.generator.includes_nodes

		moctasks = []
		mocfiles = set()
		for d in bld.raw_deps.get(self.uid(), []):
			if not d.endswith('.moc'):
				continue

			# process that base.moc only once
			if d in mocfiles:
				continue
			mocfiles.add(d)

			# find the source associated with the moc file
			h_node = None
			base2 = d[:-4]

			# foo.moc from foo.cpp
			prefix = node.name[:node.name.rfind('.')]
			if base2 == prefix:
				h_node = node
			else:
				# this deviates from the standard
				# if bar.cpp includes foo.moc, then assume it is from foo.h
				for x in include_nodes:
					for e in MOC_H:
						h_node = x.find_node(base2 + e)
						if h_node:
							break
					else:
						continue
					break
			if h_node:
				m_node = h_node.change_ext('.moc')
			else:
				raise Errors.WafError('No source found for %r which is a moc file' % d)

			# create the moc task
			task = self.create_moc_task(h_node, m_node)
			moctasks.append(task)

		# simple scheduler dependency: run the moc task before others
		self.run_after.update(set(moctasks))
		self.moc_done = 1

class trans_update(Task.Task):
	"""Updates a .ts files from a list of C++ files"""
	# lupdate extracts translatable strings from the sources into the .ts file
	run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}'
	color = 'BLUE'

class XMLHandler(ContentHandler):
	"""
	Parses ``.qrc`` files to collect the referenced file names
	"""
	def __init__(self):
		ContentHandler.__init__(self)
		# self.buf accumulates character data of the current <file> element
		# self.files holds completed file names, in document order
		self.buf = []
		self.files = []
	def startElement(self, name, attrs):
		if name == 'file':
			self.buf = []
	def endElement(self, name):
		if name != 'file':
			return
		self.files.append(str(''.join(self.buf)))
	def characters(self, cars):
		self.buf.append(cars)

@extension(*EXT_RCC)
def create_rcc_task(self, node):
	"Creates rcc and cxx tasks for ``.qrc`` files"
	# the generated C++ file embeds self.idx to avoid clashes between targets
	cpp_node = node.change_ext('_rc.%d.cpp' % self.idx)
	self.create_task('rcc', node, cpp_node)
	obj_task = self.create_task('cxx', cpp_node, cpp_node.change_ext('.o'))
	if not hasattr(self, 'compiled_tasks'):
		self.compiled_tasks = []
	self.compiled_tasks.append(obj_task)
	return obj_task

@extension(*EXT_UI)
def create_uic_task(self, node):
	"Create uic tasks for user interface ``.ui`` definition files"

	"""
	If UIC file is used in more than one bld, we would have a conflict in parallel execution
	It is not possible to change the file names (like .self.idx. as for objects) as they have
	to be referenced by the source file, but we can assume that the transformation will be identical
	and the tasks can be shared in a global cache.
	"""
	try:
		uic_cache = self.bld.uic_cache
	except AttributeError:
		uic_cache = self.bld.uic_cache = {}

	if node not in uic_cache:
		# only the first target referencing this .ui file creates the task
		uictask = uic_cache[node] = self.create_task('ui5', node)
		# output name follows ui_PATTERN (eg. 'ui_%s.h'); node.name[:-3] strips '.ui'
		uictask.outputs = [node.parent.find_or_declare(self.env.ui_PATTERN % node.name[:-3])]

@extension('.ts')
def add_lang(self, node):
	"""Adds all the .ts file into ``self.lang``"""
	current = self.to_list(getattr(self, 'lang', []))
	self.lang = current + [node]

@feature('qt5')
@before_method('process_source')
def process_mocs(self):
	"""
	Processes MOC files included in headers::

		def build(bld):
			bld.program(features='qt5', source='main.cpp', target='app', use='QT5CORE', moc='foo.h')

	The build will run moc on foo.h to create moc_foo.n.cpp. The number in the file name
	is provided to avoid name clashes when the same headers are used by several targets.
	"""
	lst = self.to_nodes(getattr(self, 'moc', []))
	self.source = self.to_list(getattr(self, 'source', []))
	for x in lst:
		prefix = x.name[:x.name.rfind('.')] # foo.h -> foo
		# self.idx disambiguates outputs between task generators
		moc_target = 'moc_%s.%d.cpp' % (prefix, self.idx)
		moc_node = x.parent.find_or_declare(moc_target)
		# the generated .cpp is appended to the sources to be compiled normally
		self.source.append(moc_node)

		self.create_task('moc', x, moc_node)

@feature('qt5')
@after_method('apply_link')
def apply_qt5(self):
	r"""
	Adds MOC_FLAGS which may be necessary for moc::

		def build(bld):
			bld.program(features='qt5', source='main.cpp', target='app', use='QT5CORE')

	The additional parameters are:

	:param lang: list of translation files (\*.ts) to process
	:type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
	:param update: whether to process the C++ files to update the \*.ts files (use **waf --translate**)
	:type update: bool
	:param langname: if given, transform the \*.ts files into a .qrc files to include in the binary file
	:type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
	"""
	if getattr(self, 'lang', None):
		# create one ts2qm task per translation file
		qmtasks = []
		for x in self.to_list(self.lang):
			if isinstance(x, str):
				x = self.path.find_resource(x + '.ts')
			qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.%d.qm' % self.idx)))

		if getattr(self, 'update', None) and Options.options.trans_qt5:
			# refresh the .ts files from the C++ sources and .ui files (waf --translate)
			cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [
				a.inputs[0] for a in self.tasks if a.inputs and a.inputs[0].name.endswith('.ui')]
			for x in qmtasks:
				self.create_task('trans_update', cxxnodes, x.inputs)

		if getattr(self, 'langname', None):
			# bundle the .qm files into a .qrc, then compile and link it in
			qmnodes = [x.outputs[0] for x in qmtasks]
			rcnode = self.langname
			if isinstance(rcnode, str):
				rcnode = self.path.find_or_declare(rcnode + ('.%d.qrc' % self.idx))
			t = self.create_task('qm2rcc', qmnodes, rcnode)
			k = create_rcc_task(self, t.outputs[0])
			self.link_task.inputs.append(k.outputs[0])

	# forward -D/-I (or MSVC /D //I) compiler flags to moc
	lst = []
	for flag in self.to_list(self.env.CXXFLAGS):
		if len(flag) < 2:
			continue
		f = flag[0:2]
		if f in ('-D', '-I', '/D', '/I'):
			if (f[0] == '/'):
				# normalize MSVC-style flags to the dash form moc expects
				lst.append('-' + flag[1:])
			else:
				lst.append(flag)
	self.env.append_value('MOC_FLAGS', lst)

@extension(*EXT_QT5)
def cxx_hook(self, node):
	"""
	Re-maps C++ file extensions to the :py:class:`waflib.Tools.qt5.qxx` task.

	:param node: the C++ source file
	:return: the created qxx task
	"""
	return self.create_compiled_task('qxx', node)

class rcc(Task.Task):
	"""
	Processes ``.qrc`` files
	"""
	color = 'BLUE'
	run_str = '${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
	ext_out = ['.h']

	def rcname(self):
		# resource name passed to rcc -name: the input file name without extension
		return os.path.splitext(self.inputs[0].name)[0]

	def scan(self):
		"""Parse the *.qrc* files"""
		if not has_xml:
			Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
			return ([], [])

		parser = make_parser()
		curHandler = XMLHandler()
		parser.setContentHandler(curHandler)
		with open(self.inputs[0].abspath(), 'r') as f:
			parser.parse(f)

		# split the referenced files into resolved nodes and unresolved names
		nodes = []
		names = []
		root = self.inputs[0].parent
		for x in curHandler.files:
			nd = root.find_resource(x)
			if nd:
				nodes.append(nd)
			else:
				names.append(x)
		return (nodes, names)

	def quote_flag(self, x):
		"""
		Override Task.quote_flag. QT parses the argument files
		differently than cl.exe and link.exe

		:param x: flag
		:type x: string
		:return: quoted flag
		:rtype: string
		"""
		return x


class moc(Task.Task):
	"""
	Creates ``.moc`` files
	"""
	color = 'BLUE'
	# moc receives the include paths and defines used by the C++ compilation
	run_str = '${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'

	def quote_flag(self, x):
		"""
		Override Task.quote_flag. QT parses the argument files
		differently than cl.exe and link.exe

		:param x: flag
		:type x: string
		:return: quoted flag
		:rtype: string
		"""
		return x


class ui5(Task.Task):
	"""
	Processes ``.ui`` files
	"""
	color = 'BLUE'
	# uic generates a C++ header from the interface description
	run_str = '${QT_UIC} ${SRC} -o ${TGT}'
	ext_out = ['.h']

class ts2qm(Task.Task):
	"""
	Generates ``.qm`` files from ``.ts`` files
	"""
	color = 'BLUE'
	# lrelease compiles the translation source into the binary .qm format
	run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'

class qm2rcc(Task.Task):
	"""
	Generates ``.qrc`` files from ``.qm`` files
	"""
	color = 'BLUE'
	after = 'ts2qm'
	def run(self):
		"""Create a qrc file including the inputs"""
		# reference each .qm file relative to the output directory
		base = self.outputs[0].parent
		entries = ['<file>%s</file>' % k.path_from(base) for k in self.inputs]
		txt = '\n'.join(entries)
		code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
		self.outputs[0].write(code)

def configure(self):
	"""
	Besides the configuration options, the environment variable QT5_ROOT may be used
	to give the location of the qt5 libraries (absolute path).

	The detection uses the program ``pkg-config`` through :py:func:`waflib.Tools.config_c.check_cfg`
	"""
	self.find_qt5_binaries()
	self.set_qt5_libs_dir()
	self.set_qt5_libs_to_check()
	self.set_qt5_defines()
	self.find_qt5_libraries()
	self.add_qt5_rpath()
	self.simplify_qt5_libs()

	# warn about this during the configuration too
	if not has_xml:
		Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')

	if 'COMPILER_CXX' not in self.env:
		self.fatal('No CXX compiler defined: did you forget to configure compiler_cxx first?')

	# Qt5 may be compiled with '-reduce-relocations' which requires dependent programs to have -fPIE or -fPIC?
	frag = '#include <QApplication>\nint main(int argc, char **argv) {return 0;}\n'
	uses = 'QT5CORE QT5WIDGETS QT5GUI'
	# try successively stricter flag combinations until one compiles
	for flag in [[], '-fPIE', '-fPIC', '-std=c++11' , ['-std=c++11', '-fPIE'], ['-std=c++11', '-fPIC']]:
		msg = 'See if Qt files compile '
		if flag:
			msg += 'with %s' % flag
		try:
			self.check(features='qt5 cxx', use=uses, uselib_store='qt5', cxxflags=flag, fragment=frag, msg=msg)
		except self.errors.ConfigurationError:
			pass
		else:
			break
	else:
		# for/else: no combination worked
		self.fatal('Could not build a simple Qt application')

	# FreeBSD does not add /usr/local/lib and the pkg-config files do not provide it either :-/
	if Utils.unversioned_sys_platform() == 'freebsd':
		frag = '#include <QApplication>\nint main(int argc, char **argv) { QApplication app(argc, argv); return NULL != (void*) (&app);}\n'
		try:
			self.check(features='qt5 cxx cxxprogram', use=uses, fragment=frag, msg='Can we link Qt programs on FreeBSD directly?')
		except self.errors.ConfigurationError:
			self.check(features='qt5 cxx cxxprogram', use=uses, uselib_store='qt5', libpath='/usr/local/lib', fragment=frag, msg='Is /usr/local/lib required?')

@conf
def find_qt5_binaries(self):
	"""
	Detects Qt programs such as qmake, moc, uic, lrelease.

	qmake is located through, in order: the ``--qtdir``/``--qtbin`` options,
	the ``QT5_ROOT``/``QT5_BIN`` environment variables, the PATH plus a few
	well-known folders, and finally the ``qtchooser`` wrapper.

	:raises: :py:class:`waflib.Errors.ConfigurationError` when qmake or uic cannot be found
	"""
	env = self.env
	opt = Options.options

	qtdir = getattr(opt, 'qtdir', '')
	qtbin = getattr(opt, 'qtbin', '')

	paths = []

	if qtdir:
		qtbin = os.path.join(qtdir, 'bin')

	# the qt directory has been given from QT5_ROOT - deduce the qt binary path
	if not qtdir:
		qtdir = self.environ.get('QT5_ROOT', '')
		qtbin = self.environ.get('QT5_BIN') or os.path.join(qtdir, 'bin')

	if qtbin:
		paths = [qtbin]

	# no qtdir, look in the path and in /usr/local/Trolltech
	if not qtdir:
		paths = self.environ.get('PATH', '').split(os.pathsep)
		paths.extend(['/usr/share/qt5/bin', '/usr/local/lib/qt5/bin'])
		try:
			lst = Utils.listdir('/usr/local/Trolltech/')
		except OSError:
			pass
		else:
			if lst:
				lst.sort()
				lst.reverse()

				# keep the highest version
				qtdir = '/usr/local/Trolltech/%s/' % lst[0]
				qtbin = os.path.join(qtdir, 'bin')
				paths.append(qtbin)

	def version_key(parts):
		# Compare version components numerically when possible so that e.g.
		# 5.10 ranks above 5.9; a plain list-of-strings comparison would put
		# '10' before '9'. Non-numeric components fall back to string order.
		key = []
		for part in parts:
			try:
				key.append((0, int(part), ''))
			except ValueError:
				key.append((1, 0, part))
		return key

	# at the end, try to find qmake in the paths given
	# keep the one with the highest version
	cand = None
	prev_ver = ['5', '0', '0']
	for qmk in ('qmake-qt5', 'qmake5', 'qmake'):
		try:
			qmake = self.find_program(qmk, path_list=paths)
		except self.errors.ConfigurationError:
			pass
		else:
			try:
				version = self.cmd_and_log(qmake + ['-query', 'QT_VERSION']).strip()
			except self.errors.WafError:
				pass
			else:
				if version:
					new_ver = version.split('.')
					if version_key(new_ver) > version_key(prev_ver):
						cand = qmake
						prev_ver = new_ver

	# qmake could not be found easily, rely on qtchooser
	if not cand:
		try:
			self.find_program('qtchooser')
		except self.errors.ConfigurationError:
			pass
		else:
			cmd = self.env.QTCHOOSER + ['-qt=5', '-run-tool=qmake']
			try:
				version = self.cmd_and_log(cmd + ['-query', 'QT_VERSION'])
			except self.errors.WafError:
				pass
			else:
				cand = cmd

	if cand:
		self.env.QMAKE = cand
	else:
		self.fatal('Could not find qmake for qt5')

	# ask qmake where the host tools live and search there first
	self.env.QT_HOST_BINS = qtbin = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_HOST_BINS']).strip()
	paths.insert(0, qtbin)

	def find_bin(lst, var):
		# store the first program of lst found in env[var], unless already set
		if var in env:
			return
		for f in lst:
			try:
				ret = self.find_program(f, path_list=paths)
			except self.errors.ConfigurationError:
				pass
			else:
				env[var] = ret
				break

	find_bin(['uic-qt5', 'uic'], 'QT_UIC')
	if not env.QT_UIC:
		self.fatal('cannot find the uic compiler for qt5')

	self.start_msg('Checking for uic version')
	uicver = self.cmd_and_log(env.QT_UIC + ['-version'], output=Context.BOTH)
	uicver = ''.join(uicver).strip()
	uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '')
	self.end_msg(uicver)
	if uicver.find(' 3.') != -1 or uicver.find(' 4.') != -1:
		self.fatal('this uic compiler is for qt3 or qt4, add uic for qt5 to your path')

	find_bin(['moc-qt5', 'moc'], 'QT_MOC')
	find_bin(['rcc-qt5', 'rcc'], 'QT_RCC')
	find_bin(['lrelease-qt5', 'lrelease'], 'QT_LRELEASE')
	find_bin(['lupdate-qt5', 'lupdate'], 'QT_LUPDATE')

	env.UIC_ST = '%s -o %s'
	env.MOC_ST = '-o'
	env.ui_PATTERN = 'ui_%s.h'
	env.QT_LRELEASE_FLAGS = ['-silent']
	env.MOCCPPPATH_ST = '-I%s'
	env.MOCDEFINES_ST = '-D%s'

@conf
def set_qt5_libs_dir(self):
	"""
	Determines the directory holding the Qt5 libraries and stores it in ``env.QTLIBS``.
	"""
	conf_env = self.env
	# an explicit command-line option or environment variable wins
	libdir = getattr(Options.options, 'qtlibs', None) or self.environ.get('QT5_LIBDIR')
	if not libdir:
		try:
			libdir = self.cmd_and_log(conf_env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip()
		except Errors.WafError:
			# some qmake builds cannot answer QT_INSTALL_LIBS; derive from the prefix
			prefix = self.cmd_and_log(conf_env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip()
			libdir = os.path.join(prefix, 'lib')
	self.msg('Found the Qt5 libraries in', libdir)
	conf_env.QTLIBS = libdir

@conf
def find_single_qt5_lib(self, name, uselib, qtlibs, qtincludes, force_static):
	"""
	Searches *qtlibs* for one Qt5 library file and, on success, records the
	library, its path and include directories under the *uselib* variables.

	:return: the file name that was found, or False when nothing matched
	"""
	env = self.env
	if force_static:
		suffixes = ('.a', '.lib')
		kind = 'STLIB'
	else:
		suffixes = ('.so', '.lib')
		kind = 'LIB'

	# on win32 the file may carry a '5' version marker
	versions = ('', '5') if Utils.is_win32 else ('',)

	for suffix in suffixes:
		for ver in versions:
			for stem in ('lib', ''):
				parts = (stem, name, ver, suffix)
				fname = ''.join(parts)
				if not os.path.exists(os.path.join(qtlibs, fname)):
					continue
				# the win32 linker wants the decorated name, elsewhere the plain one
				if env.DEST_OS == 'win32':
					libval = ''.join(parts[:-1])
				else:
					libval = name
				env.append_unique(kind + '_' + uselib, libval)
				env.append_unique('%sPATH_%s' % (kind, uselib), qtlibs)
				env.append_unique('INCLUDES_' + uselib, qtincludes)
				env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, name.replace('Qt5', 'Qt')))
				return fname
	return False

@conf
def find_qt5_libraries(self):
	"""
	Locates the Qt5 libraries listed in ``self.qt5_vars``, preferably through
	``pkg-config``; falls back to a filesystem search (framework detection on OSX).
	"""
	env = self.env

	qtincludes = self.environ.get('QT5_INCLUDES') or self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip()
	force_static = self.environ.get('QT5_FORCE_STATIC')
	try:
		# QT5_XCOMPILE deliberately forces the manual fallback branch below
		if self.environ.get('QT5_XCOMPILE'):
			self.fatal('QT5_XCOMPILE Disables pkg-config detection')
		self.check_cfg(atleast_pkgconfig_version='0.1')
	except self.errors.ConfigurationError:
		# no usable pkg-config: probe the filesystem directly
		for i in self.qt5_vars:
			uselib = i.upper()
			if Utils.unversioned_sys_platform() == 'darwin':
				# Since at least qt 4.7.3 each library locates in separate directory
				fwk = i.replace('Qt5', 'Qt')
				frameworkName = fwk + '.framework'

				qtDynamicLib = os.path.join(env.QTLIBS, frameworkName, fwk)
				if os.path.exists(qtDynamicLib):
					env.append_unique('FRAMEWORK_' + uselib, fwk)
					env.append_unique('FRAMEWORKPATH_' + uselib, env.QTLIBS)
					self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
				else:
					self.msg('Checking for %s' % i, False, 'YELLOW')
				env.append_unique('INCLUDES_' + uselib, os.path.join(env.QTLIBS, frameworkName, 'Headers'))
			else:
				# try shared (or the requested kind) first, then static as a fallback
				ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, force_static)
				if not force_static and not ret:
					ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, True)
				self.msg('Checking for %s' % i, ret, 'GREEN' if ret else 'YELLOW')
	else:
		# pkg-config is available: let it provide flags for each Qt module
		path = '%s:%s:%s/pkgconfig:/usr/lib/qt5/lib/pkgconfig:/opt/qt5/lib/pkgconfig:/usr/lib/qt5/lib:/opt/qt5/lib' % (
			self.environ.get('PKG_CONFIG_PATH', ''), env.QTLIBS, env.QTLIBS)
		for i in self.qt5_vars:
			self.check_cfg(package=i, args='--cflags --libs', mandatory=False, force_static=force_static, pkg_config_path=path)

@conf
def simplify_qt5_libs(self):
	"""
	Since library paths make really long command-lines,
	and since everything depends on qtcore, remove the qtcore ones from qtgui, etc
	"""
	env = self.env

	def strip_core_paths(names, core_var):
		# drop the paths already provided by qtcore from every other module
		core_paths = env[core_var]
		for name in names:
			var = name.upper()
			if var == 'QTCORE':
				continue
			key = 'LIBPATH_' + var
			paths = env[key]
			if paths:
				env[key] = [p for p in paths if p not in core_paths]

	strip_core_paths(self.qt5_vars, 'LIBPATH_QTCORE')

@conf
def add_qt5_rpath(self):
	"""
	Defines rpath entries for Qt libraries
	"""
	env = self.env
	if not getattr(Options.options, 'want_rpath', False):
		return

	core_paths = env['LIBPATH_QTCORE']
	for name in self.qt5_vars:
		var = name.upper()
		paths = env['LIBPATH_' + var]
		if not paths:
			continue
		flags = []
		for p in paths:
			# qtcore keeps all of its paths; other modules skip the ones qtcore provides
			if var != 'QTCORE' and p in core_paths:
				continue
			flags.append('-Wl,--rpath=' + p)
		env['RPATH_' + var] = flags

@conf
def set_qt5_libs_to_check(self):
	"""
	Builds the list of Qt5 modules to look for (``self.qt5_vars``) by scanning
	``env.QTLIBS``, unless the list was provided explicitly; extra modules may
	be appended through the ``--qtextralibs`` option.

	:raises: :py:class:`waflib.Errors.ConfigurationError` when no Qt5 library is found
	"""
	self.qt5_vars = Utils.to_list(getattr(self, 'qt5_vars', []))
	if not self.qt5_vars:
		dirlst = Utils.listdir(self.env.QTLIBS)

		pat = self.env.cxxshlib_PATTERN
		if Utils.is_win32:
			pat = pat.replace('.dll', '.lib')
		if self.environ.get('QT5_FORCE_STATIC'):
			pat = self.env.cxxstlib_PATTERN
		if Utils.unversioned_sys_platform() == 'darwin':
			# raw string: '\.' is a literal dot in the regex built below
			# (the previous "%s\.framework" relied on an invalid escape sequence,
			# which newer Python versions flag as a SyntaxWarning)
			pat = r"%s\.framework"
		re_qt = re.compile(pat%'Qt5?(?P<name>.*)'+'$')
		for x in dirlst:
			m = re_qt.match(x)
			if m:
				self.qt5_vars.append("Qt5%s" % m.group('name'))
		if not self.qt5_vars:
			self.fatal('cannot find any Qt5 library (%r)' % self.env.QTLIBS)

	qtextralibs = getattr(Options.options, 'qtextralibs', None)
	if qtextralibs:
		self.qt5_vars.extend(qtextralibs.split(','))

@conf
def set_qt5_defines(self):
	"""
	Adds a ``QT_<MODULE>_LIB`` define for each Qt5 module, on Windows only.
	"""
	if sys.platform != 'win32':
		return
	for var in self.qt5_vars:
		# Qt5Widgets -> WIDGETS
		base = var.replace('Qt5', 'Qt')[2:].upper()
		self.env.append_unique('DEFINES_%s' % var.upper(), 'QT_%s_LIB' % base)

def options(opt):
	"""
	Command-line options
	"""
	opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries')

	# plain string options pointing at the qt installation
	for name in ('qtdir', 'qtbin', 'qtlibs'):
		opt.add_option('--' + name, type='string', default='', dest=name)

	opt.add_option('--translate', action='store_true', help='collect translation strings', dest='trans_qt5', default=False)
	opt.add_option('--qtextralibs', type='string', default='', dest='qtextralibs', help='additional qt libraries on the system to add to default ones, comma separated')


+ 186
- 0
waflib/Tools/ruby.py View File

@@ -0,0 +1,186 @@
#!/usr/bin/env python
# encoding: utf-8
# daniel.svensson at purplescout.se 2008
# Thomas Nagy 2016-2018 (ita)

"""
Support for Ruby extensions. A C/C++ compiler is required::

def options(opt):
opt.load('compiler_c ruby')
def configure(conf):
conf.load('compiler_c ruby')
conf.check_ruby_version((1,8,0))
conf.check_ruby_ext_devel()
conf.check_ruby_module('libxml')
def build(bld):
bld(
features = 'c cshlib rubyext',
source = 'rb_mytest.c',
target = 'mytest_ext',
install_path = '${ARCHDIR_RUBY}')
bld.install_files('${LIBDIR_RUBY}', 'Mytest.rb')
"""

import os
from waflib import Errors, Options, Task, Utils
from waflib.TaskGen import before_method, feature, extension
from waflib.Configure import conf

@feature('rubyext')
@before_method('apply_incpaths', 'process_source', 'apply_bundle', 'apply_link')
def init_rubyext(self):
	"""
	Add required variables for ruby extensions
	"""
	self.install_path = '${ARCHDIR_RUBY}'
	uselib = self.to_list(getattr(self, 'uselib', ''))
	# ruby extensions always need the RUBY and RUBYEXT flag sets
	for var in ('RUBY', 'RUBYEXT'):
		if var not in uselib:
			uselib.append(var)
	self.uselib = uselib

@feature('rubyext')
@before_method('apply_link', 'propagate_uselib_vars')
def apply_ruby_so_name(self):
	"""
	Strip the *lib* prefix from ruby extensions
	"""
	# ruby's own extension pattern replaces the platform shlib patterns
	pattern = self.env.rubyext_PATTERN
	self.env.cshlib_PATTERN = pattern
	self.env.cxxshlib_PATTERN = pattern

@conf
def check_ruby_version(self, minver=()):
	"""
	Checks if ruby is installed.
	If installed the variable RUBY will be set in environment.
	The ruby binary can be overridden by ``--with-ruby-binary`` command-line option.

	:param minver: minimum required version, for example ``(1, 8, 0)``
	:type minver: tuple of int
	:raises: :py:class:`waflib.Errors.ConfigurationError` when ruby is missing,
		its version cannot be parsed, or it is older than *minver*
	"""

	ruby = self.find_program('ruby', var='RUBY', value=Options.options.rubybinary)

	try:
		version = self.cmd_and_log(ruby + ['-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
	except Errors.WafError:
		self.fatal('could not determine ruby version')
	self.env.RUBY_VERSION = version

	try:
		# int() raises ValueError for unusual version strings (e.g. '1.8.7p374');
		# the previous code caught Errors.WafError here, which can never be
		# raised by the conversion, so such versions crashed with a traceback
		ver = tuple(map(int, version.split('.')))
	except ValueError:
		self.fatal('unsupported ruby version %r' % version)

	cver = ''
	if minver:
		cver = '> ' + '.'.join(str(x) for x in minver)
		if ver < minver:
			self.fatal('ruby is too old %r' % ver)

	self.msg('Checking for ruby version %s' % cver, version)

@conf
def check_ruby_ext_devel(self):
	"""
	Check if a ruby extension can be created

	Requires :py:func:`check_ruby_version` and a C/C++ compiler to have been
	configured first. Populates INCLUDES/LIBPATH/CFLAGS/LINKFLAGS for the
	``RUBYEXT`` uselib and the install locations ``ARCHDIR_RUBY``/``LIBDIR_RUBY``.
	"""
	if not self.env.RUBY:
		self.fatal('ruby detection is required first')

	if not self.env.CC_NAME and not self.env.CXX_NAME:
		self.fatal('load a c/c++ compiler first')

	version = tuple(map(int, self.env.RUBY_VERSION.split(".")))

	def read_out(cmd):
		# run a one-liner under the detected ruby with rbconfig preloaded
		return Utils.to_list(self.cmd_and_log(self.env.RUBY + ['-rrbconfig', '-e', cmd]))

	def read_config(key):
		return read_out('puts RbConfig::CONFIG[%r]' % key)

	cpppath = archdir = read_config('archdir')

	if version >= (1, 9, 0):
		# ruby >= 1.9 moved the headers out of archdir
		ruby_hdrdir = read_config('rubyhdrdir')
		cpppath += ruby_hdrdir
		if version >= (2, 0, 0):
			cpppath += read_config('rubyarchhdrdir')
		cpppath += [os.path.join(ruby_hdrdir[0], read_config('arch')[0])]

	self.check(header_name='ruby.h', includes=cpppath, errmsg='could not find ruby header file', link_header_test=False)

	self.env.LIBPATH_RUBYEXT = read_config('libdir')
	self.env.LIBPATH_RUBYEXT += archdir
	self.env.INCLUDES_RUBYEXT = cpppath
	self.env.CFLAGS_RUBYEXT = read_config('CCDLFLAGS')
	self.env.rubyext_PATTERN = '%s.' + read_config('DLEXT')[0]

	# ok this is really stupid, but the command and flags are combined.
	# so we try to find the first argument...
	flags = read_config('LDSHARED')
	while flags and flags[0][0] != '-':
		flags = flags[1:]

	# we also want to strip out the deprecated ppc flags
	if len(flags) > 1 and flags[1] == "ppc":
		flags = flags[2:]

	self.env.LINKFLAGS_RUBYEXT = flags
	self.env.LINKFLAGS_RUBYEXT += read_config('LIBS')
	self.env.LINKFLAGS_RUBYEXT += read_config('LIBRUBYARG_SHARED')

	# install locations, overridable from the command line
	if Options.options.rubyarchdir:
		self.env.ARCHDIR_RUBY = Options.options.rubyarchdir
	else:
		self.env.ARCHDIR_RUBY = read_config('sitearchdir')[0]

	if Options.options.rubylibdir:
		self.env.LIBDIR_RUBY = Options.options.rubylibdir
	else:
		self.env.LIBDIR_RUBY = read_config('sitelibdir')[0]

@conf
def check_ruby_module(self, module_name):
	"""
	Check if the selected ruby interpreter can require the given ruby module::

		def configure(conf):
			conf.check_ruby_module('libxml')

	:param module_name: module
	:type module_name: string
	:raises: :py:class:`waflib.Errors.ConfigurationError` when the module is missing
	"""
	self.start_msg('Ruby module %s' % module_name)
	cmd = self.env.RUBY + ['-e', 'require \'%s\';puts 1' % module_name]
	try:
		self.cmd_and_log(cmd)
	except Errors.WafError:
		self.end_msg(False)
		self.fatal('Could not find the ruby module %r' % module_name)
	self.end_msg(True)

@extension('.rb')
def process(self, node):
	"""Creates a :py:class:`run_ruby` task for each ``.rb`` source file."""
	tsk = self.create_task('run_ruby', node)
	return tsk

class run_ruby(Task.Task):
	"""
	Task to run ruby files detected by file extension .rb::

		def options(opt):
			opt.load('ruby')

		def configure(ctx):
			ctx.check_ruby_version()

		def build(bld):
			bld.env.RBFLAGS = '-e puts "hello world"'
			bld(source='a_ruby_file.rb')
	"""
	# -I makes the script's own directory part of the ruby load path
	run_str = '${RUBY} ${RBFLAGS} -I ${SRC[0].parent.abspath()} ${SRC}'

def options(opt):
	"""
	Add the ``--with-ruby-archdir``, ``--with-ruby-libdir`` and ``--with-ruby-binary`` options
	"""
	for flag, dest, helpmsg in (
		('--with-ruby-archdir', 'rubyarchdir', 'Specify directory where to install arch specific files'),
		('--with-ruby-libdir', 'rubylibdir', 'Specify alternate ruby library path'),
		('--with-ruby-binary', 'rubybinary', 'Specify alternate ruby binary'),
	):
		opt.add_option(flag, type='string', dest=dest, help=helpmsg)


+ 27
- 29
waflib/Tools/suncc.py View File

@@ -1,27 +1,26 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh)

from waflib import Errors
from waflib.Tools import ccroot, ar
from waflib.Configure import conf

@conf
def find_scc(conf):
"""
Detect the Sun C compiler
Detects the Sun C compiler
"""
v = conf.env
cc = conf.find_program('cc', var='CC')

try:
conf.cmd_and_log(cc + ['-flags'])
except Exception:
except Errors.WafError:
conf.fatal('%r is not a Sun compiler' % cc)
v.CC_NAME = 'sun'
conf.get_suncc_version(cc)


@conf
def scc_common_flags(conf):
"""
@@ -29,36 +28,34 @@ def scc_common_flags(conf):
"""
v = conf.env

v['CC_SRC_F'] = []
v['CC_TGT_F'] = ['-c', '-o']
v.CC_SRC_F = []
v.CC_TGT_F = ['-c', '-o', '']

# linker
if not v['LINK_CC']: v['LINK_CC'] = v['CC']
v['CCLNK_SRC_F'] = ''
v['CCLNK_TGT_F'] = ['-o']
v['CPPPATH_ST'] = '-I%s'
v['DEFINES_ST'] = '-D%s'
if not v.LINK_CC:
v.LINK_CC = v.CC

v['LIB_ST'] = '-l%s' # template for adding libs
v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
v['STLIB_ST'] = '-l%s'
v['STLIBPATH_ST'] = '-L%s'
v.CCLNK_SRC_F = ''
v.CCLNK_TGT_F = ['-o', '']
v.CPPPATH_ST = '-I%s'
v.DEFINES_ST = '-D%s'

v['SONAME_ST'] = '-Wl,-h,%s'
v['SHLIB_MARKER'] = '-Bdynamic'
v['STLIB_MARKER'] = '-Bstatic'
v.LIB_ST = '-l%s' # template for adding libs
v.LIBPATH_ST = '-L%s' # template for adding libpaths
v.STLIB_ST = '-l%s'
v.STLIBPATH_ST = '-L%s'

# program
v['cprogram_PATTERN'] = '%s'
v.SONAME_ST = '-Wl,-h,%s'
v.SHLIB_MARKER = '-Bdynamic'
v.STLIB_MARKER = '-Bstatic'

# shared library
v['CFLAGS_cshlib'] = ['-xcode=pic32', '-DPIC']
v['LINKFLAGS_cshlib'] = ['-G']
v['cshlib_PATTERN'] = 'lib%s.so'
v.cprogram_PATTERN = '%s'

# static lib
v['LINKFLAGS_cstlib'] = ['-Bstatic']
v['cstlib_PATTERN'] = 'lib%s.a'
v.CFLAGS_cshlib = ['-xcode=pic32', '-DPIC']
v.LINKFLAGS_cshlib = ['-G']
v.cshlib_PATTERN = 'lib%s.so'

v.LINKFLAGS_cstlib = ['-Bstatic']
v.cstlib_PATTERN = 'lib%s.a'

def configure(conf):
conf.find_scc()
@@ -67,3 +64,4 @@ def configure(conf):
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()


+ 26
- 27
waflib/Tools/suncxx.py View File

@@ -1,21 +1,22 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh)

from waflib import Errors
from waflib.Tools import ccroot, ar
from waflib.Configure import conf

@conf
def find_sxx(conf):
"""
Detect the sun C++ compiler
Detects the sun C++ compiler
"""
v = conf.env
cc = conf.find_program(['CC', 'c++'], var='CXX')
try:
conf.cmd_and_log(cc + ['-flags'])
except Exception:
except Errors.WafError:
conf.fatal('%r is not a Sun compiler' % cc)
v.CXX_NAME = 'sun'
conf.get_suncc_version(cc)
@@ -27,36 +28,34 @@ def sxx_common_flags(conf):
"""
v = conf.env

v['CXX_SRC_F'] = []
v['CXX_TGT_F'] = ['-c', '-o']
v.CXX_SRC_F = []
v.CXX_TGT_F = ['-c', '-o', '']

# linker
if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
v['CXXLNK_SRC_F'] = []
v['CXXLNK_TGT_F'] = ['-o']
v['CPPPATH_ST'] = '-I%s'
v['DEFINES_ST'] = '-D%s'
if not v.LINK_CXX:
v.LINK_CXX = v.CXX

v['LIB_ST'] = '-l%s' # template for adding libs
v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
v['STLIB_ST'] = '-l%s'
v['STLIBPATH_ST'] = '-L%s'
v.CXXLNK_SRC_F = []
v.CXXLNK_TGT_F = ['-o', '']
v.CPPPATH_ST = '-I%s'
v.DEFINES_ST = '-D%s'

v['SONAME_ST'] = '-Wl,-h,%s'
v['SHLIB_MARKER'] = '-Bdynamic'
v['STLIB_MARKER'] = '-Bstatic'
v.LIB_ST = '-l%s' # template for adding libs
v.LIBPATH_ST = '-L%s' # template for adding libpaths
v.STLIB_ST = '-l%s'
v.STLIBPATH_ST = '-L%s'

# program
v['cxxprogram_PATTERN'] = '%s'
v.SONAME_ST = '-Wl,-h,%s'
v.SHLIB_MARKER = '-Bdynamic'
v.STLIB_MARKER = '-Bstatic'

# shared library
v['CXXFLAGS_cxxshlib'] = ['-xcode=pic32', '-DPIC']
v['LINKFLAGS_cxxshlib'] = ['-G']
v['cxxshlib_PATTERN'] = 'lib%s.so'
v.cxxprogram_PATTERN = '%s'

# static lib
v['LINKFLAGS_cxxstlib'] = ['-Bstatic']
v['cxxstlib_PATTERN'] = 'lib%s.a'
v.CXXFLAGS_cxxshlib = ['-xcode=pic32', '-DPIC']
v.LINKFLAGS_cxxshlib = ['-G']
v.cxxshlib_PATTERN = 'lib%s.so'

v.LINKFLAGS_cxxstlib = ['-Bstatic']
v.cxxstlib_PATTERN = 'lib%s.a'

def configure(conf):
conf.find_sxx()


+ 543
- 0
waflib/Tools/tex.py View File

@@ -0,0 +1,543 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2018 (ita)

"""
TeX/LaTeX/PDFLaTeX/XeLaTeX support

Example::

def configure(conf):
conf.load('tex')
if not conf.env.LATEX:
conf.fatal('The program LaTex is required')

def build(bld):
bld(
features = 'tex',
type = 'latex', # pdflatex or xelatex
source = 'document.ltx', # mandatory, the source
outs = 'ps', # 'pdf' or 'ps pdf'
deps = 'crossreferencing.lst', # to give dependencies directly
prompt = 1, # 0 for the batch mode
)

Notes:

- To configure with a special program, use::

$ PDFLATEX=luatex waf configure

- This tool does not use the target attribute of the task generator
(``bld(target=...)``); the target file name is built from the source
base name and the output type(s)
"""

import os, re
from waflib import Utils, Task, Errors, Logs, Node
from waflib.TaskGen import feature, before_method

re_bibunit = re.compile(r'\\(?P<type>putbib)\[(?P<file>[^\[\]]*)\]',re.M)
def bibunitscan(self):
	"""
	Parses TeX inputs and try to find the *bibunit* file dependencies

	:return: list of bibunit files
	:rtype: list of :py:class:`waflib.Node.Node`
	"""
	src = self.inputs[0]

	deps = []
	if not src:
		return deps

	text = src.read()
	for match in re_bibunit.finditer(text):
		path = match.group('file')
		if not path:
			continue
		matched = None
		# both the bare name and the .bib variant are recorded (no break on purpose)
		for ext in ('', '.bib'):
			# add another loop for the tex include paths?
			Logs.debug('tex: trying %s%s', path, ext)
			node = src.parent.find_resource(path + ext)
			if node:
				matched = True
				deps.append(node)
		if not matched:
			Logs.debug('tex: could not find %s', path)

	Logs.debug('tex: found the following bibunit files: %s', deps)
	return deps

exts_deps_tex = ['', '.ltx', '.tex', '.bib', '.pdf', '.png', '.eps', '.ps', '.sty']
"""List of typical file extensions included in latex files"""

exts_tex = ['.ltx', '.tex']
"""List of typical file extensions that contain latex"""

re_tex = re.compile(r'\\(?P<type>usepackage|RequirePackage|include|bibliography([^\[\]{}]*)|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}',re.M)
"""Regexp for expressions that may include latex files"""

g_bibtex_re = re.compile('bibdata', re.M)
"""Regexp for bibtex files"""

g_glossaries_re = re.compile('\\@newglossary', re.M)
"""Regexp for expressions that create glossaries"""

class tex(Task.Task):
	"""
	Compiles a tex/latex file.

	.. inheritance-diagram:: waflib.Tools.tex.latex waflib.Tools.tex.xelatex waflib.Tools.tex.pdflatex
	"""

	bibtex_fun, _ = Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}', shell=False)
	bibtex_fun.__doc__ = """
	Execute the program **bibtex**
	"""

	makeindex_fun, _ = Task.compile_fun('${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}', shell=False)
	makeindex_fun.__doc__ = """
	Execute the program **makeindex**
	"""

	makeglossaries_fun, _ = Task.compile_fun('${MAKEGLOSSARIES} ${SRCFILE}', shell=False)
	makeglossaries_fun.__doc__ = """
	Execute the program **makeglossaries**
	"""

	def exec_command(self, cmd, **kw):
		"""
		Executes TeX commands without buffering (latex may prompt for inputs)

		:return: the return code
		:rtype: int
		"""
		if self.env.PROMPT_LATEX:
			# capture the outputs in configuration tests
			kw['stdout'] = kw['stderr'] = None
		return super(tex, self).exec_command(cmd, **kw)

	def scan_aux(self, node):
		"""
		Recursive regex-based scanner that finds included auxiliary files.
		"""
		nodes = [node]
		re_aux = re.compile(r'\\@input{(?P<file>[^{}]*)}', re.M)

		def parse_node(node):
			code = node.read()
			for match in re_aux.finditer(code):
				path = match.group('file')
				found = node.parent.find_or_declare(path)
				if found and found not in nodes:
					Logs.debug('tex: found aux node %r', found)
					nodes.append(found)
					parse_node(found)
		parse_node(node)
		return nodes

	def scan(self):
		"""
		Recursive regex-based scanner that finds latex dependencies. It uses :py:attr:`waflib.Tools.tex.re_tex`

		Depending on your needs you might want:

		* to change re_tex::

			from waflib.Tools import tex
			tex.re_tex = myregex

		* or to change the method scan from the latex tasks::

			from waflib.Task import classes
			classes['latex'].scan = myscanfunction
		"""
		node = self.inputs[0]

		nodes = []
		names = []
		seen = []
		if not node:
			return (nodes, names)

		def parse_node(node):
			if node in seen:
				return
			seen.append(node)
			code = node.read()
			for match in re_tex.finditer(code):

				multibib = match.group('type')
				if multibib and multibib.startswith('bibliography'):
					multibib = multibib[len('bibliography'):]
					if multibib.startswith('style'):
						continue
				else:
					multibib = None

				for path in match.group('file').split(','):
					if path:
						add_name = True
						found = None
						for k in exts_deps_tex:

							# issue 1067, scan in all texinputs folders
							for up in self.texinputs_nodes:
								Logs.debug('tex: trying %s%s', path, k)
								found = up.find_resource(path + k)
								if found:
									break


							# skip nodes that are outputs of other tasks in this generator
							for tsk in self.generator.tasks:
								if not found or found in tsk.outputs:
									break
							else:
								nodes.append(found)
								add_name = False
								for ext in exts_tex:
									if found.name.endswith(ext):
										parse_node(found)
										break

							# multibib stuff
							if found and multibib and found.name.endswith('.bib'):
								try:
									self.multibibs.append(found)
								except AttributeError:
									self.multibibs = [found]

							# no break, people are crazy
						if add_name:
							names.append(path)
		parse_node(node)

		for x in nodes:
			x.parent.get_bld().mkdir()

		Logs.debug("tex: found the following : %s and names %s", nodes, names)
		return (nodes, names)

	def check_status(self, msg, retcode):
		"""
		Checks an exit status and raise an error with a particular message

		:param msg: message to display if the code is non-zero
		:type msg: string
		:param retcode: condition
		:type retcode: boolean
		"""
		if retcode != 0:
			raise Errors.WafError('%r command exit status %r' % (msg, retcode))

	def info(self, *k, **kw):
		# log through the configuration logger when inside a conf test
		try:
			info = self.generator.bld.conf.logger.info
		except AttributeError:
			info = Logs.info
		info(*k, **kw)

	def bibfile(self):
		"""
		Parses *.aux* files to find bibfiles to process.
		If present, execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`
		"""
		for aux_node in self.aux_nodes:
			try:
				ct = aux_node.read()
			except EnvironmentError as e:
				# pass the exception too: the format string has two placeholders
				# (the original call supplied only one argument and itself failed)
				Logs.error('Error reading %s: %r', aux_node.abspath(), e)
				continue

			if g_bibtex_re.findall(ct):
				self.info('calling bibtex')

				self.env.env = {}
				self.env.env.update(os.environ)
				self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
				self.env.SRCFILE = aux_node.name[:-4]
				self.check_status('error when calling bibtex', self.bibtex_fun())

		for node in getattr(self, 'multibibs', []):
			self.env.env = {}
			self.env.env.update(os.environ)
			self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
			self.env.SRCFILE = node.name[:-4]
			self.check_status('error when calling bibtex', self.bibtex_fun())

	def bibunits(self):
		"""
		Parses *.aux* file to find bibunit files. If there are bibunit files,
		runs :py:meth:`waflib.Tools.tex.tex.bibtex_fun`.
		"""
		try:
			bibunits = bibunitscan(self)
		except OSError:
			Logs.error('error bibunitscan')
		else:
			if bibunits:
				fn = ['bu' + str(i) for i in range(1, len(bibunits) + 1)]
				if fn:
					self.info('calling bibtex on bibunits')

				for f in fn:
					self.env.env = {'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()}
					self.env.SRCFILE = f
					self.check_status('error when calling bibtex', self.bibtex_fun())

	def makeindex(self):
		"""
		Searches the filesystem for *.idx* files to process. If present,
		runs :py:meth:`waflib.Tools.tex.tex.makeindex_fun`
		"""
		self.idx_node = self.inputs[0].change_ext('.idx')
		try:
			idx_path = self.idx_node.abspath()
			os.stat(idx_path)
		except OSError:
			self.info('index file %s absent, not calling makeindex', idx_path)
		else:
			self.info('calling makeindex')

			self.env.SRCFILE = self.idx_node.name
			self.env.env = {}
			self.check_status('error when calling makeindex %s' % idx_path, self.makeindex_fun())

	def bibtopic(self):
		"""
		Lists additional .aux files from the bibtopic package
		"""
		p = self.inputs[0].parent.get_bld()
		if os.path.exists(os.path.join(p.abspath(), 'btaux.aux')):
			self.aux_nodes += p.ant_glob('*[0-9].aux')

	def makeglossaries(self):
		"""
		Lists additional glossaries from .aux files. If present, runs the makeglossaries program.
		"""
		src_file = self.inputs[0].abspath()
		base_file = os.path.basename(src_file)
		base, _ = os.path.splitext(base_file)
		for aux_node in self.aux_nodes:
			try:
				ct = aux_node.read()
			except EnvironmentError as e:
				# same placeholder fix as in bibfile above
				Logs.error('Error reading %s: %r', aux_node.abspath(), e)
				continue

			if g_glossaries_re.findall(ct):
				if not self.env.MAKEGLOSSARIES:
					raise Errors.WafError("The program 'makeglossaries' is missing!")
				Logs.warn('calling makeglossaries')
				self.env.SRCFILE = base
				self.check_status('error when calling makeglossaries %s' % base, self.makeglossaries_fun())
				return

	def texinputs(self):
		"""
		Returns the list of texinput nodes as a string suitable for the TEXINPUTS environment variables

		:rtype: string
		"""
		return os.pathsep.join([k.abspath() for k in self.texinputs_nodes]) + os.pathsep

	def run(self):
		"""
		Runs the whole TeX build process

		Multiple passes are required depending on the usage of cross-references,
		bibliographies, glossaries, indexes and additional contents
		The appropriate TeX compiler is called until the *.aux* files stop changing.
		"""
		env = self.env

		if not env.PROMPT_LATEX:
			env.append_value('LATEXFLAGS', '-interaction=batchmode')
			env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
			env.append_value('XELATEXFLAGS', '-interaction=batchmode')

		# important, set the cwd for everybody
		self.cwd = self.inputs[0].parent.get_bld()

		self.info('first pass on %s', self.__class__.__name__)

		# Hash .aux files before even calling the LaTeX compiler
		cur_hash = self.hash_aux_nodes()

		self.call_latex()

		# Find the .aux files again since bibtex processing can require it
		self.hash_aux_nodes()

		self.bibtopic()
		self.bibfile()
		self.bibunits()
		self.makeindex()
		self.makeglossaries()

		for i in range(10):
			# There is no need to call latex again if the .aux hash value has not changed
			prev_hash = cur_hash
			cur_hash = self.hash_aux_nodes()
			if not cur_hash:
				Logs.error('No aux.h to process')
			if cur_hash and cur_hash == prev_hash:
				break

			# run the command
			self.info('calling %s', self.__class__.__name__)
			self.call_latex()

	def hash_aux_nodes(self):
		"""
		Returns a hash of the .aux file contents

		:rtype: string or bytes
		"""
		try:
			self.aux_nodes
		except AttributeError:
			try:
				self.aux_nodes = self.scan_aux(self.inputs[0].change_ext('.aux'))
			except IOError:
				return None
		return Utils.h_list([Utils.h_file(x.abspath()) for x in self.aux_nodes])

	def call_latex(self):
		"""
		Runs the TeX compiler once
		"""
		self.env.env = {}
		self.env.env.update(os.environ)
		self.env.env.update({'TEXINPUTS': self.texinputs()})
		self.env.SRCFILE = self.inputs[0].abspath()
		self.check_status('error when calling latex', self.texfun())

class latex(tex):
	"Compiles LaTeX files"
	# compile_fun returns (callable, variables); the variables drive rebuilds
	texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)

class pdflatex(tex):
	"Compiles PdfLaTeX files"
	# same machinery as latex, but driven by the PDFLATEX program/flags
	texfun, vars = Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)

class xelatex(tex):
	"XeLaTeX files"
	# same machinery as latex, but driven by the XELATEX program/flags
	texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False)

class dvips(Task.Task):
	"Converts dvi files to postscript"
	run_str = '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}'
	color = 'BLUE'
	# must run after the tex compilers that produce the input file
	after = ['latex', 'pdflatex', 'xelatex']

class dvipdf(Task.Task):
	"Converts dvi files to pdf"
	run_str = '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}'
	color = 'BLUE'
	# must run after the tex compilers that produce the input file
	after = ['latex', 'pdflatex', 'xelatex']

class pdf2ps(Task.Task):
	"Converts pdf files to postscript"
	run_str = '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}'
	color = 'BLUE'
	# must run after the tex compilers that produce the input file
	after = ['latex', 'pdflatex', 'xelatex']

@feature('tex')
@before_method('process_source')
def apply_tex(self):
	"""
	Creates :py:class:`waflib.Tools.tex.tex` objects, and
	dvips/dvipdf/pdf2ps tasks if necessary (outs='ps', etc).
	"""
	# default to pdflatex when no (or an unknown) type is given
	if not getattr(self, 'type', None) in ('latex', 'pdflatex', 'xelatex'):
		self.type = 'pdflatex'

	outs = Utils.to_list(getattr(self, 'outs', []))

	# prompt for incomplete files (else the batchmode is used)
	try:
		# NOTE(review): task generators normally have no 'generator' attribute,
		# so this lookup appears to always raise AttributeError and leave
		# default_prompt False -- confirm the intended attribute chain
		self.generator.bld.conf
	except AttributeError:
		default_prompt = False
	else:
		default_prompt = True
	self.env.PROMPT_LATEX = getattr(self, 'prompt', default_prompt)

	deps_lst = []

	# manual dependencies may be file names or nodes
	if getattr(self, 'deps', None):
		deps = self.to_list(self.deps)
		for dep in deps:
			if isinstance(dep, str):
				n = self.path.find_resource(dep)
				if not n:
					self.bld.fatal('Could not find %r for %r' % (dep, self))
				if not n in deps_lst:
					deps_lst.append(n)
			elif isinstance(dep, Node.Node):
				deps_lst.append(dep)

	for node in self.to_nodes(self.source):
		# one compile task per source file, named after the compiler type
		if self.type == 'latex':
			task = self.create_task('latex', node, node.change_ext('.dvi'))
		elif self.type == 'pdflatex':
			task = self.create_task('pdflatex', node, node.change_ext('.pdf'))
		elif self.type == 'xelatex':
			task = self.create_task('xelatex', node, node.change_ext('.pdf'))

		task.env = self.env

		# add the manual dependencies
		if deps_lst:
			for n in deps_lst:
				if not n in task.dep_nodes:
					task.dep_nodes.append(n)

		# texinputs is a nasty beast
		if hasattr(self, 'texinputs_nodes'):
			task.texinputs_nodes = self.texinputs_nodes
		else:
			task.texinputs_nodes = [node.parent, node.parent.get_bld(), self.path, self.path.get_bld()]
			lst = os.environ.get('TEXINPUTS', '')
			if self.env.TEXINPUTS:
				lst += os.pathsep + self.env.TEXINPUTS
			if lst:
				lst = lst.split(os.pathsep)
			for x in lst:
				if x:
					if os.path.isabs(x):
						p = self.bld.root.find_node(x)
						if p:
							task.texinputs_nodes.append(p)
						else:
							Logs.error('Invalid TEXINPUTS folder %s', x)
					else:
						Logs.error('Cannot resolve relative paths in TEXINPUTS %s', x)

		# post-processing tasks for the requested output formats
		if self.type == 'latex':
			if 'ps' in outs:
				tsk = self.create_task('dvips', task.outputs, node.change_ext('.ps'))
				tsk.env.env = dict(os.environ)
			if 'pdf' in outs:
				tsk = self.create_task('dvipdf', task.outputs, node.change_ext('.pdf'))
				tsk.env.env = dict(os.environ)
		elif self.type == 'pdflatex':
			if 'ps' in outs:
				self.create_task('pdf2ps', task.outputs, node.change_ext('.ps'))
	# the sources were all consumed by the tasks above
	self.source = []

def configure(self):
	"""
	Find the programs tex, latex and others without raising errors.
	"""
	env = self.env
	programs = 'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps makeglossaries'
	for name in programs.split():
		try:
			self.find_program(name, var=name.upper())
		except self.errors.ConfigurationError:
			# every tool is optional; missing ones simply remain undetected
			pass
	env.DVIPSFLAGS = '-Ppdf'


+ 355
- 0
waflib/Tools/vala.py View File

@@ -0,0 +1,355 @@
#!/usr/bin/env python
# encoding: utf-8
# Ali Sabil, 2007
# Radosław Szkodziński, 2010

"""
At this point, vala is still unstable, so do not expect
this tool to be too stable either (apis, etc)
"""

import re
from waflib import Build, Context, Errors, Logs, Node, Options, Task, Utils
from waflib.TaskGen import extension, taskgen_method
from waflib.Configure import conf

class valac(Task.Task):
	"""
	Task that invokes the Vala compiler on a set of input files.
	"""
	vars = ["VALAC", "VALAC_VERSION", "VALAFLAGS"]
	ext_out = ['.h']

	def run(self):
		"""Builds the command line, runs valac, then writes the package deps file."""
		excluded = getattr(self, 'vala_exclude', [])
		cmd = self.env.VALAC + self.env.VALAFLAGS
		for node in self.inputs:
			if node not in excluded:
				cmd.append(node.abspath())
		status = self.exec_command(cmd, cwd=self.vala_dir_node.abspath())
		if not status and self.generator.dump_deps_node:
			# record the package list so dependent builds can read it back
			self.generator.dump_deps_node.write('\n'.join(self.generator.packages))
		return status

@taskgen_method
def init_vala_task(self):
	"""
	Initializes the vala task with the relevant data (acts as a constructor).

	Reads the task generator attributes (profile, packages, use, vala_dir,
	thread, gir, vala_defines, vapi_dirs, vala_resources, ...) and translates
	them into VALAFLAGS values and inputs/outputs on ``self.valatask``.
	"""
	# compilation profile; 'gobject' is the default used here
	self.profile = getattr(self, 'profile', 'gobject')

	self.packages = packages = Utils.to_list(getattr(self, 'packages', []))
	self.use = Utils.to_list(getattr(self, 'use', []))
	if packages and not self.use:
		self.use = packages[:] # copy

	if self.profile == 'gobject':
		if not 'GOBJECT' in self.use:
			self.use.append('GOBJECT')

	def addflags(flags):
		# helper: accumulate flags on the VALAFLAGS env variable
		self.env.append_value('VALAFLAGS', flags)

	if self.profile:
		addflags('--profile=%s' % self.profile)

	valatask = self.valatask

	# output directory for the generated C files
	if hasattr(self, 'vala_dir'):
		if isinstance(self.vala_dir, str):
			valatask.vala_dir_node = self.path.get_bld().make_node(self.vala_dir)
			try:
				valatask.vala_dir_node.mkdir()
			except OSError:
				# NOTE(review): bld.fatal raises by itself; the extra 'raise' is redundant but harmless
				raise self.bld.fatal('Cannot create the vala dir %r' % valatask.vala_dir_node)
		else:
			valatask.vala_dir_node = self.vala_dir
	else:
		valatask.vala_dir_node = self.path.get_bld()
	addflags('--directory=%s' % valatask.vala_dir_node.abspath())

	if hasattr(self, 'thread'):
		if self.profile == 'gobject':
			if not 'GTHREAD' in self.use:
				self.use.append('GTHREAD')
		else:
			# Vala doesn't have threading support for dova nor posix
			Logs.warn('Profile %s means no threading support', self.profile)
			self.thread = False

		if self.thread:
			addflags('--thread')

	# anything without the 'cprogram' feature is treated as a library
	self.is_lib = 'cprogram' not in self.features
	if self.is_lib:
		addflags('--library=%s' % self.target)

		# libraries also produce a C header and a .vapi binding
		h_node = valatask.vala_dir_node.find_or_declare('%s.h' % self.target)
		valatask.outputs.append(h_node)
		addflags('--header=%s' % h_node.name)

		valatask.outputs.append(valatask.vala_dir_node.find_or_declare('%s.vapi' % self.target))

		if getattr(self, 'gir', None):
			gir_node = valatask.vala_dir_node.find_or_declare('%s.gir' % self.gir)
			addflags('--gir=%s' % gir_node.name)
			valatask.outputs.append(gir_node)

	# the command-line option --vala-target-glib provides the default value
	self.vala_target_glib = getattr(self, 'vala_target_glib', getattr(Options.options, 'vala_target_glib', None))
	if self.vala_target_glib:
		addflags('--target-glib=%s' % self.vala_target_glib)

	addflags(['--define=%s' % x for x in Utils.to_list(getattr(self, 'vala_defines', []))])

	# private packages are passed to valac but not advertised to dependents
	packages_private = Utils.to_list(getattr(self, 'packages_private', []))
	addflags(['--pkg=%s' % x for x in packages_private])

	def _get_api_version():
		# derive the API version ('0.x' or 'x.0') from the project's API_VERSION
		api_version = '1.0'
		if hasattr(Context.g_module, 'API_VERSION'):
			version = Context.g_module.API_VERSION.split(".")
			if version[0] == "0":
				api_version = "0." + version[1]
			else:
				api_version = version[0] + ".0"
		return api_version

	self.includes = Utils.to_list(getattr(self, 'includes', []))
	valatask.install_path = getattr(self, 'install_path', '')

	# installation destinations for the generated binding files
	valatask.vapi_path = getattr(self, 'vapi_path', '${DATAROOTDIR}/vala/vapi')
	valatask.pkg_name = getattr(self, 'pkg_name', self.env.PACKAGE)
	valatask.header_path = getattr(self, 'header_path', '${INCLUDEDIR}/%s-%s' % (valatask.pkg_name, _get_api_version()))
	valatask.install_binding = getattr(self, 'install_binding', True)

	self.vapi_dirs = vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', []))

	if hasattr(self, 'use'):
		# walk the 'use' graph transitively to pick up vapi outputs of other vala libs
		local_packages = Utils.to_list(self.use)[:] # make sure to have a copy
		seen = []
		while len(local_packages) > 0:
			package = local_packages.pop()
			if package in seen:
				continue
			seen.append(package)

			# check if the package exists as a task generator of this build
			try:
				package_obj = self.bld.get_tgen_by_name(package)
			except Errors.WafError:
				continue

			# in practice the other task is already processed
			# but this makes it explicit
			package_obj.post()
			package_name = package_obj.target
			task = getattr(package_obj, 'valatask', None)
			if task:
				for output in task.outputs:
					if output.name == package_name + ".vapi":
						valatask.set_run_after(task)
						if package_name not in packages:
							packages.append(package_name)
						if output.parent not in vapi_dirs:
							vapi_dirs.append(output.parent)
						if output.parent not in self.includes:
							self.includes.append(output.parent)

			if hasattr(package_obj, 'use'):
				# queue the transitive dependencies as well
				lst = self.to_list(package_obj.use)
				lst.reverse()
				local_packages = [pkg for pkg in lst if pkg not in seen] + local_packages

	addflags(['--pkg=%s' % p for p in packages])

	for vapi_dir in vapi_dirs:
		if isinstance(vapi_dir, Node.Node):
			v_node = vapi_dir
		else:
			v_node = self.path.find_dir(vapi_dir)
		if not v_node:
			Logs.warn('Unable to locate Vala API directory: %r', vapi_dir)
		else:
			addflags('--vapidir=%s' % v_node.abspath())

	# libraries with package dependencies also emit a .deps file (written in valac.run)
	self.dump_deps_node = None
	if self.is_lib and self.packages:
		self.dump_deps_node = valatask.vala_dir_node.find_or_declare('%s.deps' % self.target)
		valatask.outputs.append(self.dump_deps_node)

	if self.is_lib and valatask.install_binding:
		headers_list = [o for o in valatask.outputs if o.suffix() == ".h"]
		if headers_list:
			self.install_vheader = self.add_install_files(install_to=valatask.header_path, install_from=headers_list)

		vapi_list = [o for o in valatask.outputs if (o.suffix() in (".vapi", ".deps"))]
		if vapi_list:
			self.install_vapi = self.add_install_files(install_to=valatask.vapi_path, install_from=vapi_list)

		gir_list = [o for o in valatask.outputs if o.suffix() == '.gir']
		if gir_list:
			self.install_gir = self.add_install_files(
				install_to=getattr(self, 'gir_path', '${DATAROOTDIR}/gir-1.0'), install_from=gir_list)

	if hasattr(self, 'vala_resources'):
		# GResource files are passed to valac but excluded from the compiled sources
		nodes = self.to_nodes(self.vala_resources)
		valatask.vala_exclude = getattr(valatask, 'vala_exclude', []) + nodes
		valatask.inputs.extend(nodes)
		for x in nodes:
			addflags(['--gresources', x.abspath()])

@extension('.vala', '.gs')
def vala_file(self, node):
	"""
	Registers a vala/genie source file on the task generator's single valac
	task, creating and initializing that task on first use. The generated C
	file is declared as an output and appended to ``self.source`` so that it
	is compiled by the C tool chain afterwards. Typical usage::

		def build(bld):
			bld.program(
				packages      = 'gtk+-2.0',
				target        = 'vala-gtk-example',
				use           = 'GTK GLIB',
				source        = 'vala-gtk-example.vala foo.vala',
				vala_defines  = ['DEBUG'])

	:param node: vala file
	:type node: :py:class:`waflib.Node.Node`
	"""
	if not hasattr(self, 'valatask'):
		self.valatask = self.create_task('valac')
		self.init_vala_task()
	tsk = self.valatask

	tsk.inputs.append(node)
	# declare the C file valac will generate for this input
	stem = node.name[:node.name.rfind('.')]
	c_node = tsk.vala_dir_node.find_or_declare(stem + '.c')
	tsk.outputs.append(c_node)
	self.source.append(c_node)

@extension('.vapi')
def vapi_file(self, node):
	"""Adds a .vapi file as an input of the (lazily created) valac task."""
	if not hasattr(self, 'valatask'):
		self.valatask = self.create_task('valac')
		self.init_vala_task()
	self.valatask.inputs.append(node)

@conf
def find_valac(self, valac_name, min_version):
	"""
	Finds the valac program and executes it to store its version number
	in *conf.env.VALAC_VERSION*.

	:param valac_name: program name
	:type valac_name: string or list of string
	:param min_version: minimum version acceptable
	:type min_version: tuple of int
	:return: the command-line for running valac
	:rtype: list of string
	"""
	valac = self.find_program(valac_name, var='VALAC')
	try:
		output = self.cmd_and_log(valac + ['--version'])
	except Errors.WafError:
		valac_version = None
	else:
		# dots must be escaped, else '.' would match any character;
		# guard against output without any parseable version string
		ver = re.search(r'\d+\.\d+\.\d+', output)
		valac_version = tuple(int(x) for x in ver.group().split('.')) if ver else None

	self.msg('Checking for %s version >= %r' % (valac_name, min_version),
		valac_version, valac_version and valac_version >= min_version)
	# comparing None against a tuple raises TypeError on Python 3, so test explicitly
	if valac and (valac_version is None or valac_version < min_version):
		self.fatal("%s version %r is too old, need >= %r" % (valac_name, valac_version, min_version))

	self.env.VALAC_VERSION = valac_version
	return valac

@conf
def check_vala(self, min_version=(0,8,0), branch=None):
	"""
	Checks that a vala compiler of at least the given version exists, trying
	the branch-specific name (e.g. ``valac-0.8``) first and plain ``valac``
	as a fallback.

	:param min_version: minimum version acceptable (0.8.0)
	:type min_version: tuple
	:param branch: first part of the version number, in case a snapshot is used (0, 8)
	:type branch: tuple of int
	"""
	# configuration-time overrides take precedence over the arguments
	min_version = self.env.VALA_MINVER or min_version
	branch = self.env.VALA_MINVER_BRANCH or branch or min_version[:2]
	try:
		find_valac(self, 'valac-%d.%d' % (branch[0], branch[1]), min_version)
	except self.errors.ConfigurationError:
		find_valac(self, 'valac', min_version)

@conf
def check_vala_deps(self):
	"""
	Loads the gobject and gthread packages if they are missing.
	"""
	glib_min = getattr(Options.options, 'vala_target_glib', None)
	for store, pkg in (('GOBJECT', 'gobject-2.0'), ('GTHREAD', 'gthread-2.0')):
		if not self.env['HAVE_' + store]:
			kw = {'package': pkg, 'uselib_store': store, 'args': '--cflags --libs'}
			if glib_min:
				kw['atleast_version'] = glib_min
			self.check_cfg(**kw)

def configure(self):
	"""
	Detects the vala compiler and its dependencies (gobject/gthread).

	Use the following to enforce a minimum vala version::

		def configure(conf):
			conf.env.VALA_MINVER = (0, 10, 0)
			conf.load('vala')
	"""
	self.load('gnu_dirs')
	self.check_vala_deps()
	self.check_vala()
	# honor VALAFLAGS from the OS environment, and always generate C code
	self.add_os_flags('VALAFLAGS')
	self.env.append_unique('VALAFLAGS', ['-C'])

def options(opt):
	"""
	Load the :py:mod:`waflib.Tools.gnu_dirs` tool and add the ``--vala-target-glib`` command-line option
	"""
	opt.load('gnu_dirs')
	# --vala-target-glib feeds the default for the task generator attribute vala_target_glib
	valaopts = opt.add_option_group('Vala Compiler Options')
	valaopts.add_option('--vala-target-glib', default=None,
		dest='vala_target_glib', metavar='MAJOR.MINOR',
		help='Target version of glib for Vala GObject code generation')


+ 153
- 58
waflib/Tools/waf_unit_test.py View File

@@ -1,10 +1,10 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2006
# Thomas Nagy, 2010
# Thomas Nagy, 2010-2018 (ita)

"""
Unit testing system for C/C++/D providing test execution:
Unit testing system for C/C++/D and interpreted languages providing test execution:

* in parallel, by using ``waf -j``
* partial (only the tests that have changed) or full (by using ``waf --alltests``)
@@ -31,31 +31,128 @@ the predefined callback::
bld(features='cxx cxxprogram test', source='main.c', target='app')
from waflib.Tools import waf_unit_test
bld.add_post_fun(waf_unit_test.summary)

By passing --dump-test-scripts the build outputs corresponding python files
(with extension _run.py) that are useful for debugging purposes.
"""

import os
import os, shlex, sys
from waflib.TaskGen import feature, after_method, taskgen_method
from waflib import Utils, Task, Logs, Options
from waflib.Tools import ccroot
testlock = Utils.threading.Lock()

# Template of the standalone helper script written next to each test binary
# when --dump-test-scripts is given; it replays the exact command, environment
# and working directory of the test run for debugging purposes.
SCRIPT_TEMPLATE = """#! %(python)s
import subprocess, sys
cmd = %(cmd)r
# if you want to debug with gdb:
#cmd = ['gdb', '-args'] + cmd
env = %(env)r
status = subprocess.call(cmd, env=env, cwd=%(cwd)r, shell=isinstance(cmd, str))
sys.exit(status)
"""

@taskgen_method
def handle_ut_cwd(self, key):
	"""
	Task generator method, used internally to limit code duplication.
	Normalizes the attribute named *key*, when given as a string, into a Node
	stored on ``self.ut_cwd``. This method may disappear anytime.
	"""
	cwd = getattr(self, key, None)
	if not cwd or not isinstance(cwd, str):
		return
	# absolute strings hang off the filesystem root, relative ones off the wscript dir
	base = self.bld.root if os.path.isabs(cwd) else self.path
	self.ut_cwd = base.make_node(cwd)

@feature('test_scripts')
def make_interpreted_test(self):
	"""
	Creates interpreted unit tests: one 'utest' task per node in
	``test_scripts_source``, executed through ``test_scripts_template``.
	"""
	for x in ['test_scripts_source', 'test_scripts_template']:
		if not hasattr(self, x):
			# fixed message: 'i missing' -> 'is missing'
			Logs.warn('a test_scripts taskgen is missing %s' % x)
			return

	self.ut_run, lst = Task.compile_fun(self.test_scripts_template, shell=getattr(self, 'test_scripts_shell', False))

	script_nodes = self.to_nodes(self.test_scripts_source)
	for script_node in script_nodes:
		tsk = self.create_task('utest', [script_node])
		tsk.vars = lst + tsk.vars
		tsk.env['SCRIPT'] = script_node.path_from(tsk.get_cwd())

	self.handle_ut_cwd('test_scripts_cwd')

	# the environment of the test process: explicit override or a copy of os.environ
	env = getattr(self, 'test_scripts_env', None)
	if env:
		self.ut_env = env
	else:
		self.ut_env = dict(os.environ)

	# prepend/append extra search paths (e.g. PATH, PYTHONPATH) for the tests
	paths = getattr(self, 'test_scripts_paths', {})
	for (k, v) in paths.items():
		p = self.ut_env.get(k, '').split(os.pathsep)
		if isinstance(v, str):
			v = v.split(os.pathsep)
		self.ut_env[k] = os.pathsep.join(p + v)

@feature('test')
@after_method('apply_link')
@after_method('apply_link', 'process_use')
def make_test(self):
"""Create the unit test task. There can be only one unit test task by task generator."""
if getattr(self, 'link_task', None):
self.create_task('utest', self.link_task.outputs)

if not getattr(self, 'link_task', None):
return

tsk = self.create_task('utest', self.link_task.outputs)
if getattr(self, 'ut_str', None):
self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
tsk.vars = lst + tsk.vars

self.handle_ut_cwd('ut_cwd')

if not hasattr(self, 'ut_paths'):
paths = []
for x in self.tmp_use_sorted:
try:
y = self.bld.get_tgen_by_name(x).link_task
except AttributeError:
pass
else:
if not isinstance(y, ccroot.stlink_task):
paths.append(y.outputs[0].parent.abspath())
self.ut_paths = os.pathsep.join(paths) + os.pathsep

if not hasattr(self, 'ut_env'):
self.ut_env = dct = dict(os.environ)
def add_path(var):
dct[var] = self.ut_paths + dct.get(var,'')
if Utils.is_win32:
add_path('PATH')
elif Utils.unversioned_sys_platform() == 'darwin':
add_path('DYLD_LIBRARY_PATH')
add_path('LD_LIBRARY_PATH')
else:
add_path('LD_LIBRARY_PATH')

if not hasattr(self, 'ut_cmd'):
self.ut_cmd = getattr(Options.options, 'testcmd', False)

@taskgen_method
def add_test_results(self, tup):
	"""
	Records a test result tuple on both the task generator and the build
	context. Override and return ``tup[1]`` to interrupt the build
	immediately if a test does not run.
	"""
	Logs.debug("ut: %r", tup)
	self.utest_result = tup
	# append to both accumulators, creating the list on first use
	for holder in (self, self.bld):
		if hasattr(holder, 'utest_results'):
			holder.utest_results.append(tup)
		else:
			holder.utest_results = [tup]

@Task.deep_inputs
class utest(Task.Task):
"""
Execute a unit test
@@ -63,6 +160,7 @@ class utest(Task.Task):
color = 'PINK'
after = ['vnum', 'inst']
vars = []

def runnable_status(self):
"""
Always execute the task if `waf --alltests` was used or no
@@ -77,37 +175,17 @@ class utest(Task.Task):
return Task.RUN_ME
return ret

def add_path(self, dct, path, var):
dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])

def get_test_env(self):
"""
In general, tests may require any library built anywhere in the project.
Override this method if fewer paths are needed
"""
try:
fu = getattr(self.generator.bld, 'all_test_paths')
except AttributeError:
# this operation may be performed by at most #maxjobs
fu = os.environ.copy()

lst = []
for g in self.generator.bld.groups:
for tg in g:
if getattr(tg, 'link_task', None):
s = tg.link_task.outputs[0].parent.abspath()
if s not in lst:
lst.append(s)

if Utils.is_win32:
self.add_path(fu, lst, 'PATH')
elif Utils.unversioned_sys_platform() == 'darwin':
self.add_path(fu, lst, 'DYLD_LIBRARY_PATH')
self.add_path(fu, lst, 'LD_LIBRARY_PATH')
else:
self.add_path(fu, lst, 'LD_LIBRARY_PATH')
self.generator.bld.all_test_paths = fu
return fu
return self.generator.ut_env

def post_run(self):
super(utest, self).post_run()
if getattr(Options.options, 'clear_failed_tests', False) and self.waf_unit_test_results[1]:
self.generator.bld.task_sigs[self.uid()] = None

def run(self):
"""
@@ -116,29 +194,44 @@ class utest(Task.Task):

Override ``add_test_results`` to interrupt the build
"""

filename = self.inputs[0].abspath()
self.ut_exec = getattr(self.generator, 'ut_exec', [filename])
if getattr(self.generator, 'ut_fun', None):
self.generator.ut_fun(self)


cwd = getattr(self.generator, 'ut_cwd', '') or self.inputs[0].parent.abspath()

testcmd = getattr(self.generator, 'ut_cmd', False) or getattr(Options.options, 'testcmd', False)
if testcmd:
self.ut_exec = (testcmd % self.ut_exec[0]).split(' ')

proc = Utils.subprocess.Popen(self.ut_exec, cwd=cwd, env=self.get_test_env(), stderr=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE)
if hasattr(self.generator, 'ut_run'):
return self.generator.ut_run(self)

self.ut_exec = getattr(self.generator, 'ut_exec', [self.inputs[0].abspath()])
ut_cmd = getattr(self.generator, 'ut_cmd', False)
if ut_cmd:
self.ut_exec = shlex.split(ut_cmd % ' '.join(self.ut_exec))

return self.exec_command(self.ut_exec)

def exec_command(self, cmd, **kw):
Logs.debug('runner: %r', cmd)
if getattr(Options.options, 'dump_test_scripts', False):
script_code = SCRIPT_TEMPLATE % {
'python': sys.executable,
'env': self.get_test_env(),
'cwd': self.get_cwd().abspath(),
'cmd': cmd
}
script_file = self.inputs[0].abspath() + '_run.py'
Utils.writef(script_file, script_code)
os.chmod(script_file, Utils.O755)
if Logs.verbose > 1:
Logs.info('Test debug file written as %r' % script_file)

proc = Utils.subprocess.Popen(cmd, cwd=self.get_cwd().abspath(), env=self.get_test_env(),
stderr=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE, shell=isinstance(cmd,str))
(stdout, stderr) = proc.communicate()

tup = (filename, proc.returncode, stdout, stderr)
self.waf_unit_test_results = tup = (self.inputs[0].abspath(), proc.returncode, stdout, stderr)
testlock.acquire()
try:
return self.generator.add_test_results(tup)
finally:
testlock.release()

def get_cwd(self):
return getattr(self.generator, 'ut_cwd', self.inputs[0].parent)

def summary(bld):
"""
Display an execution summary::
@@ -155,15 +248,15 @@ def summary(bld):
total = len(lst)
tfail = len([x for x in lst if x[1]])

Logs.pprint('CYAN', ' tests that pass %d/%d' % (total-tfail, total))
Logs.pprint('GREEN', ' tests that pass %d/%d' % (total-tfail, total))
for (f, code, out, err) in lst:
if not code:
Logs.pprint('CYAN', ' %s' % f)
Logs.pprint('GREEN', ' %s' % f)

Logs.pprint('CYAN', ' tests that fail %d/%d' % (tfail, total))
Logs.pprint('GREEN' if tfail == 0 else 'RED', ' tests that fail %d/%d' % (tfail, total))
for (f, code, out, err) in lst:
if code:
Logs.pprint('CYAN', ' %s' % f)
Logs.pprint('RED', ' %s' % f)

def set_exit_code(bld):
"""
@@ -194,8 +287,10 @@ def options(opt):
"""
opt.add_option('--notests', action='store_true', default=False, help='Exec no unit tests', dest='no_tests')
opt.add_option('--alltests', action='store_true', default=False, help='Exec all unit tests', dest='all_tests')
opt.add_option('--testcmd', action='store', default=False,
help = 'Run the unit tests using the test-cmd string'
' example "--test-cmd="valgrind --error-exitcode=1'
' %s" to run under valgrind', dest='testcmd')
opt.add_option('--clear-failed', action='store_true', default=False,
help='Force failed unit tests to run again next time', dest='clear_failed_tests')
opt.add_option('--testcmd', action='store', default=False, dest='testcmd',
help='Run the unit tests using the test-cmd string example "--testcmd="valgrind --error-exitcode=1 %s" to run under valgrind')
opt.add_option('--dump-test-scripts', action='store_true', default=False,
help='Create python scripts to help debug tests', dest='dump_test_scripts')


+ 78
- 0
waflib/Tools/winres.py View File

@@ -0,0 +1,78 @@
#!/usr/bin/env python
# encoding: utf-8
# Brant Young, 2007

"Process *.rc* files for C/C++: X{.rc -> [.res|.rc.o]}"

import re
from waflib import Task
from waflib.TaskGen import extension
from waflib.Tools import c_preproc

@extension('.rc')
def rc_file(self, node):
	"""
	Binds the .rc extension to a winrc task
	"""
	# msvc's rc writes .res files, windres writes plain objects
	obj_ext = '.res' if self.env.WINRC_TGT_F == '/fo' else '.rc.o'
	rctask = self.create_task('winrc', node, node.change_ext(obj_ext))
	# register the result so the link tasks pick it up
	if hasattr(self, 'compiled_tasks'):
		self.compiled_tasks.append(rctask)
	else:
		self.compiled_tasks = [rctask]

# Matches either a preprocessor directive (group 2 = keyword, group 3 = rest)
# or a resource statement referencing a file (group 4 = kind, group 5 = file).
# Raw strings are required: '\s' and '\w' are invalid escapes in plain strings
# (SyntaxWarning on modern Python).
re_lines = re.compile(
	r'(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|'\
	r'(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)',
	re.IGNORECASE | re.MULTILINE)

class rc_parser(c_preproc.c_parser):
	"""
	Calculates dependencies in .rc files
	"""
	def filter_comments(self, node):
		"""
		Overrides :py:meth:`waflib.Tools.c_preproc.c_parser.filter_comments`

		Returns the directives of a .rc file as ``(keyword, rest-of-line)``
		tuples; resource statements (ICON, BITMAP, ...) are reported as
		``('include', filename)`` so the referenced files become dependencies.
		"""
		code = node.read()
		if c_preproc.use_trigraphs:
			for (a, b) in c_preproc.trig_def:
				code = code.split(a).join(b)
		# strip line continuations and comments before scanning
		code = c_preproc.re_nl.sub('', code)
		code = c_preproc.re_cpp.sub(c_preproc.repl, code)
		ret = []
		for m in re.finditer(re_lines, code):
			if m.group(2):
				# preprocessor directive: (keyword, arguments)
				ret.append((m.group(2), m.group(3)))
			else:
				# resource statement: treat the payload as an include
				ret.append(('include', m.group(5)))
		return ret

class winrc(Task.Task):
	"""
	Compiles resource files (with RC or windres, see configure below)
	"""
	run_str = '${WINRC} ${WINRCFLAGS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'
	color = 'BLUE'
	def scan(self):
		# reuse the C preprocessor machinery to find #include/resource dependencies
		tmp = rc_parser(self.generator.includes_nodes)
		tmp.start(self.inputs[0], self.env)
		return (tmp.nodes, tmp.names)

def configure(conf):
	"""
	Detects the programs RC or windres, depending on the C/C++ compiler in use
	"""
	v = conf.env
	if v.WINRC:
		# already configured
		return
	if v.CC_NAME == 'msvc':
		conf.find_program('RC', var='WINRC', path_list=v.PATH)
		v.WINRC_TGT_F = '/fo'
		v.WINRC_SRC_F = ''
	else:
		conf.find_program('windres', var='WINRC', path_list=v.PATH)
		v.WINRC_TGT_F = '-o'
		v.WINRC_SRC_F = '-i'


+ 26
- 28
waflib/Tools/xlc.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
# Michael Kuhn, 2009
@@ -11,7 +11,7 @@ from waflib.Configure import conf
@conf
def find_xlc(conf):
"""
Detect the Aix C compiler
Detects the Aix C compiler
"""
cc = conf.find_program(['xlc_r', 'xlc'], var='CC')
conf.get_xlc_version(cc)
@@ -24,38 +24,36 @@ def xlc_common_flags(conf):
"""
v = conf.env

v['CC_SRC_F'] = []
v['CC_TGT_F'] = ['-c', '-o']
v.CC_SRC_F = []
v.CC_TGT_F = ['-c', '-o']

# linker
if not v['LINK_CC']: v['LINK_CC'] = v['CC']
v['CCLNK_SRC_F'] = []
v['CCLNK_TGT_F'] = ['-o']
v['CPPPATH_ST'] = '-I%s'
v['DEFINES_ST'] = '-D%s'
if not v.LINK_CC:
v.LINK_CC = v.CC

v['LIB_ST'] = '-l%s' # template for adding libs
v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
v['STLIB_ST'] = '-l%s'
v['STLIBPATH_ST'] = '-L%s'
v['RPATH_ST'] = '-Wl,-rpath,%s'
v.CCLNK_SRC_F = []
v.CCLNK_TGT_F = ['-o']
v.CPPPATH_ST = '-I%s'
v.DEFINES_ST = '-D%s'

v['SONAME_ST'] = []
v['SHLIB_MARKER'] = []
v['STLIB_MARKER'] = []
v.LIB_ST = '-l%s' # template for adding libs
v.LIBPATH_ST = '-L%s' # template for adding libpaths
v.STLIB_ST = '-l%s'
v.STLIBPATH_ST = '-L%s'
v.RPATH_ST = '-Wl,-rpath,%s'

# program
v['LINKFLAGS_cprogram'] = ['-Wl,-brtl']
v['cprogram_PATTERN'] = '%s'
v.SONAME_ST = []
v.SHLIB_MARKER = []
v.STLIB_MARKER = []

# shared library
v['CFLAGS_cshlib'] = ['-fPIC']
v['LINKFLAGS_cshlib'] = ['-G', '-Wl,-brtl,-bexpfull']
v['cshlib_PATTERN'] = 'lib%s.so'
v.LINKFLAGS_cprogram = ['-Wl,-brtl']
v.cprogram_PATTERN = '%s'

# static lib
v['LINKFLAGS_cstlib'] = []
v['cstlib_PATTERN'] = 'lib%s.a'
v.CFLAGS_cshlib = ['-fPIC']
v.LINKFLAGS_cshlib = ['-G', '-Wl,-brtl,-bexpfull']
v.cshlib_PATTERN = 'lib%s.so'

v.LINKFLAGS_cstlib = []
v.cstlib_PATTERN = 'lib%s.a'

def configure(conf):
conf.find_xlc()


+ 26
- 28
waflib/Tools/xlcxx.py View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
# Michael Kuhn, 2009
@@ -11,7 +11,7 @@ from waflib.Configure import conf
@conf
def find_xlcxx(conf):
"""
Detect the Aix C++ compiler
Detects the Aix C++ compiler
"""
cxx = conf.find_program(['xlc++_r', 'xlc++'], var='CXX')
conf.get_xlc_version(cxx)
@@ -24,38 +24,36 @@ def xlcxx_common_flags(conf):
"""
v = conf.env

v['CXX_SRC_F'] = []
v['CXX_TGT_F'] = ['-c', '-o']
v.CXX_SRC_F = []
v.CXX_TGT_F = ['-c', '-o']

# linker
if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
v['CXXLNK_SRC_F'] = []
v['CXXLNK_TGT_F'] = ['-o']
v['CPPPATH_ST'] = '-I%s'
v['DEFINES_ST'] = '-D%s'
if not v.LINK_CXX:
v.LINK_CXX = v.CXX

v['LIB_ST'] = '-l%s' # template for adding libs
v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
v['STLIB_ST'] = '-l%s'
v['STLIBPATH_ST'] = '-L%s'
v['RPATH_ST'] = '-Wl,-rpath,%s'
v.CXXLNK_SRC_F = []
v.CXXLNK_TGT_F = ['-o']
v.CPPPATH_ST = '-I%s'
v.DEFINES_ST = '-D%s'

v['SONAME_ST'] = []
v['SHLIB_MARKER'] = []
v['STLIB_MARKER'] = []
v.LIB_ST = '-l%s' # template for adding libs
v.LIBPATH_ST = '-L%s' # template for adding libpaths
v.STLIB_ST = '-l%s'
v.STLIBPATH_ST = '-L%s'
v.RPATH_ST = '-Wl,-rpath,%s'

# program
v['LINKFLAGS_cxxprogram']= ['-Wl,-brtl']
v['cxxprogram_PATTERN'] = '%s'
v.SONAME_ST = []
v.SHLIB_MARKER = []
v.STLIB_MARKER = []

# shared library
v['CXXFLAGS_cxxshlib'] = ['-fPIC']
v['LINKFLAGS_cxxshlib'] = ['-G', '-Wl,-brtl,-bexpfull']
v['cxxshlib_PATTERN'] = 'lib%s.so'
v.LINKFLAGS_cxxprogram= ['-Wl,-brtl']
v.cxxprogram_PATTERN = '%s'

# static lib
v['LINKFLAGS_cxxstlib'] = []
v['cxxstlib_PATTERN'] = 'lib%s.a'
v.CXXFLAGS_cxxshlib = ['-fPIC']
v.LINKFLAGS_cxxshlib = ['-G', '-Wl,-brtl,-bexpfull']
v.cxxshlib_PATTERN = 'lib%s.so'

v.LINKFLAGS_cxxstlib = []
v.cxxstlib_PATTERN = 'lib%s.a'

def configure(conf):
conf.find_xlcxx()


+ 448
- 204
waflib/Utils.py
File diff suppressed because it is too large
View File


+ 1
- 1
waflib/__init__.py View File

@@ -1,3 +1,3 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
# Thomas Nagy, 2005-2018 (ita)

+ 3
- 3
waflib/ansiterm.py View File

@@ -120,7 +120,7 @@ else:
def clear_line(self, param):
mode = param and int(param) or 0
sbinfo = self.screen_buffer_info()
if mode == 1: # Clear from begining of line to cursor position
if mode == 1: # Clear from beginning of line to cursor position
line_start = COORD(0, sbinfo.CursorPosition.Y)
line_length = sbinfo.Size.X
elif mode == 2: # Clear entire line
@@ -136,7 +136,7 @@ else:
def clear_screen(self, param):
mode = to_int(param, 0)
sbinfo = self.screen_buffer_info()
if mode == 1: # Clear from begining of screen to cursor position
if mode == 1: # Clear from beginning of screen to cursor position
clear_start = COORD(0, 0)
clear_length = sbinfo.CursorPosition.X * sbinfo.CursorPosition.Y
elif mode == 2: # Clear entire screen and return cursor to home
@@ -320,7 +320,7 @@ else:
sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
def get_term_cols():
windll.kernel32.GetConsoleScreenBufferInfo(console, byref(sbinfo))
# TODO Issue 1401
# Issue 1401 - the progress bar cannot reach the last character
return sbinfo.Size.X - 1

# just try and see


+ 18
- 14
waflib/extras/batched_cc.py View File

@@ -3,21 +3,22 @@
# Thomas Nagy, 2006-2015 (ita)

"""
Build as batches.

Instead of compiling object files one by one, c/c++ compilers are often able to compile at once:
cc -c ../file1.c ../file2.c ../file3.c

Files are output on the directory where the compiler is called, and dependencies are more difficult
to track (do not run the command on all source files if only one file changes)

As such, we do as if the files were compiled one by one, but no command is actually run:
replace each cc/cpp Task by a TaskSlave. A new task called TaskMaster collects the
signatures from each slave and finds out the command-line to run.

Just import this module in the configuration (no other change required).
This is provided as an example, for performance unity builds are recommended (fewer tasks and
fewer jobs to execute). See waflib/extras/unity.py.
Just import this module to start using it:
def build(bld):
bld.load('batched_cc')

Note that this is provided as an example, unity builds are recommended
for best performance results (fewer tasks and fewer jobs to execute).
See waflib/extras/unity.py.
"""

from waflib import Task, Utils
@@ -26,24 +27,21 @@ from waflib.Tools import c, cxx

MAX_BATCH = 50

c_str = '${CC} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED}'
c_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${tsk.batch_incpaths()} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED} ${CPPFLAGS}'
c_fun, _ = Task.compile_fun_noshell(c_str)

cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED}'
cxx_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${tsk.batch_incpaths()} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED} ${CPPFLAGS}'
cxx_fun, _ = Task.compile_fun_noshell(cxx_str)

count = 70000
class batch_task(Task.Task):
class batch(Task.Task):
color = 'PINK'

after = ['c', 'cxx']
before = ['cprogram', 'cshlib', 'cstlib', 'cxxprogram', 'cxxshlib', 'cxxstlib']

def uid(self):
m = Utils.md5()
m.update(Task.Task.uid(self))
m.update(str(self.generator.idx).encode())
return m.digest()
return Utils.h_list([Task.Task.uid(self), self.generator.idx, self.generator.path.abspath(), self.generator.target])

def __str__(self):
return 'Batch compilation for %d slaves' % len(self.slaves)
@@ -74,6 +72,13 @@ class batch_task(Task.Task):

return Task.SKIP_ME

def get_cwd(self):
return self.slaves[0].outputs[0].parent

def batch_incpaths(self):
st = self.env.CPPPATH_ST
return [st % node.abspath() for node in self.generator.includes_nodes]

def run(self):
self.outputs = []

@@ -85,7 +90,6 @@ class batch_task(Task.Task):
srclst.append(t.inputs[0].abspath())

self.env.SRCLST = srclst
self.cwd = slaves[0].outputs[0].parent.abspath()

if self.slaves[0].__class__.__name__ == 'c':
ret = c_fun(self)


+ 58
- 0
waflib/extras/biber.py View File

@@ -0,0 +1,58 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011 (ita)

"""
Latex processing using "biber"
"""

import os
from waflib import Task, Logs

from waflib.Tools import tex as texmodule

class tex(texmodule.tex):
	# function object executing '${BIBER} ${BIBERFLAGS} ${SRCFILE}'
	biber_fun, _ = Task.compile_fun('${BIBER} ${BIBERFLAGS} ${SRCFILE}',shell=False)
	biber_fun.__doc__ = """
	Execute the program **biber**
	"""

	def bibfile(self):
		# disable the regular bibtex step; biber is driven from bibunits() instead
		return None

	def bibunits(self):
		"""Runs biber when a .bcf control file exists, else falls back to bibtex."""
		self.env.env = {}
		self.env.env.update(os.environ)
		self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
		# strip the '.aux' extension: biber expects the bare job name
		self.env.SRCFILE = self.aux_nodes[0].name[:-4]

		if not self.env['PROMPT_LATEX']:
			self.env.append_unique('BIBERFLAGS', '--quiet')

		path = self.aux_nodes[0].abspath()[:-4] + '.bcf'
		if os.path.isfile(path):
			Logs.warn('calling biber')
			self.check_status('error when calling biber, check %s.blg for errors' % (self.env.SRCFILE), self.biber_fun())
		else:
			# no biber control file: restore the default bibtex behaviour
			super(tex, self).bibfile()
			super(tex, self).bibunits()

# variants binding the different TeX engines to the biber-aware task class above
class latex(tex):
	texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
class pdflatex(tex):
	texfun, vars = Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
class xelatex(tex):
	texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False)

def configure(self):
	"""
	Almost the same as in tex.py, but also tries to detect 'biber'.
	Missing programs are tolerated; tasks requiring them fail later.
	"""
	v = self.env
	# stray leading space removed from the program list; attribute-style env
	# access used for consistency with tex.py's configure
	for p in 'biber tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split():
		try:
			self.find_program(p, var=p.upper())
		except self.errors.ConfigurationError:
			pass
	v.DVIPSFLAGS = '-Ppdf'


+ 128
- 0
waflib/extras/bjam.py View File

@@ -0,0 +1,128 @@
#! /usr/bin/env python
# per rosengren 2011

from os import sep, readlink
from waflib import Logs
from waflib.TaskGen import feature, after_method
from waflib.Task import Task, always_run

def options(opt):
	"""Register the bjam-specific command-line options."""
	group = opt.add_option_group('Bjam Options')
	group.add_option('--bjam_src', default=None, help='You can find it in <boost root>/tools/jam/src')
	group.add_option('--bjam_uname', default='linuxx86_64', help='bjam is built in <src>/bin.<uname>/bjam')
	group.add_option('--bjam_config', default=None)
	group.add_option('--bjam_toolset', default=None)

def configure(cnf):
	"""Copy the bjam options into the persistent environment and locate bjam."""
	env = cnf.env
	if not env.BJAM_SRC:
		env.BJAM_SRC = cnf.options.bjam_src
	if not env.BJAM_UNAME:
		env.BJAM_UNAME = cnf.options.bjam_uname
	try:
		cnf.find_program('bjam', path_list=[env.BJAM_SRC + sep + 'bin.' + env.BJAM_UNAME])
	except Exception:
		# bjam may be built later by the bjam_creator task
		env.BJAM = None
	if not env.BJAM_CONFIG:
		env.BJAM_CONFIG = cnf.options.bjam_config
	if not env.BJAM_TOOLSET:
		env.BJAM_TOOLSET = cnf.options.bjam_toolset

@feature('bjam')
@after_method('process_rule')
def process_bjam(self):
	# Create the task chain that builds boost with bjam:
	# - bjam_creator builds the bjam executable itself, only when it was not
	#   already found at configuration time
	# - bjam_build runs bjam to build the requested libraries
	# - bjam_installer installs the produced files
	if not self.bld.env.BJAM:
		self.create_task('bjam_creator')
	self.create_task('bjam_build')
	self.create_task('bjam_installer')
	if getattr(self, 'always', False):
		# force the task classes to run on every build
		always_run(bjam_creator)
		always_run(bjam_build)
		always_run(bjam_installer)

class bjam_creator(Task):
	"""Builds the bjam executable from its sources by running <src>/build.sh."""
	ext_out = 'bjam_exe'
	vars=['BJAM_SRC', 'BJAM_UNAME']
	def run(self):
		env = self.env
		gen = self.generator
		bjam = gen.bld.root.find_dir(env.BJAM_SRC)
		if not bjam:
			Logs.error('Can not find bjam source')
			return -1
		# reuse an already-built bjam binary when present
		bjam_exe_relpath = 'bin.' + env.BJAM_UNAME + '/bjam'
		bjam_exe = bjam.find_resource(bjam_exe_relpath)
		if bjam_exe:
			env.BJAM = bjam_exe.srcpath()
			return 0
		# otherwise compile it with the bundled build script
		bjam_cmd = ['./build.sh']
		Logs.debug('runner: ' + bjam.srcpath() + '> ' + str(bjam_cmd))
		result = self.exec_command(bjam_cmd, cwd=bjam.srcpath())
		if not result == 0:
			Logs.error('bjam failed')
			return -1
		# the build script must have produced the binary at the expected path
		bjam_exe = bjam.find_resource(bjam_exe_relpath)
		if bjam_exe:
			env.BJAM = bjam_exe.srcpath()
			return 0
		Logs.error('bjam failed')
		return -1

class bjam_build(Task):
	"""Runs bjam to stage the requested boost libraries into the build directory."""
	ext_in = 'bjam_exe'
	ext_out = 'install'
	vars = ['BJAM_TOOLSET']
	def run(self):
		env = self.env
		gen = self.generator
		path = gen.path
		bld = gen.bld
		# an optional 'root' attribute on the generator overrides the boost root
		if hasattr(gen, 'root'):
			build_root = path.find_node(gen.root)
		else:
			build_root = path
		jam = bld.srcnode.find_resource(env.BJAM_CONFIG)
		if jam:
			Logs.debug('bjam: Using jam configuration from ' + jam.srcpath())
			jam_rel = jam.relpath_gen(build_root)
		else:
			Logs.warn('No build configuration in build_config/user-config.jam. Using default')
			jam_rel = None
		bjam_exe = bld.srcnode.find_node(env.BJAM)
		if not bjam_exe:
			Logs.error('env.BJAM is not set')
			return -1
		bjam_exe_rel = bjam_exe.relpath_gen(build_root)
		# one '--with-<lib>' per requested target library
		cmd = ([bjam_exe_rel] +
			(['--user-config=' + jam_rel] if jam_rel else []) +
			['--stagedir=' + path.get_bld().path_from(build_root)] +
			['--debug-configuration'] +
			['--with-' + lib for lib in self.generator.target] +
			(['toolset=' + env.BJAM_TOOLSET] if env.BJAM_TOOLSET else []) +
			['link=' + 'shared'] +
			['variant=' + 'release']
		)
		Logs.debug('runner: ' + build_root.srcpath() + '> ' + str(cmd))
		ret = self.exec_command(cmd, cwd=build_root.srcpath())
		if ret != 0:
			return ret
		# expose the staged files as task outputs for the installer task
		self.set_outputs(path.get_bld().ant_glob('lib/*') + path.get_bld().ant_glob('bin/*'))
		return 0

class bjam_installer(Task):
	"""Installs the staged lib/ and bin/ files; symlinks are recreated as such."""
	ext_in = 'install'
	def run(self):
		gen = self.generator
		path = gen.path
		for idir, pat in (('${LIBDIR}', 'lib/*'), ('${BINDIR}', 'bin/*')):
			files = []
			for n in path.get_bld().ant_glob(pat):
				try:
					# readlink raises OSError on regular files, which is how
					# symlinks are separated from plain files here
					t = readlink(n.srcpath())
					gen.bld.symlink_as(sep.join([idir, n.name]), t, postpone=False)
				except OSError:
					files.append(n)
			gen.bld.install_files(idir, files, postpone=False)
		return 0


+ 108
- 0
waflib/extras/blender.py View File

@@ -0,0 +1,108 @@
#!/usr/bin/env python
# encoding: utf-8
# Michal Proszek, 2014 (poxip)

"""
Detect the version of Blender, path
and install the extension:

def options(opt):
opt.load('blender')
def configure(cnf):
cnf.load('blender')
def build(bld):
bld(name='io_mesh_raw',
feature='blender',
files=['file1.py', 'file2.py']
)
If name variable is empty, files are installed in scripts/addons, otherwise scripts/addons/name
Use ./waf configure --system to set the installation directory to system path
"""
import os
import re
from getpass import getuser

from waflib import Utils
from waflib.TaskGen import feature
from waflib.Configure import conf

def options(opt):
	"""Register the -s/--system switch selecting the installation directory."""
	opt.add_option('-s', '--system', dest='directory_system', default=False,
		action='store_true',
		help='determines installation directory (default: user)')

@conf
def find_blender(ctx):
	'''Return version number of blender, if not exist return None'''
	blender = ctx.find_program('blender')
	output = ctx.cmd_and_log(blender + ['--version'])
	match = re.search(r'Blender\s*((\d+(\.|))*)', output)
	if not match:
		ctx.fatal('Could not retrieve blender version')

	try:
		version = match.group(1)
	except IndexError:
		ctx.fatal('Could not retrieve blender version')

	ctx.env['BLENDER_VERSION'] = version
	return blender

@conf
def configure_paths(ctx):
	"""Setup blender paths"""
	# Get the username
	user = getuser()
	_platform = Utils.unversioned_sys_platform()
	# per-platform user/system configuration roots
	config_path = {'user': '', 'system': ''}
	if _platform.startswith('linux'):
		config_path['user'] = '/home/%s/.config/blender/' % user
		config_path['system'] = '/usr/share/blender/'
	elif _platform == 'darwin':
		# MAC OS X
		config_path['user'] = \
			'/Users/%s/Library/Application Support/Blender/' % user
		config_path['system'] = '/Library/Application Support/Blender/'
	elif Utils.is_win32:
		# Windows
		# NOTE(review): confirm ctx.getenv exists on this context; waf contexts
		# normally expose the environment via ctx.environ - TODO verify
		appdata_path = ctx.getenv('APPDATA').replace('\\', '/')
		homedrive = ctx.getenv('HOMEDRIVE').replace('\\', '/')

		config_path['user'] = '%s/Blender Foundation/Blender/' % appdata_path
		config_path['system'] = \
			'%sAll Users/AppData/Roaming/Blender Foundation/Blender/' % homedrive
	else:
		ctx.fatal(
			'Unsupported platform. '
			'Available platforms: Linux, OSX, MS-Windows.'
		)

	blender_version = ctx.env['BLENDER_VERSION']

	# configuration directories are versioned per blender release
	config_path['user'] += blender_version + '/'
	config_path['system'] += blender_version + '/'

	# default to the per-user directory; --system switches to the system one
	# (note: only the user path goes through os.path.abspath)
	ctx.env['BLENDER_CONFIG_DIR'] = os.path.abspath(config_path['user'])
	if ctx.options.directory_system:
		ctx.env['BLENDER_CONFIG_DIR'] = config_path['system']

	ctx.env['BLENDER_ADDONS_DIR'] = os.path.join(
		ctx.env['BLENDER_CONFIG_DIR'], 'scripts/addons'
	)
	Utils.check_dir(ctx.env['BLENDER_ADDONS_DIR'])

def configure(ctx):
	"""Detect blender and prepare the installation directories."""
	ctx.find_blender()
	ctx.configure_paths()

@feature('blender_list')
def blender(self):
	# Two ways to install a blender extension: as a module or just .py files
	# NOTE(review): the module docstring advertises feature='blender' while this
	# method is bound to 'blender_list' - confirm which name is intended
	dest_dir = os.path.join(self.env.BLENDER_ADDONS_DIR, self.get_name())
	Utils.check_dir(dest_dir)
	self.add_install_files(install_to=dest_dir, install_from=getattr(self, 'files', '.'))


+ 81
- 0
waflib/extras/boo.py View File

@@ -0,0 +1,81 @@
#! /usr/bin/env python
# encoding: utf-8
# Yannick LM 2011

"""
Support for the boo programming language, for example::

bld(features = "boo", # necessary feature
source = "src.boo", # list of boo files
gen = "world.dll", # target
type = "library", # library/exe ("-target:xyz" flag)
name = "world" # necessary if the target is referenced by 'use'
)
"""

from waflib import Task
from waflib.Configure import conf
from waflib.TaskGen import feature, after_method, before_method, extension

@extension('.boo')
def boo_hook(self, node):
	# Placeholder so .boo files are accepted by process_source without
	# creating a per-file task; apply_booc handles all sources at once.
	# Nothing here yet ...
	# TODO filter the non-boo source files in 'apply_booc' and remove this method
	pass

@feature('boo')
@before_method('process_source')
def apply_booc(self):
	"""Create a booc task """
	sources = self.to_nodes(self.source)
	target = self.path.find_or_declare(self.gen)

	self.boo_task = self.create_task('booc', sources, [target])

	# Set variables used by the 'booc' task
	self.boo_task.env.OUT = '-o:%s' % target.abspath()
	# type is "exe" by default
	self.boo_task.env.BOO_TARGET_TYPE = "-target:%s" % getattr(self, "type", "exe")

@feature('boo')
@after_method('apply_booc')
def use_boo(self):
	"""
	boo applications honor the **use** keyword: order the boo task after the
	tasks of the used generators, depend on their outputs and reference them.
	"""
	# bug fix: was @after_method('apply_boo'), a method that does not exist,
	# so no ordering was enforced relative to apply_booc
	dep_names = self.to_list(getattr(self, 'use', []))
	for dep_name in dep_names:
		dep_task_gen = self.bld.get_tgen_by_name(dep_name)
		if not dep_task_gen:
			continue
		dep_task_gen.post()
		dep_task = getattr(dep_task_gen, 'boo_task', None)
		if not dep_task:
			# Try a cs task:
			dep_task = getattr(dep_task_gen, 'cs_task', None)
			if not dep_task:
				# Try a link task (bug fix: was getattr(dep_task, ...), which
				# was always None at this point):
				dep_task = getattr(dep_task_gen, 'link_task', None)
				if not dep_task:
					# Abort ...
					continue
		self.boo_task.set_run_after(dep_task) # order
		self.boo_task.dep_nodes.extend(dep_task.outputs) # dependency
		self.boo_task.env.append_value('BOO_FLAGS', '-reference:%s' % dep_task.outputs[0].abspath())

class booc(Task.Task):
	"""Compiles .boo files """
	color = 'YELLOW'
	run_str = '${BOOC} ${BOO_FLAGS} ${BOO_TARGET_TYPE} ${OUT} ${SRC}'

@conf
def check_booc(self):
	# find the boo compiler and set the default flags
	self.find_program('booc', 'BOOC')
	self.env.BOO_FLAGS = ['-nologo']

def configure(self):
	"""Check that booc is available """
	self.check_booc()


+ 525
- 0
waflib/extras/boost.py View File

@@ -0,0 +1,525 @@
#!/usr/bin/env python
# encoding: utf-8
#
# partially based on boost.py written by Gernot Vormayr
# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008
# modified by Bjoern Michaelsen, 2008
# modified by Luca Fossati, 2008
# rewritten for waf 1.5.1, Thomas Nagy, 2008
# rewritten for waf 1.6.2, Sylvain Rouquette, 2011

'''

This is an extra tool, not bundled with the default waf binary.
To add the boost tool to the waf file:
$ ./waf-light --tools=compat15,boost
or, if you have waf >= 1.6.2
$ ./waf update --files=boost

When using this tool, the wscript will look like:

def options(opt):
opt.load('compiler_cxx boost')

def configure(conf):
conf.load('compiler_cxx boost')
conf.check_boost(lib='system filesystem')

def build(bld):
bld(source='main.cpp', target='app', use='BOOST')

Options are generated, in order to specify the location of boost includes/libraries.
The `check_boost` configuration function allows to specify the used boost libraries.
It can also provide default arguments to the --boost-mt command-line arguments.
Everything will be packaged together in a BOOST component that you can use.

When using MSVC, a lot of compilation flags need to match your BOOST build configuration:
- you may have to add /EHsc to your CXXFLAGS or define boost::throw_exception if BOOST_NO_EXCEPTIONS is defined.
Errors: C4530
- boost libraries will try to be smart and use the (pretty but often not useful) auto-linking feature of MSVC
So before calling `conf.check_boost` you might want to disabling by adding
conf.env.DEFINES_BOOST += ['BOOST_ALL_NO_LIB']
Errors:
- boost might also be compiled with /MT, which links the runtime statically.
If you have problems with redefined symbols,
self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
self.env['CXXFLAGS_%s' % var] += ['/MD', '/EHsc']
Passing `--boost-linkage_autodetect` might help ensuring having a correct linkage in some basic cases.

'''

import sys
import re
from waflib import Utils, Logs, Errors
from waflib.Configure import conf
from waflib.TaskGen import feature, after_method

BOOST_LIBS = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib']
BOOST_INCLUDES = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include']
BOOST_VERSION_FILE = 'boost/version.hpp'
BOOST_VERSION_CODE = '''
#include <iostream>
#include <boost/version.hpp>
int main() { std::cout << BOOST_LIB_VERSION << ":" << BOOST_VERSION << std::endl; }
'''

BOOST_ERROR_CODE = '''
#include <boost/system/error_code.hpp>
int main() { boost::system::error_code c; }
'''

PTHREAD_CODE = '''
#include <pthread.h>
static void* f(void*) { return 0; }
int main() {
pthread_t th;
pthread_attr_t attr;
pthread_attr_init(&attr);
pthread_create(&th, &attr, &f, 0);
pthread_join(th, 0);
pthread_cleanup_push(0, 0);
pthread_cleanup_pop(0);
pthread_attr_destroy(&attr);
}
'''

BOOST_THREAD_CODE = '''
#include <boost/thread.hpp>
int main() { boost::thread t; }
'''

BOOST_LOG_CODE = '''
#include <boost/log/trivial.hpp>
#include <boost/log/utility/setup/console.hpp>
#include <boost/log/utility/setup/common_attributes.hpp>
int main() {
using namespace boost::log;
add_common_attributes();
add_console_log(std::clog, keywords::format = "%Message%");
BOOST_LOG_TRIVIAL(debug) << "log is working" << std::endl;
}
'''

# toolsets from {boost_dir}/tools/build/v2/tools/common.jam
PLATFORM = Utils.unversioned_sys_platform()
# some compiler tags depend on the host platform or the configured compiler,
# hence the callables taking the waf env
detect_intel = lambda env: (PLATFORM == 'win32') and 'iw' or 'il'
detect_clang = lambda env: (PLATFORM == 'darwin') and 'clang-darwin' or 'clang'
detect_mingw = lambda env: (re.search('MinGW', env.CXX[0])) and 'mgw' or 'gcc'
# compiler name -> boost library file-name tag (a string or a callable(env))
BOOST_TOOLSETS = {
	'borland': 'bcb',
	'clang': detect_clang,
	'como': 'como',
	'cw': 'cw',
	'darwin': 'xgcc',
	'edg': 'edg',
	'g++': detect_mingw,
	'gcc': detect_mingw,
	'icpc': detect_intel,
	'intel': detect_intel,
	'kcc': 'kcc',
	'kylix': 'bck',
	'mipspro': 'mp',
	'mingw': 'mgw',
	'msvc': 'vc',
	'qcc': 'qcc',
	'sun': 'sw',
	'sunc++': 'sw',
	'tru64cxx': 'tru',
	'vacpp': 'xlc'
}


def options(opt):
	"""
	Register the boost command-line options (--boost-includes, --boost-libs, ...).
	"""
	# note: the parameter is rebound to the option group below
	opt = opt.add_option_group('Boost Options')
	opt.add_option('--boost-includes', type='string',
		default='', dest='boost_includes',
		help='''path to the directory where the boost includes are,
		e.g., /path/to/boost_1_55_0/stage/include''')
	opt.add_option('--boost-libs', type='string',
		default='', dest='boost_libs',
		help='''path to the directory where the boost libs are,
		e.g., path/to/boost_1_55_0/stage/lib''')
	opt.add_option('--boost-mt', action='store_true',
		default=False, dest='boost_mt',
		help='select multi-threaded libraries')
	opt.add_option('--boost-abi', type='string', default='', dest='boost_abi',
		help='''select libraries with tags (gd for debug, static is automatically added),
		see doc Boost, Getting Started, chapter 6.1''')
	opt.add_option('--boost-linkage_autodetect', action="store_true", dest='boost_linkage_autodetect',
		help="auto-detect boost linkage options (don't get used to it / might break other stuff)")
	opt.add_option('--boost-toolset', type='string',
		default='', dest='boost_toolset',
		help='force a toolset e.g. msvc, vc90, \
		gcc, mingw, mgw45 (default: auto)')
	# default to the running interpreter's version for boost_python
	py_version = '%d%d' % (sys.version_info[0], sys.version_info[1])
	opt.add_option('--boost-python', type='string',
		default=py_version, dest='boost_python',
		help='select the lib python with this version \
		(default: %s)' % py_version)


@conf
def __boost_get_version_file(self, d):
	"""Return the node for boost/version.hpp under directory *d*, or None."""
	if not d:
		return None
	dnode = self.root.find_dir(d)
	return dnode.find_node(BOOST_VERSION_FILE) if dnode else None

@conf
def boost_get_version(self, d):
	"""silently retrieve the boost version number"""
	# prefer parsing boost/version.hpp directly; fall back to compiling and
	# running a small test program when the header cannot be read or parsed
	node = self.__boost_get_version_file(d)
	if node:
		try:
			txt = node.read()
		except EnvironmentError:
			Logs.error("Could not read the file %r", node.abspath())
		else:
			re_but1 = re.compile('^#define\\s+BOOST_LIB_VERSION\\s+"(.+)"', re.M)
			m1 = re_but1.search(txt)
			re_but2 = re.compile('^#define\\s+BOOST_VERSION\\s+(\\d+)', re.M)
			m2 = re_but2.search(txt)
			if m1 and m2:
				# e.g. ('1_55', '105500')
				return (m1.group(1), m2.group(1))
	# the test program prints "LIB_VERSION:VERSION", hence the split
	return self.check_cxx(fragment=BOOST_VERSION_CODE, includes=[d], execute=True, define_ret=True).split(":")

@conf
def boost_get_includes(self, *k, **kw):
	"""Return the first include directory containing boost/version.hpp."""
	includes = k and k[0] or kw.get('includes')
	if includes and self.__boost_get_version_file(includes):
		return includes
	# fall back to the INCLUDE environment variable and well-known prefixes
	for candidate in self.environ.get('INCLUDE', '').split(';') + BOOST_INCLUDES:
		if self.__boost_get_version_file(candidate):
			return candidate
	if includes:
		self.end_msg('headers not found in %s' % includes)
	else:
		self.end_msg('headers not found, please provide a --boost-includes argument (see help)')
	self.fatal('The configuration failed')


@conf
def boost_get_toolset(self, cc):
	"""Map a compiler name to the boost toolset tag used in library file names."""
	toolset = cc
	if not cc:
		build_platform = Utils.unversioned_sys_platform()
		cc = build_platform if build_platform in BOOST_TOOLSETS else self.env.CXX_NAME
	if cc in BOOST_TOOLSETS:
		toolset = BOOST_TOOLSETS[cc]
	# entries are either plain strings or callables taking the env
	if isinstance(toolset, str):
		return toolset
	return toolset(self.env)


@conf
def __boost_get_libs_path(self, *k, **kw):
	''' return the lib path and all the files in it '''
	# an explicit 'files' list short-circuits the search entirely
	if 'files' in kw:
		return self.root.find_dir('.'), Utils.to_list(kw['files'])
	libs = k and k[0] or kw.get('libs')
	if libs:
		# NOTE(review): if the given directory does not exist, find_dir
		# returns None and ant_glob below would fail - TODO confirm intent
		path = self.root.find_dir(libs)
		files = path.ant_glob('*boost_*')
	if not libs or not files:
		# search the LIB environment variable, then the well-known prefixes,
		# trying both 'dir' and 'dir64' variants
		for d in self.environ.get('LIB', '').split(';') + BOOST_LIBS:
			if not d:
				continue
			path = self.root.find_dir(d)
			if path:
				files = path.ant_glob('*boost_*')
				if files:
					break
			path = self.root.find_dir(d + '64')
			if path:
				files = path.ant_glob('*boost_*')
				if files:
					break
	if not path:
		if libs:
			self.end_msg('libs not found in %s' % libs)
			self.fatal('The configuration failed')
		else:
			self.end_msg('libs not found, please provide a --boost-libs argument (see help)')
			self.fatal('The configuration failed')

	self.to_log('Found the boost path in %r with the libraries:' % path)
	for x in files:
		self.to_log(' %r' % x)
	return path, files

@conf
def boost_get_libs(self, *k, **kw):
	'''
	return the lib path and the required libs
	according to the parameters
	'''
	# NOTE(review): positional *k is not forwarded here, only **kw - confirm
	path, files = self.__boost_get_libs_path(**kw)
	# longest (most specific) file names first so the strictest match wins
	files = sorted(files, key=lambda f: (len(f.name), f.name), reverse=True)
	toolset = self.boost_get_toolset(kw.get('toolset', ''))
	toolset_pat = '(-%s[0-9]{0,3})' % toolset
	version = '-%s' % self.env.BOOST_VERSION

	def find_lib(re_lib, files):
		# first file whose name matches the pattern, or None
		for file in files:
			if re_lib.search(file.name):
				self.to_log('Found boost lib %s' % file)
				return file
		return None

	def format_lib_name(name):
		# 'libboost_foo.so' -> 'boost_foo' (msvc keeps the 'lib' prefix)
		if name.startswith('lib') and self.env.CC_NAME != 'msvc':
			name = name[3:]
		return name[:name.rfind('.')]

	def match_libs(lib_names, is_static):
		# resolve each requested boost library name to an actual file name,
		# taking the -mt / abi tags and static/shared patterns into account
		libs = []
		lib_names = Utils.to_list(lib_names)
		if not lib_names:
			return libs
		t = []
		if kw.get('mt', False):
			t.append('-mt')
		if kw.get('abi'):
			t.append('%s%s' % (is_static and '-s' or '-', kw['abi']))
		elif is_static:
			t.append('-s')
		tags_pat = t and ''.join(t) or ''
		ext = is_static and self.env.cxxstlib_PATTERN or self.env.cxxshlib_PATTERN
		ext = ext.partition('%s')[2] # remove '%s' or 'lib%s' from PATTERN

		for lib in lib_names:
			if lib == 'python':
				# for instance, with python='27',
				# accepts '-py27', '-py2', '27', '-2.7' and '2'
				# but will reject '-py3', '-py26', '26' and '3'
				tags = '({0})?((-py{2})|(-py{1}(?=[^0-9]))|({2})|(-{1}.{3})|({1}(?=[^0-9]))|(?=[^0-9])(?!-py))'.format(tags_pat, kw['python'][0], kw['python'], kw['python'][1])
			else:
				tags = tags_pat
			# Trying libraries, from most strict match to least one
			for pattern in ['boost_%s%s%s%s%s$' % (lib, toolset_pat, tags, version, ext),
				'boost_%s%s%s%s$' % (lib, tags, version, ext),
				# Give up trying to find the right version
				'boost_%s%s%s%s$' % (lib, toolset_pat, tags, ext),
				'boost_%s%s%s$' % (lib, tags, ext),
				'boost_%s%s$' % (lib, ext),
				'boost_%s' % lib]:
				self.to_log('Trying pattern %s' % pattern)
				file = find_lib(re.compile(pattern), files)
				if file:
					libs.append(format_lib_name(file.name))
					break
			else:
				# no pattern matched at all for this library
				self.end_msg('lib %s not found in %s' % (lib, path.abspath()))
				self.fatal('The configuration failed')
		return libs

	return path.abspath(), match_libs(kw.get('lib'), False), match_libs(kw.get('stlib'), True)

@conf
def _check_pthread_flag(self, *k, **kw):
	'''
	Computes which flags should be added to CXXFLAGS and LINKFLAGS to compile in multi-threading mode

	Yes, we *need* to put the -pthread thing in CPPFLAGS because with GCC3,
	boost/thread.hpp will trigger a #error if -pthread isn't used:
	boost/config/requires_threads.hpp:47:5: #error "Compiler threading support
	is not turned on. Please set the correct command line options for
	threading: -pthread (Linux), -pthreads (Solaris) or -mthreads (Mingw32)"

	Based on _BOOST_PTHREAD_FLAG(): https://github.com/tsuna/boost.m4/blob/master/build-aux/boost.m4
	'''

	var = kw.get('uselib_store', 'BOOST')

	self.start_msg('Checking the flags needed to use pthreads')

	# The ordering *is* (sometimes) important. Some notes on the
	# individual items follow:
	# (none): in case threads are in libc; should be tried before -Kthread and
	# other compiler flags to prevent continual compiler warnings
	# -lpthreads: AIX (must check this before -lpthread)
	# -Kthread: Sequent (threads in libc, but -Kthread needed for pthread.h)
	# -kthread: FreeBSD kernel threads (preferred to -pthread since SMP-able)
	# -llthread: LinuxThreads port on FreeBSD (also preferred to -pthread)
	# -pthread: GNU Linux/GCC (kernel threads), BSD/GCC (userland threads)
	# -pthreads: Solaris/GCC
	# -mthreads: MinGW32/GCC, Lynx/GCC
	# -mt: Sun Workshop C (may only link SunOS threads [-lthread], but it
	# doesn't hurt to check since this sometimes defines pthreads too;
	# also defines -D_REENTRANT)
	# ... -mt is also the pthreads flag for HP/aCC
	# -lpthread: GNU Linux, etc.
	# --thread-safe: KAI C++
	if Utils.unversioned_sys_platform() == "sunos":
		# On Solaris (at least, for some versions), libc contains stubbed
		# (non-functional) versions of the pthreads routines, so link-based
		# tests will erroneously succeed. (We need to link with -pthreads/-mt/
		# -lpthread.) (The stubs are missing pthread_cleanup_push, or rather
		# a function called by this macro, so we could check for that, but
		# who knows whether they'll stub that too in a future libc.) So,
		# we'll just look for -pthreads and -lpthread first:
		boost_pthread_flags = ["-pthreads", "-lpthread", "-mt", "-pthread"]
	else:
		boost_pthread_flags = ["", "-lpthreads", "-Kthread", "-kthread", "-llthread", "-pthread",
			"-pthreads", "-mthreads", "-lpthread", "--thread-safe", "-mt"]

	# try each candidate flag with a small compile+link test; the first flag
	# that works is kept in the env, a failure reverts the env and moves on
	for boost_pthread_flag in boost_pthread_flags:
		try:
			self.env.stash()
			self.env.append_value('CXXFLAGS_%s' % var, boost_pthread_flag)
			self.env.append_value('LINKFLAGS_%s' % var, boost_pthread_flag)
			self.check_cxx(code=PTHREAD_CODE, msg=None, use=var, execute=False)

			self.end_msg(boost_pthread_flag)
			return
		except self.errors.ConfigurationError:
			self.env.revert()
	# no flag worked; the caller proceeds without threading flags
	self.end_msg('None')

@conf
def check_boost(self, *k, **kw):
	"""
	Initialize boost libraries to be used.

	Keywords: you can pass the same parameters as with the command line (without "--boost-").
	Note that the command line has the priority, and should preferably be used.
	"""
	if not self.env['CXX']:
		self.fatal('load a c++ compiler first, conf.load("compiler_cxx")')

	# merge the command-line options into the parameters; non-empty
	# command-line values take precedence over the wscript keywords
	params = {
		'lib': k and k[0] or kw.get('lib'),
		'stlib': kw.get('stlib')
	}
	for key, value in self.options.__dict__.items():
		if not key.startswith('boost_'):
			continue
		key = key[len('boost_'):]
		params[key] = value and value or kw.get(key, '')

	var = kw.get('uselib_store', 'BOOST')

	# on debian-like systems the libraries may live in a multiarch directory
	self.find_program('dpkg-architecture', var='DPKG_ARCHITECTURE', mandatory=False)
	if self.env.DPKG_ARCHITECTURE:
		deb_host_multiarch = self.cmd_and_log([self.env.DPKG_ARCHITECTURE[0], '-qDEB_HOST_MULTIARCH'])
		BOOST_LIBS.insert(0, '/usr/lib/%s' % deb_host_multiarch.strip())

	self.start_msg('Checking boost includes')
	self.env['INCLUDES_%s' % var] = inc = self.boost_get_includes(**params)
	versions = self.boost_get_version(inc)
	self.env.BOOST_VERSION = versions[0]
	self.env.BOOST_VERSION_NUMBER = int(versions[1])
	# '%d' truncates the float results of the '/' divisions below
	self.end_msg("%d.%d.%d" % (int(versions[1]) / 100000,
		int(versions[1]) / 100 % 1000,
		int(versions[1]) % 100))
	if Logs.verbose:
		Logs.pprint('CYAN', ' path : %s' % self.env['INCLUDES_%s' % var])

	if not params['lib'] and not params['stlib']:
		return
	if 'static' in kw or 'static' in params:
		Logs.warn('boost: static parameter is deprecated, use stlib instead.')
	self.start_msg('Checking boost libs')
	path, libs, stlibs = self.boost_get_libs(**params)
	self.env['LIBPATH_%s' % var] = [path]
	self.env['STLIBPATH_%s' % var] = [path]
	self.env['LIB_%s' % var] = libs
	self.env['STLIB_%s' % var] = stlibs
	self.end_msg('ok')
	if Logs.verbose:
		Logs.pprint('CYAN', ' path : %s' % path)
		Logs.pprint('CYAN', ' shared libs : %s' % libs)
		Logs.pprint('CYAN', ' static libs : %s' % stlibs)

	# small predicates over the requested library names
	def has_shlib(lib):
		return params['lib'] and lib in params['lib']
	def has_stlib(lib):
		return params['stlib'] and lib in params['stlib']
	def has_lib(lib):
		return has_shlib(lib) or has_stlib(lib)
	if has_lib('thread'):
		# not inside try_link to make check visible in the output
		# NOTE(review): k and kw are passed positionally into *k here, so
		# _check_pthread_flag always uses the default 'BOOST' store - confirm
		self._check_pthread_flag(k, kw)

	def try_link():
		# compile/link checks for the libraries that commonly fail to link
		if has_lib('system'):
			self.check_cxx(fragment=BOOST_ERROR_CODE, use=var, execute=False)
		if has_lib('thread'):
			self.check_cxx(fragment=BOOST_THREAD_CODE, use=var, execute=False)
		if has_lib('log'):
			if not has_lib('thread'):
				self.env['DEFINES_%s' % var] += ['BOOST_LOG_NO_THREADS']
			if has_shlib('log'):
				self.env['DEFINES_%s' % var] += ['BOOST_LOG_DYN_LINK']
			self.check_cxx(fragment=BOOST_LOG_CODE, use=var, execute=False)

	if params.get('linkage_autodetect', False):
		self.start_msg("Attempting to detect boost linkage flags")
		toolset = self.boost_get_toolset(kw.get('toolset', ''))
		if toolset in ('vc',):
			# disable auto-linking feature, causing error LNK1181
			# because the code wants to be linked against
			self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']

			# if no dlls are present, we guess the .lib files are not stubs
			has_dlls = False
			for x in Utils.listdir(path):
				if x.endswith(self.env.cxxshlib_PATTERN % ''):
					has_dlls = True
					break
			if not has_dlls:
				self.env['STLIBPATH_%s' % var] = [path]
				self.env['STLIB_%s' % var] = libs
				del self.env['LIB_%s' % var]
				del self.env['LIBPATH_%s' % var]

			# we attempt to play with some known-to-work CXXFLAGS combinations
			for cxxflags in (['/MD', '/EHsc'], []):
				self.env.stash()
				self.env["CXXFLAGS_%s" % var] += cxxflags
				try:
					try_link()
				except Errors.ConfigurationError as e:
					self.env.revert()
					exc = e
				else:
					self.end_msg("ok: winning cxxflags combination: %s" % (self.env["CXXFLAGS_%s" % var]))
					exc = None
					self.env.commit()
					break

			if exc is not None:
				self.end_msg("Could not auto-detect boost linking flags combination, you may report it to boost.py author", ex=exc)
				self.fatal('The configuration failed')
		else:
			self.end_msg("Boost linkage flags auto-detection not implemented (needed ?) for this toolchain")
			self.fatal('The configuration failed')
	else:
		self.start_msg('Checking for boost linkage')
		try:
			try_link()
		except Errors.ConfigurationError as e:
			self.end_msg("Could not link against boost libraries using supplied options")
			self.fatal('The configuration failed')
		self.end_msg('ok')


@feature('cxx')
@after_method('apply_link')
def install_boost(self):
	"""
	On win32, copy the boost shared libraries next to the installed binaries
	during an 'install' command. Runs at most once per build, guarded by the
	function attribute 'done' (initialized to False below).
	"""
	if install_boost.done or not Utils.is_win32 or not self.bld.cmd.startswith('install'):
		return
	install_boost.done = True
	inst_to = getattr(self, 'install_path', '${BINDIR}')
	for lib in self.env.LIB_BOOST:
		try:
			file = self.bld.find_file(self.env.cxxshlib_PATTERN % lib, self.env.LIBPATH_BOOST)
			self.add_install_files(install_to=inst_to, install_from=self.bld.root.find_node(file))
		except Exception:
			# bug fix: was a bare 'except:' which also swallowed
			# KeyboardInterrupt/SystemExit; best-effort skip of missing libs
			continue
install_boost.done = False


+ 7
- 11
waflib/extras/build_file_tracker.py View File

@@ -8,25 +8,21 @@ want to use this to force partial rebuilds, see playground/track_output_files/ f

Note that there is a variety of ways to implement this, one may want use timestamps on source files too for example,
or one may want to hash the files in the source directory only under certain conditions (md5_tstamp tool)
or to hash the file in the build directory with its timestamp (similar to 'update_outputs')
or to hash the file in the build directory with its timestamp
"""

import os
from waflib import Node, Utils

def get_bld_sig(self):
if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
return Utils.h_file(self.abspath())

try:
return self.cache_sig
# add the creation time to the signature
return self.sig + str(os.stat(self.abspath()).st_mtime)
except AttributeError:
pass

if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
self.sig = Utils.h_file(self.abspath())
self.cache_sig = ret = self.sig
else:
# add the
self.cache_sig = ret = self.sig + str(os.stat(self.abspath()).st_mtime)
return ret
return None

Node.Node.get_bld_sig = get_bld_sig


+ 3
- 4
waflib/extras/build_logs.py View File

@@ -17,7 +17,7 @@ try:
up = os.path.dirname(Context.g_module.__file__)
except AttributeError:
up = '.'
LOGFILE = os.path.join(up, 'logs', '%s.log' % time.strftime('%Y_%m_%d_%H_%M'))
LOGFILE = os.path.join(up, 'logs', time.strftime('%Y_%m_%d_%H_%M.log'))

wlock = threading.Lock()
class log_to_file(object):
@@ -28,7 +28,7 @@ class log_to_file(object):
self.filename = filename
self.is_valid = True
def replace_colors(self, data):
for x in Logs.colors_lst.values():
for x in Logs.colors_lst.values():
if isinstance(x, str):
data = data.replace(x, '')
return data
@@ -96,7 +96,7 @@ def exit_cleanup():
fileobj.close()
filename = sys.stdout.filename

Logs.info('Output logged to %r' % filename)
Logs.info('Output logged to %r', filename)

# then copy the log file to "latest.log" if possible
up = os.path.dirname(os.path.abspath(filename))
@@ -104,7 +104,6 @@ def exit_cleanup():
shutil.copy(filename, os.path.join(up, 'latest.log'))
except OSError:
# this may fail on windows due to processes spawned
#
pass

atexit.register(exit_cleanup)


+ 82
- 0
waflib/extras/buildcopy.py View File

@@ -0,0 +1,82 @@
#! /usr/bin/env python
# encoding: utf-8
# Calle Rosenquist, 2017 (xbreak)
"""
Create task that copies source files to the associated build node.
This is useful to e.g. construct a complete Python package so it can be unit tested
without installation.

Source files to be copied can be specified either in `buildcopy_source` attribute, or
`source` attribute. If both are specified `buildcopy_source` has priority.

Examples::

def build(bld):
bld(name = 'bar',
features = 'py buildcopy',
source = bld.path.ant_glob('src/bar/*.py'))

bld(name = 'py baz',
features = 'buildcopy',
buildcopy_source = bld.path.ant_glob('src/bar/*.py') + ['src/bar/resource.txt'])

"""
import os, shutil
from waflib import Errors, Task, TaskGen, Utils, Node

@TaskGen.before_method('process_source')
@TaskGen.feature('buildcopy')
def make_buildcopy(self):
	"""
	Creates the buildcopy task.
	"""
	def to_src_nodes(lst):
		"""Find file nodes only in src, TaskGen.to_nodes will not work for this since it gives
		preference to nodes in build.
		"""
		# already a Node: validate it is a file in the source tree
		if isinstance(lst, Node.Node):
			if not lst.is_src():
				raise Errors.WafError('buildcopy: node %s is not in src'%lst)
			if not os.path.isfile(lst.abspath()):
				raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%lst)
			return lst

		# otherwise resolve a path string relative to the source directory,
		# first among known nodes, then on the filesystem
		if isinstance(lst, str):
			lst = [x for x in Utils.split_path(lst) if x and x != '.']

		node = self.bld.path.get_src().search_node(lst)
		if node:
			if not os.path.isfile(node.abspath()):
				raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%node)
			return node

		node = self.bld.path.get_src().find_node(lst)
		if node:
			if not os.path.isfile(node.abspath()):
				raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%node)
			return node
		raise Errors.WafError('buildcopy: File not found in src: %s'%os.path.join(*lst))

	# buildcopy_source takes priority over source (see module docstring)
	nodes = [ to_src_nodes(n) for n in getattr(self, 'buildcopy_source', getattr(self, 'source', [])) ]
	node_pairs = [(n, n.get_bld()) for n in nodes]
	self.create_task('buildcopy', [n[0] for n in node_pairs], [n[1] for n in node_pairs], node_pairs=node_pairs)


class buildcopy(Task.Task):
	"""
	Copy for each pair `n` in `node_pairs`: n[0] -> n[1].

	Attribute `node_pairs` should contain a list of tuples describing source and target:

		node_pairs = [(in, out), ...]

	"""
	color = 'PINK'

	def keyword(self):
		return 'Copying'

	def run(self):
		# copy each source node to its build counterpart, creating the
		# destination directory on demand
		for src, dst in self.node_pairs:
			dst.parent.mkdir()
			shutil.copy2(src.abspath(), dst.abspath())

+ 72
- 0
waflib/extras/c_dumbpreproc.py View File

@@ -0,0 +1,72 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)

"""
Dumb C/C++ preprocessor for finding dependencies

It will look at all include files it can find after removing the comments, so the following
will always add the dependency on both "a.h" and "b.h"::

#include "a.h"
#ifdef B
#include "b.h"
#endif
int main() {
return 0;
}

To use::

def configure(conf):
conf.load('compiler_c')
conf.load('c_dumbpreproc')
"""

import re
from waflib.Tools import c_preproc

re_inc = re.compile(
'^[ \t]*(#|%:)[ \t]*(include)[ \t]*[<"](.*)[>"]\r*$',
re.IGNORECASE | re.MULTILINE)

def lines_includes(node):
	# Strip trigraphs, line continuations and comments from the file, then
	# return every (keyword, path) include tuple found, regardless of any
	# surrounding preprocessor conditionals (hence "dumb").
	code = node.read()
	if c_preproc.use_trigraphs:
		for (a, b) in c_preproc.trig_def:
			code = code.split(a).join(b)
	code = c_preproc.re_nl.sub('', code)
	code = c_preproc.re_cpp.sub(c_preproc.repl, code)
	return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)]

# keep a reference to the original parser class before replacing it below
parser = c_preproc.c_parser
class dumb_parser(parser):
	# Parser that records every include it encounters, ignoring #ifdef logic.
	def addlines(self, node):
		if node in self.nodes[:-1]:
			return
		self.currentnode_stack.append(node.parent)

		# Avoid reading the same files again
		try:
			lines = self.parse_cache[node]
		except KeyError:
			lines = self.parse_cache[node] = lines_includes(node)

		self.lines = lines + [(c_preproc.POPFILE, '')] + self.lines

	def start(self, node, env):
		# the include cache is shared through the build context
		try:
			self.parse_cache = node.ctx.parse_cache
		except AttributeError:
			self.parse_cache = node.ctx.parse_cache = {}

		self.addlines(node)
		while self.lines:
			(x, y) = self.lines.pop(0)
			if x == c_preproc.POPFILE:
				self.currentnode_stack.pop()
				continue
			self.tryfind(y)

# replace the default C preprocessor with the dumb variant
c_preproc.c_parser = dumb_parser


+ 87
- 0
waflib/extras/c_emscripten.py View File

@@ -0,0 +1,87 @@
#!/usr/bin/env python
# -*- coding: utf-8 vi:ts=4:noexpandtab

import subprocess, shlex, sys

from waflib.Tools import ccroot, gcc, gxx
from waflib.Configure import conf
from waflib.TaskGen import after_method, feature

from waflib.Tools.compiler_c import c_compiler
from waflib.Tools.compiler_cxx import cxx_compiler

# register this tool as a candidate C/C++ compiler on the supported platforms
for supported_os in ('linux', 'darwin', 'gnu', 'aix'):
	c_compiler[supported_os].append('c_emscripten')
	cxx_compiler[supported_os].append('c_emscripten')


@conf
def get_emscripten_version(conf, cc):
	"""
	Emscripten doesn't support processing '-' like clang/gcc
	"""

	# preprocess an empty real file instead of stdin to dump the macros
	dummy = conf.cachedir.parent.make_node("waf-emscripten.c")
	dummy.write("")
	cmd = cc + ['-dM', '-E', '-x', 'c', dummy.abspath()]
	env = conf.env.env or None
	try:
		proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
		out = proc.communicate()[0]
	except Exception as e:
		conf.fatal('Could not determine emscripten version %r: %s' % (cmd, e))

	if not isinstance(out, str):
		out = out.decode(sys.stdout.encoding or 'latin-1')

	# parse '#define NAME VALUE' lines into a mapping
	defines = {}
	for line in out.splitlines():
		tokens = shlex.split(line)
		if len(tokens) > 2:
			defines[tokens[1]] = tokens[2]

	if '__clang__' not in defines or 'EMSCRIPTEN' not in defines:
		conf.fatal('Could not determine the emscripten compiler version.')

	conf.env.DEST_OS = 'generic'
	conf.env.DEST_BINFMT = 'elf'
	conf.env.DEST_CPU = 'asm-js'
	conf.env.CC_VERSION = (defines['__clang_major__'], defines['__clang_minor__'], defines['__clang_patchlevel__'])
	return defines

@conf
def find_emscripten(conf):
	"""
	Locate emcc/em++/emar and store them as CC/CXX/AR; validates the
	compiler found really is emscripten via get_emscripten_version().
	"""
	cc = conf.find_program(['emcc'], var='CC')
	conf.get_emscripten_version(cc)
	conf.env.CC = cc
	conf.env.CC_NAME = 'emscripten'
	cxx = conf.find_program(['em++'], var='CXX')
	conf.env.CXX = cxx
	conf.env.CXX_NAME = 'emscripten'
	conf.find_program(['emar'], var='AR')

def configure(conf):
	"""
	Configuration entry point: detect emscripten, load the common gcc/g++
	flag machinery, and adjust output naming so that programs become .html
	and shared libraries become .js.
	"""
	conf.find_emscripten()
	conf.find_ar()
	conf.gcc_common_flags()
	conf.gxx_common_flags()
	conf.cc_load_tools()
	conf.cc_add_flags()
	conf.cxx_load_tools()
	conf.cxx_add_flags()
	conf.link_add_flags()
	conf.env.ARFLAGS = ['rcs']
	# emscripten-specific output naming
	conf.env.cshlib_PATTERN = '%s.js'
	conf.env.cxxshlib_PATTERN = '%s.js'
	conf.env.cstlib_PATTERN = '%s.a'
	conf.env.cxxstlib_PATTERN = '%s.a'
	conf.env.cprogram_PATTERN = '%s.html'
	conf.env.cxxprogram_PATTERN = '%s.html'
	# emcc requires a space between -o and the target
	conf.env.CXX_TGT_F = ['-c', '-o', '']
	conf.env.CC_TGT_F = ['-c', '-o', '']
	conf.env.CXXLNK_TGT_F = ['-o', '']
	conf.env.CCLNK_TGT_F = ['-o', '']
	conf.env.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])

+ 36
- 33
waflib/extras/c_nec.py View File

@@ -24,43 +24,46 @@ def find_sxc(conf):

@conf
def get_sxc_version(conf, fc):
version_re = re.compile(r"C\+\+/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
cmd = fc + ['-V']
p = Utils.subprocess.Popen(cmd, stdin=False, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=None)
out, err = p.communicate()
version_re = re.compile(r"C\+\+/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
cmd = fc + ['-V']
p = Utils.subprocess.Popen(cmd, stdin=False, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=None)
out, err = p.communicate()

if out: match = version_re(out)
else: match = version_re(err)
if not match:
conf.fatal('Could not determine the NEC C compiler version.')
k = match.groupdict()
conf.env['C_VERSION'] = (k['major'], k['minor'])
if out:
match = version_re(out)
else:
match = version_re(err)
if not match:
conf.fatal('Could not determine the NEC C compiler version.')
k = match.groupdict()
conf.env['C_VERSION'] = (k['major'], k['minor'])

@conf
def sxc_common_flags(conf):
v=conf.env
v['CC_SRC_F']=[]
v['CC_TGT_F']=['-c','-o']
if not v['LINK_CC']:v['LINK_CC']=v['CC']
v['CCLNK_SRC_F']=[]
v['CCLNK_TGT_F']=['-o']
v['CPPPATH_ST']='-I%s'
v['DEFINES_ST']='-D%s'
v['LIB_ST']='-l%s'
v['LIBPATH_ST']='-L%s'
v['STLIB_ST']='-l%s'
v['STLIBPATH_ST']='-L%s'
v['RPATH_ST']=''
v['SONAME_ST']=[]
v['SHLIB_MARKER']=[]
v['STLIB_MARKER']=[]
v['LINKFLAGS_cprogram']=['']
v['cprogram_PATTERN']='%s'
v['CFLAGS_cshlib']=['-fPIC']
v['LINKFLAGS_cshlib']=['']
v['cshlib_PATTERN']='lib%s.so'
v['LINKFLAGS_cstlib']=[]
v['cstlib_PATTERN']='lib%s.a'
v=conf.env
v['CC_SRC_F']=[]
v['CC_TGT_F']=['-c','-o']
if not v['LINK_CC']:
v['LINK_CC']=v['CC']
v['CCLNK_SRC_F']=[]
v['CCLNK_TGT_F']=['-o']
v['CPPPATH_ST']='-I%s'
v['DEFINES_ST']='-D%s'
v['LIB_ST']='-l%s'
v['LIBPATH_ST']='-L%s'
v['STLIB_ST']='-l%s'
v['STLIBPATH_ST']='-L%s'
v['RPATH_ST']=''
v['SONAME_ST']=[]
v['SHLIB_MARKER']=[]
v['STLIB_MARKER']=[]
v['LINKFLAGS_cprogram']=['']
v['cprogram_PATTERN']='%s'
v['CFLAGS_cshlib']=['-fPIC']
v['LINKFLAGS_cshlib']=['']
v['cshlib_PATTERN']='lib%s.so'
v['LINKFLAGS_cstlib']=[]
v['cstlib_PATTERN']='lib%s.a'

def configure(conf):
conf.find_sxc()


+ 152
- 0
waflib/extras/cabal.py View File

@@ -0,0 +1,152 @@
#!/usr/bin/env python
# encoding: utf-8
# Anton Feldmann, 2012
# "Base for cabal"
from waflib import Task, Utils
from waflib.TaskGen import extension
from waflib.Utils import threading
from shutil import rmtree
# guards `registering`: only one ghc-pkg registration may run at a time
lock = threading.Lock()
registering = False
def configure(self):
	"""
	Find cabal/ghc-pkg and (re)create the local GHC package database
	under the build directory.
	"""
	self.find_program('cabal', var='CABAL')
	self.find_program('ghc-pkg', var='GHCPKG')
	pkgconfd = self.bldnode.abspath() + '/package.conf.d'
	self.env.PREFIX = self.bldnode.abspath() + '/dist'
	self.env.PKGCONFD = pkgconfd
	if self.root.find_node(pkgconfd + '/package.cache'):
		self.msg('Using existing package database', pkgconfd, color='CYAN')
	else:
		pkgdir = self.root.find_dir(pkgconfd)
		if pkgdir:
			# a database directory without package.cache is unusable: wipe it
			self.msg('Deleting corrupt package database', pkgdir.abspath(), color='RED')
			rmtree(pkgdir.abspath())
			pkgdir = None
		self.cmd_and_log(self.env.GHCPKG + ['init', pkgconfd])
		self.msg('Created package database', pkgconfd, color='YELLOW' if pkgdir else 'GREEN')
@extension('.cabal')
def process_cabal(self, node):
	"""
	Task generator callback for .cabal files: build the configure/build/copy
	(and optionally register) task chain for one cabal package.

	:param node: the .cabal file node
	:return: the list of tasks created
	"""
	out_dir_node = self.bld.root.find_dir(self.bld.out_dir)
	# a .package marker node signals completion to dependent packages
	package_node = node.change_ext('.package')
	package_node = out_dir_node.find_or_declare(package_node.name)
	build_node = node.parent.get_bld()
	build_path = build_node.abspath()
	config_node = build_node.find_or_declare('setup-config')
	inplace_node = build_node.find_or_declare('package.conf.inplace')

	# 1. cabal configure
	config_task = self.create_task('cabal_configure', node)
	config_task.cwd = node.parent.abspath()
	config_task.depends_on = getattr(self, 'depends_on', '')
	config_task.build_path = build_path
	config_task.set_outputs(config_node)

	# 2. cabal build
	build_task = self.create_task('cabal_build', config_node)
	build_task.cwd = node.parent.abspath()
	build_task.build_path = build_path
	build_task.set_outputs(inplace_node)

	# 3. cabal copy
	copy_task = self.create_task('cabal_copy', inplace_node)
	copy_task.cwd = node.parent.abspath()
	copy_task.depends_on = getattr(self, 'depends_on', '')
	copy_task.build_path = build_path

	last_task = copy_task
	task_list = [config_task, build_task, copy_task]

	# 4. optional registration into the package database
	if (getattr(self, 'register', False)):
		register_task = self.create_task('cabal_register', inplace_node)
		register_task.cwd = node.parent.abspath()
		register_task.set_run_after(copy_task)
		register_task.build_path = build_path

		pkgreg_task = self.create_task('ghcpkg_register', inplace_node)
		pkgreg_task.cwd = node.parent.abspath()
		pkgreg_task.set_run_after(register_task)
		pkgreg_task.build_path = build_path

		last_task = pkgreg_task
		task_list += [register_task, pkgreg_task]

	# 5. touch the .package marker once everything else is done
	touch_task = self.create_task('cabal_touch', inplace_node)
	touch_task.set_run_after(last_task)
	touch_task.set_outputs(package_node)
	touch_task.build_path = build_path
	task_list += [touch_task]

	return task_list
def get_all_src_deps(node):
	"""
	Return every source file cabal may consume below *node*.

	:param node: directory node, scanned recursively with ``ant_glob``
	:return: flat list of file nodes (hs, hsc, lhs, c, cpp, proto - in that order)
	"""
	patterns = ('**/*.hs', '**/*.hsc', '**/*.lhs', '**/*.c', '**/*.cpp', '**/*.proto')
	deps = []
	for pattern in patterns:
		# extend() keeps this linear; the previous sum(list_of_lists, [])
		# rebuilt the accumulator for every pattern (quadratic)
		deps.extend(node.ant_glob(pattern))
	return deps
class Cabal(Task.Task):
	"""Base class for cabal tasks: rebuild whenever any package source changes."""
	def scan(self):
		# implicit dependencies: every source file under the generator's path
		return (get_all_src_deps(self.generator.path), ())
class cabal_configure(Cabal):
	"""Runs ``cabal configure`` against the local package database."""
	run_str = '${CABAL} configure -v0 --prefix=${PREFIX} --global --user --package-db=${PKGCONFD} --builddir=${tsk.build_path}'
	shell = True
	def scan(self):
		# depend on the .package markers of the packages listed in depends_on
		out_node = self.generator.bld.root.find_dir(self.generator.bld.out_dir)
		deps = [out_node.find_or_declare(dep).change_ext('.package') for dep in Utils.to_list(self.depends_on)]
		return (deps, ())
class cabal_build(Cabal):
	"""Runs ``cabal build`` in the package's build directory."""
	run_str = '${CABAL} build -v1 --builddir=${tsk.build_path}/'
	shell = True
class cabal_copy(Cabal):
	"""Runs ``cabal copy`` to install the build products under PREFIX."""
	run_str = '${CABAL} copy -v0 --builddir=${tsk.build_path}'
	shell = True
class cabal_register(Cabal):
	"""Runs ``cabal register`` to generate pkg.config for ghc-pkg."""
	run_str = '${CABAL} register -v0 --gen-pkg-config=${tsk.build_path}/pkg.config --builddir=${tsk.build_path}'
	shell = True
class ghcpkg_register(Cabal):
	"""
	Registers the package into the shared GHC package database.

	Registrations are serialized through the module-level ``lock`` and
	``registering`` flag: concurrent ``ghc-pkg update`` calls would corrupt
	the database.
	"""
	run_str = '${GHCPKG} update -v0 --global --user --package-conf=${PKGCONFD} ${tsk.build_path}/pkg.config'
	shell = True

	def runnable_status(self):
		"""Defer execution while another registration is in flight."""
		global registering
		# `with lock:` guarantees release even on error, unlike the previous
		# manual acquire()/release() pairs
		with lock:
			busy = registering
		if busy:
			return Task.ASK_LATER
		ret = Task.Task.runnable_status(self)
		if ret == Task.RUN_ME:
			with lock:
				registering = True
		return ret

	def post_run(self):
		"""Release the registration flag once ghc-pkg has finished."""
		global registering
		with lock:
			registering = False
		return Task.Task.post_run(self)
class cabal_touch(Cabal):
	"""Touches the .package marker that dependent packages wait for."""
	run_str = 'touch ${TGT}'

+ 110
- 0
waflib/extras/cfg_altoptions.py View File

@@ -0,0 +1,110 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Tool to extend c_config.check_cfg()

__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
__copyright__ = "Jérôme Carretero, 2014"

"""

This tool allows to work around the absence of ``*-config`` programs
on systems, by keeping the same clean configuration syntax but inferring
values or permitting their modification via the options interface.

Note that pkg-config can also support setting ``PKG_CONFIG_PATH``,
so you can put custom files in a folder containing new .pc files.
This tool could also be implemented by taking advantage of this fact.

Usage::

def options(opt):
opt.load('c_config_alt')
opt.add_package_option('package')

def configure(cfg):
conf.load('c_config_alt')
conf.check_cfg(...)

Known issues:

- Behavior with different build contexts...

"""

import os
import functools
from waflib import Configure, Options, Errors

def name_to_dest(x):
	"""Normalize a package name into an option destination: lowercase, '-' becomes '_'."""
	lowered = x.lower()
	return lowered.replace('-', '_')


def options(opt):
	"""
	Attach an ``add_package_option`` helper to the option context; each call
	declares a ``--<pkg>-root`` option in the "configure options" group.
	"""
	def register(ctx, param):
		dest = name_to_dest(param)
		group = ctx.get_option_group("configure options")
		group.add_option(
			'--%s-root' % dest,
			help="path containing include and lib subfolders for %s" % param,
		)

	opt.add_package_option = functools.partial(register, opt)


# keep a handle on the stock implementation so we can fall back to it
check_cfg_old = getattr(Configure.ConfigurationContext, 'check_cfg')

@Configure.conf
def check_cfg(conf, *k, **kw):
	"""
	Drop-in replacement for the stock ``check_cfg``: when the user passed
	``--<pkg>-root``, derive INCLUDES/LIBPATH/LIB from that folder instead
	of querying a ``*-config`` program; otherwise delegate unchanged.
	"""
	# positional shorthand: 'pkgname arg1 arg2' -> package/args keywords
	if k:
		lst = k[0].split()
		kw['package'] = lst[0]
		kw['args'] = ' '.join(lst[1:])

	if not 'package' in kw:
		return check_cfg_old(conf, **kw)

	package = kw['package']

	# option name (--foo-bar-root -> foo_bar) and env suffix (FOO_BAR)
	package_lo = name_to_dest(package)
	package_hi = package.upper().replace('-', '_') # TODO FIXME
	package_hi = kw.get('uselib_store', package_hi)

	def check_folder(path, name):
		# raise a ConfigurationError unless *path* is an existing directory
		try:
			assert os.path.isdir(path)
		except AssertionError:
			raise Errors.ConfigurationError(
				"%s_%s (%s) is not a folder!" \
				% (package_lo, name, path))
		return path

	root = getattr(Options.options, '%s_root' % package_lo, None)

	if root is None:
		# no override on the command line: behave exactly like the original
		return check_cfg_old(conf, **kw)
	else:
		def add_manual_var(k, v):
			# store e.g. INCLUDES_FOO in the configuration environment
			conf.start_msg('Adding for %s a manual var' % (package))
			conf.env["%s_%s" % (k, package_hi)] = v
			conf.end_msg("%s = %s" % (k, v))


		check_folder(root, 'root')

		pkg_inc = check_folder(os.path.join(root, "include"), 'inc')
		add_manual_var('INCLUDES', [pkg_inc])
		pkg_lib = check_folder(os.path.join(root, "lib"), 'libpath')
		add_manual_var('LIBPATH', [pkg_lib])
		add_manual_var('LIB', [package])

		# propagate env vars of manual dependencies (USELIB-style suffixes)
		for x in kw.get('manual_deps', []):
			for k, v in sorted(conf.env.get_merged_dict().items()):
				if k.endswith('_%s' % x):
					k = k.replace('_%s' % x, '')
					conf.start_msg('Adding for %s a manual dep' \
						%(package))
					conf.env["%s_%s" % (k, package_hi)] += v
					conf.end_msg('%s += %s' % (k, v))

		return True


+ 85
- 0
waflib/extras/clang_compilation_database.py View File

@@ -0,0 +1,85 @@
#!/usr/bin/env python
# encoding: utf-8
# Christoph Koke, 2013

"""
Writes the c and cpp compile commands into build/compile_commands.json
see http://clang.llvm.org/docs/JSONCompilationDatabase.html

Usage:

def configure(conf):
conf.load('compiler_cxx')
...
conf.load('clang_compilation_database')
"""

import sys, os, json, shlex, pipes
from waflib import Logs, TaskGen, Task

Task.Task.keep_last_cmd = True

@TaskGen.feature('c', 'cxx')
@TaskGen.after_method('process_use')
def collect_compilation_db_tasks(self):
	"Add a compilation database entry for compiled tasks"
	bld = self.bld
	if not hasattr(bld, 'clang_compilation_database_tasks'):
		# first task generator seen: create the list and register the
		# post-build writer exactly once
		bld.clang_compilation_database_tasks = []
		bld.add_post_fun(write_compilation_database)
	clang_db = bld.clang_compilation_database_tasks

	interesting = tuple(cls for cls in (Task.classes.get('c'), Task.classes.get('cxx')) if cls)
	for tsk in getattr(self, 'compiled_tasks', []):
		if isinstance(tsk, interesting):
			clang_db.append(tsk)

def write_compilation_database(ctx):
	"""
	Write the clang compilation database (build/compile_commands.json).
	Entries from a previous run are kept and updated in place, keyed by file.
	"""
	database_file = ctx.bldnode.make_node('compile_commands.json')
	Logs.info('Build commands will be stored in %s', database_file.path_from(ctx.path))
	try:
		root = json.load(database_file)
	except (IOError, ValueError):
		# IOError: first run, no database yet.  ValueError: the previous
		# file is empty or corrupt (e.g. interrupted build) - previously
		# only IOError was caught, so a corrupt file crashed the build.
		root = []
	clang_db = dict((x['file'], x) for x in root)
	for task in getattr(ctx, 'clang_compilation_database_tasks', []):
		try:
			cmd = task.last_cmd
		except AttributeError:
			# the task was not executed during this run
			continue
		directory = getattr(task, 'cwd', ctx.variant_dir)
		f_node = task.inputs[0]
		filename = os.path.relpath(f_node.abspath(), directory)
		entry = {
			"directory": directory,
			"arguments": cmd,
			"file": filename,
		}
		clang_db[filename] = entry
	root = list(clang_db.values())
	database_file.write(json.dumps(root, indent=2))

# Override the runnable_status function to do a dummy/dry run when the file doesn't need to be compiled.
# This will make sure compile_commands.json is always fully up to date.
# Previously you could end up with a partial compile_commands.json if the build failed.
for x in ('c', 'cxx'):
	if x not in Task.classes:
		continue

	t = Task.classes[x]

	def runnable_status(self):
		# no-op stand-in: lets run() record last_cmd without executing anything
		def exec_command(cmd, **kw):
			pass

		run_status = self.old_runnable_status()
		if run_status == Task.SKIP_ME:
			# dry run: swap in the no-op exec_command, let run() compute
			# last_cmd, then restore the original
			setattr(self, 'old_exec_command', getattr(self, 'exec_command', None))
			setattr(self, 'exec_command', exec_command)
			self.run()
			setattr(self, 'exec_command', getattr(self, 'old_exec_command', None))
		return run_status

	# NOTE: the closure does not capture the loop variables, so reusing the
	# same function object for both classes is safe here
	setattr(t, 'old_runnable_status', getattr(t, 'runnable_status', None))
	setattr(t, 'runnable_status', runnable_status)

+ 875
- 0
waflib/extras/codelite.py View File

@@ -0,0 +1,875 @@
#! /usr/bin/env python
# encoding: utf-8
# CodeLite Project
# Christian Klein (chrikle@berlios.de)
# Created: Jan 2012
# As templete for this file I used the msvs.py
# I hope this template will work proper

"""
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:

1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.

3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""

"""

To add this tool to your project:
def options(conf):
opt.load('codelite')

It can be a good idea to add the sync_exec tool too.

To generate solution files:
$ waf configure codelite

To customize the outputs, provide subclasses in your wscript files:

from waflib.extras import codelite
class vsnode_target(codelite.vsnode_target):
def get_build_command(self, props):
# likely to be required
return "waf.bat build"
def collect_source(self):
# likely to be required
...
class codelite_bar(codelite.codelite_generator):
def init(self):
codelite.codelite_generator.init(self)
self.vsnode_target = vsnode_target

The codelite class re-uses the same build() function for reading the targets (task generators),
you may therefore specify codelite settings on the context object:

def build(bld):
bld.codelite_solution_name = 'foo.workspace'
bld.waf_command = 'waf.bat'
bld.projects_dir = bld.srcnode.make_node('')
bld.projects_dir.mkdir()


ASSUMPTIONS:
* a project can be either a directory or a target, project files are written only for targets that have source files
* each project is a vcxproj file, therefore the project uuid needs only to be a hash of the absolute path
"""

import os, re, sys
import uuid # requires python 2.5
from waflib.Build import BuildContext
from waflib import Utils, TaskGen, Logs, Task, Context, Node, Options

HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'

PROJECT_TEMPLATE = r'''<?xml version="1.0" encoding="utf-8"?>
<CodeLite_Project Name="${project.name}" InternalType="Library">
<Plugins>
<Plugin Name="qmake">
<![CDATA[00010001N0005Release000000000000]]>
</Plugin>
</Plugins>
<Description/>
<Dependencies/>
<VirtualDirectory Name="src">
${for x in project.source}
${if (project.get_key(x)=="sourcefile")}
<File Name="${x.abspath()}"/>
${endif}
${endfor}
</VirtualDirectory>
<VirtualDirectory Name="include">
${for x in project.source}
${if (project.get_key(x)=="headerfile")}
<File Name="${x.abspath()}"/>
${endif}
${endfor}
</VirtualDirectory>
<Settings Type="Dynamic Library">
<GlobalSettings>
<Compiler Options="" C_Options="">
<IncludePath Value="."/>
</Compiler>
<Linker Options="">
<LibraryPath Value="."/>
</Linker>
<ResourceCompiler Options=""/>
</GlobalSettings>
<Configuration Name="Release" CompilerType="gnu gcc" ReleasegerType="GNU gdb Releaseger" Type="Dynamic Library" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append">
<Compiler Options="" C_Options="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" UseDifferentPCHFlags="no" PCHFlags="">
<IncludePath Value="."/>
<IncludePath Value="."/>
</Compiler>
<Linker Options="" Required="yes">
<LibraryPath Value=""/>
</Linker>
<ResourceCompiler Options="" Required="no"/>
<General OutputFile="${xml:project.build_properties[0].output_file}" IntermediateDirectory="" Command="" CommandArguments="" PauseExecWhenProcTerminates="yes"/>
<Environment EnvVarSetName="&lt;Use Defaults&gt;" DbgSetName="&lt;Use Defaults&gt;">
<![CDATA[]]>
</Environment>
<Releaseger IsRemote="no" RemoteHostName="" RemoteHostPort="" ReleasegerPath="">
<PostConnectCommands/>
<StartupCommands/>
</Releaseger>
<PreBuild/>
<PostBuild/>
<CustomBuild Enabled="yes">
$b = project.build_properties[0]}
<RebuildCommand>${xml:project.get_rebuild_command(project.build_properties[0])}</RebuildCommand>
<CleanCommand>${xml:project.get_clean_command(project.build_properties[0])}</CleanCommand>
<BuildCommand>${xml:project.get_build_command(project.build_properties[0])}</BuildCommand>
<Target Name="Install">${xml:project.get_install_command(project.build_properties[0])}</Target>
<Target Name="Build and Install">${xml:project.get_build_and_install_command(project.build_properties[0])}</Target>
<Target Name="Build All">${xml:project.get_build_all_command(project.build_properties[0])}</Target>
<Target Name="Rebuild All">${xml:project.get_rebuild_all_command(project.build_properties[0])}</Target>
<Target Name="Clean All">${xml:project.get_clean_all_command(project.build_properties[0])}</Target>
<Target Name="Build and Install All">${xml:project.get_build_and_install_all_command(project.build_properties[0])}</Target>
<PreprocessFileCommand/>
<SingleFileCommand/>
<MakefileGenerationCommand/>
<ThirdPartyToolName>None</ThirdPartyToolName>
<WorkingDirectory/>
</CustomBuild>
<AdditionalRules>
<CustomPostBuild/>
<CustomPreBuild/>
</AdditionalRules>
<Completion>
<ClangCmpFlags/>
<ClangPP/>
<SearchPaths/>
</Completion>
</Configuration>
<Configuration Name="Release" CompilerType="gnu gcc" ReleasegerType="GNU gdb Releaseger" Type="" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append">
<Compiler Options="" C_Options="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" UseDifferentPCHFlags="no" PCHFlags="">
<IncludePath Value="."/>
</Compiler>
<Linker Options="" Required="yes"/>
<ResourceCompiler Options="" Required="no"/>
<General OutputFile="" IntermediateDirectory="./Release" Command="" CommandArguments="" UseSeparateReleaseArgs="no" ReleaseArguments="" WorkingDirectory="$(IntermediateDirectory)" PauseExecWhenProcTerminates="yes"/>
<Environment EnvVarSetName="&lt;Use Defaults&gt;" DbgSetName="&lt;Use Defaults&gt;">
<![CDATA[
]]>
</Environment>
<Releaseger IsRemote="no" RemoteHostName="" RemoteHostPort="" ReleasegerPath="">
<PostConnectCommands/>
<StartupCommands/>
</Releaseger>
<PreBuild/>
<PostBuild/>
<CustomBuild Enabled="no">
<RebuildCommand/>
<CleanCommand/>
<BuildCommand/>
<PreprocessFileCommand/>
<SingleFileCommand/>
<MakefileGenerationCommand/>
<ThirdPartyToolName/>
<WorkingDirectory/>
</CustomBuild>
<AdditionalRules>
<CustomPostBuild/>
<CustomPreBuild/>
</AdditionalRules>
<Completion>
<ClangCmpFlags/>
<ClangPP/>
<SearchPaths/>
</Completion>
</Configuration>
</Settings>
</CodeLite_Project>'''




SOLUTION_TEMPLATE = '''<?xml version="1.0" encoding="utf-8"?>
<CodeLite_Workspace Name="${getattr(project, 'codelite_solution_name', None)[:-10]}" Database="./${getattr(project, 'codelite_solution_name', None)[:-10]}.tags">
${for p in project.all_projects}
<Project Name = "${p.name}" Path = "${p.title}" Active="No"/>
${endfor}
<BuildMatrix>
<WorkspaceConfiguration Name="Release" Selected="yes">
${for p in project.all_projects}
<Project Name="${p.name}" ConfigName="Release"/>
${endfor}
</WorkspaceConfiguration>
</BuildMatrix>
</CodeLite_Workspace>'''



COMPILE_TEMPLATE = '''def f(project):
lst = []
def xml_escape(value):
return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")

%s

#f = open('cmd.txt', 'w')
#f.write(str(lst))
#f.close()
return ''.join(lst)
'''
# template markers: "$$" -> literal "$", "\" kept, "${code}" -> evaluated python
reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)

def compile_template(line):
	"""
	Compile a template expression into a python function (like jsps, but way shorter)

	Literal text is emitted as-is; ``${...}`` chunks become python code
	(``if``/``for``/``else`` control structure, ``py:`` raw statements,
	``xml:`` escaped expressions, plain expressions otherwise).
	"""
	extr = []
	def repl(match):
		g = match.group
		if g('dollar'):
			return "$"
		elif g('backslash'):
			return "\\"
		elif g('subst'):
			extr.append(g('code'))
			return "<<|@|>>"
		return None

	line2 = reg_act.sub(repl, line)
	params = line2.split('<<|@|>>')
	assert(extr)

	indent = 0
	buf = []

	# NOTE: the previous `app = buf.append` assignment was dead code,
	# immediately shadowed by this definition
	def app(txt):
		buf.append(indent * '\t' + txt)

	for x in range(len(extr)):
		# literal text preceding the substitution
		if params[x]:
			app("lst.append(%r)" % params[x])

		f = extr[x]
		if f.startswith(('if', 'for')):
			app(f + ':')
			indent += 1
		elif f.startswith('py:'):
			# raw python statement
			app(f[3:])
		elif f.startswith(('endif', 'endfor')):
			indent -= 1
		elif f.startswith(('else', 'elif')):
			indent -= 1
			app(f + ':')
			indent += 1
		elif f.startswith('xml:'):
			app('lst.append(xml_escape(%s))' % f[4:])
		else:
			app('lst.append(%s)' % f)

	# trailing literal text after the last substitution
	# (extr is guaranteed non-empty by the assert above)
	if params[-1]:
		app("lst.append(%r)" % params[-1])

	fun = COMPILE_TEMPLATE % "\n\t".join(buf)
	return Task.funex(fun)


# a run of blank/whitespace content ending in a newline; raw string so the
# regex escapes are explicit (matches the same text as the previous literal)
re_blank = re.compile(r'(\n|\r|\s)*\n', re.M)

def rm_blank_lines(txt):
	"""Collapse each run of blank lines in *txt* into a single CRLF."""
	txt = re_blank.sub('\r\n', txt)
	return txt

# UTF-8 byte-order mark, prepended to .project files by stealth_write()
BOM = '\xef\xbb\xbf'
try:
	BOM = bytes(BOM, 'latin-1') # python 3: keep the raw byte values
except (TypeError, NameError):
	pass # python 2: str is already bytes

def stealth_write(self, data, flags='wb'):
	"""
	Write *data* to this node only if the content actually changed, so
	unchanged project files keep their timestamps (and the IDE does not
	reload them).  Installed on Node below.
	"""
	try:
		unicode
	except NameError:
		data = data.encode('utf-8') # python 3
	else:
		# python 2: normalize through the filesystem encoding to utf-8
		data = data.decode(sys.getfilesystemencoding(), 'replace')
		data = data.encode('utf-8')

	if self.name.endswith('.project'):
		# CodeLite expects a UTF-8 BOM on .project files
		data = BOM + data

	try:
		txt = self.read(flags='rb')
		if txt != data:
			raise ValueError('must write')
	except (IOError, ValueError):
		# missing file or different content: (re)write it
		self.write(data, flags=flags)
	else:
		Logs.debug('codelite: skipping %r', self)
Node.Node.stealth_write = stealth_write

# any character that is not allowed in a project identifier
re_quote = re.compile("[^a-zA-Z0-9-]")

def quote(s):
	"""Replace every character outside [a-zA-Z0-9-] with an underscore."""
	return re_quote.sub("_", s)

def xml_escape(value):
	"""Escape the five XML special characters in *value* (ampersand first)."""
	for ch, rep in (("&", "&amp;"), ('"', "&quot;"), ("'", "&apos;"), ("<", "&lt;"), (">", "&gt;")):
		value = value.replace(ch, rep)
	return value

def make_uuid(v, prefix = None):
	"""
	Derive a stable uuid string from *v* (a dict or any stringable value);
	an optional *prefix* replaces the first 8 hex digits.
	"""
	if isinstance(v, dict):
		# serialize in key order so equal dicts produce the same uuid
		tmp = str(sorted(v.items()))
	else:
		tmp = str(v)
	digest = Utils.md5(tmp.encode()).hexdigest().upper()
	if prefix:
		digest = '%s%s' % (prefix, digest[8:])
	gid = uuid.UUID(digest, version = 4)
	return str(gid).upper()

def diff(node, fromnode):
	"""
	Relative path between two nodes, like Node.path_from, but each upward
	step is rendered as "(..)" instead of "..".
	"""
	a, b = node, fromnode
	ha, hb = a.height(), b.height()

	names = []
	ups = 0

	# bring each side up to the same depth
	while ha > hb:
		names.append(a.name)
		a = a.parent
		ha -= 1
	while hb > ha:
		ups += 1
		b = b.parent
		hb -= 1

	# climb both sides together until the common ancestor is reached
	while a is not b:
		names.append(a.name)
		ups += 1
		a = a.parent
		b = b.parent

	names.extend('(..)' for _ in range(ups))
	names.reverse()
	return tuple(names)

class build_property(object):
	"""Plain attribute container for one (configuration, platform) combination;
	fields are attached dynamically in collect_properties()."""
	pass

class vsnode(object):
	"""
	Abstract base for the elements written to the CodeLite files.
	Every node is assumed to carry a uuid and an optional parent for nesting.
	"""
	def __init__(self, ctx):
		self.ctx = ctx      # codelite context
		self.name = ''      # string, mandatory
		self.vspath = ''    # path in visual studio (name for dirs, absolute path for projects)
		self.uuid = ''      # string, mandatory
		self.parent = None  # parent node for visual studio nesting

	def get_waf(self):
		"""
		Full command used to invoke waf; override in subclasses if needed.
		"""
		command = getattr(self.ctx, 'waf_command', 'waf')
		return '%s/%s' % (self.ctx.srcnode.abspath(), command)

	def ptype(self):
		"""
		Return the special uuid identifying the project type in the solution.
		"""
		pass

	def write(self):
		"""
		Write the project file; nothing to do by default.
		"""
		pass

	def make_uuid(self, val):
		"""
		Template helper: forwards to the module-level make_uuid (the
		templates cannot access global variables).
		"""
		return make_uuid(val)

class vsnode_vsdir(vsnode):
	"""
	Nodes representing visual studio folders (which do not match the filesystem tree!)
	"""
	VS_GUID_SOLUTIONFOLDER = "2150E333-8FDC-42A3-9474-1A3956D46DE8"
	def __init__(self, ctx, uuid, name, vspath=''):
		"""
		:param uuid: pre-computed uuid of the folder
		:param name: display name, also used as title and default vspath
		"""
		vsnode.__init__(self, ctx)
		self.title = self.name = name
		self.uuid = uuid
		self.vspath = vspath or name

	def ptype(self):
		return self.VS_GUID_SOLUTIONFOLDER

class vsnode_project(vsnode):
	"""
	Abstract class representing visual studio project elements
	A project is assumed to be writable, and has a node representing the file to write to
	"""
	VS_GUID_VCPROJ = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942"
	def ptype(self):
		return self.VS_GUID_VCPROJ

	def __init__(self, ctx, node):
		"""
		:param node: node of the project file to write
		"""
		vsnode.__init__(self, ctx)
		self.path = node
		self.uuid = make_uuid(node.abspath())
		self.name = node.name
		self.title = self.path.abspath()
		self.source = [] # list of node objects
		self.build_properties = [] # list of properties (nmake commands, output dir, etc)

	def dirs(self):
		"""
		Get the list of parent folders of the source files (header files included)
		for writing the filters
		"""
		lst = []
		def add(x):
			# walk upwards, stopping at the task generator's path
			if x.height() > self.tg.path.height() and x not in lst:
				lst.append(x)
				add(x.parent)
		for x in self.source:
			add(x.parent)
		return lst

	def write(self):
		"""Render PROJECT_TEMPLATE for this project and write it (if changed)."""
		Logs.debug('codelite: creating %r', self.path)
		#print "self.name:",self.name

		# first write the project file
		template1 = compile_template(PROJECT_TEMPLATE)
		proj_str = template1(self)
		proj_str = rm_blank_lines(proj_str)
		self.path.stealth_write(proj_str)

		# then write the filter
		#template2 = compile_template(FILTER_TEMPLATE)
		#filter_str = template2(self)
		#filter_str = rm_blank_lines(filter_str)
		#tmp = self.path.parent.make_node(self.path.name + '.filters')
		#tmp.stealth_write(filter_str)

	def get_key(self, node):
		"""
		required for writing the source files
		"""
		name = node.name
		if name.endswith(('.cpp', '.c')):
			return 'sourcefile'
		return 'headerfile'

	def collect_properties(self):
		"""
		Returns a list of triplet (configuration, platform, output_directory)
		"""
		ret = []
		for c in self.ctx.configurations:
			for p in self.ctx.platforms:
				x = build_property()
				x.outdir = ''

				x.configuration = c
				x.platform = p

				x.preprocessor_definitions = ''
				x.includes_search_path = ''

				# can specify "deploy_dir" too
				ret.append(x)
		self.build_properties = ret

	def get_build_params(self, props):
		"""Return (waf command, option string) used by the command builders below."""
		opt = ''
		return (self.get_waf(), opt)

	def get_build_command(self, props):
		return "%s build %s" % self.get_build_params(props)

	def get_clean_command(self, props):
		return "%s clean %s" % self.get_build_params(props)

	def get_rebuild_command(self, props):
		return "%s clean build %s" % self.get_build_params(props)

	def get_install_command(self, props):
		return "%s install %s" % self.get_build_params(props)

	def get_build_and_install_command(self, props):
		return "%s build install %s" % self.get_build_params(props)

	def get_build_and_install_all_command(self, props):
		return "%s build install" % self.get_build_params(props)[0]

	def get_clean_all_command(self, props):
		return "%s clean" % self.get_build_params(props)[0]

	def get_build_all_command(self, props):
		return "%s build" % self.get_build_params(props)[0]

	def get_rebuild_all_command(self, props):
		return "%s clean build" % self.get_build_params(props)[0]

	def get_filter_name(self, node):
		# backslash-joined relative path for the filter view
		lst = diff(node, self.tg.path)
		return '\\'.join(lst) or '.'

class vsnode_alias(vsnode_project):
	"""Project not backed by source files; wraps a command (build_all, install, ...)."""
	def __init__(self, ctx, node, name):
		vsnode_project.__init__(self, ctx, node)
		self.name = name
		self.output_file = ''

class vsnode_build_all(vsnode_alias):
	"""
	Fake target used to emulate the behaviour of "make all" (starting one process by target is slow)
	This is the only alias enabled by default
	"""
	def __init__(self, ctx, node, name='build_all_projects'):
		vsnode_alias.__init__(self, ctx, node, name)
		self.is_active = True # enabled in the workspace by default

class vsnode_install_all(vsnode_alias):
	"""
	Fake target used to emulate the behaviour of "make install"
	"""
	def __init__(self, ctx, node, name='install_all_projects'):
		vsnode_alias.__init__(self, ctx, node, name)

	def get_build_command(self, props):
		return "%s build install %s" % self.get_build_params(props)

	def get_clean_command(self, props):
		return "%s clean %s" % self.get_build_params(props)

	def get_rebuild_command(self, props):
		return "%s clean build install %s" % self.get_build_params(props)

class vsnode_project_view(vsnode_alias):
	"""
	Fake target used to emulate a file system view
	"""
	def __init__(self, ctx, node, name='project_view'):
		vsnode_alias.__init__(self, ctx, node, name)
		self.tg = self.ctx() # fake one, cannot remove
		# glob patterns excluded from the view (waf runtime files, IDE caches)
		self.exclude_files = Node.exclude_regs + '''
waf-2*
waf3-2*/**
.waf-2*
.waf3-2*/**
**/*.sdf
**/*.suo
**/*.ncb
**/%s
''' % Options.lockfile

	def collect_source(self):
		# this is likely to be slow
		self.source = self.ctx.srcnode.ant_glob('**', excl=self.exclude_files)

	def get_build_command(self, props):
		params = self.get_build_params(props) + (self.ctx.cmd,)
		return "%s %s %s" % params

	def get_clean_command(self, props):
		return ""

	def get_rebuild_command(self, props):
		return self.get_build_command(props)

class vsnode_target(vsnode_project):
	"""
	CodeLite project representing a target (program, library, etc) and bound
	to a task generator
	"""
	def __init__(self, ctx, tg):
		"""
		A project is more or less equivalent to a file/folder
		"""
		base = getattr(ctx, 'projects_dir', None) or tg.path
		node = base.make_node(quote(tg.name) + ctx.project_extension) # the project file as a Node
		vsnode_project.__init__(self, ctx, node)
		self.name = quote(tg.name)
		self.tg = tg # task generator

	def get_build_params(self, props):
		"""
		Override the default to add the target name
		"""
		opt = ''
		if getattr(self, 'tg', None):
			opt += " --targets=%s" % self.tg.name
		return (self.get_waf(), opt)

	def collect_source(self):
		# gather the target sources plus any headers found under codelite_includes
		tg = self.tg
		source_files = tg.to_nodes(getattr(tg, 'source', []))
		include_dirs = Utils.to_list(getattr(tg, 'codelite_includes', []))
		include_files = []
		for x in include_dirs:
			if isinstance(x, str):
				x = tg.path.find_node(x)
			if x:
				lst = [y for y in x.ant_glob(HEADERS_GLOB, flat=False)]
				include_files.extend(lst)

		# remove duplicates
		self.source.extend(list(set(source_files + include_files)))
		self.source.sort(key=lambda x: x.abspath())

	def collect_properties(self):
		"""
		CodeLite projects are associated with platforms and configurations (for building especially)
		"""
		super(vsnode_target, self).collect_properties()
		for x in self.build_properties:
			x.outdir = self.path.parent.abspath()
			x.preprocessor_definitions = ''
			x.includes_search_path = ''

			# when the task generator has a link task, expose its output file,
			# preprocessor defines and include paths to the IDE
			try:
				tsk = self.tg.link_task
			except AttributeError:
				pass
			else:
				x.output_file = tsk.outputs[0].abspath()
				x.preprocessor_definitions = ';'.join(tsk.env.DEFINES)
				x.includes_search_path = ';'.join(self.tg.env.INCPATHS)

class codelite_generator(BuildContext):
	'''generates a CodeLite workspace'''
	cmd = 'codelite'
	fun = 'build'

	def init(self):
		"""
		Create the data members required by the generator; any of them may be
		set beforehand (for example by a subclass) to customize the output
		"""
		if not getattr(self, 'configurations', None):
			self.configurations = ['Release'] # LocalRelease, RemoteDebug, etc
		if not getattr(self, 'platforms', None):
			self.platforms = ['Win32']
		if not getattr(self, 'all_projects', None):
			self.all_projects = []
		if not getattr(self, 'project_extension', None):
			self.project_extension = '.project'
		if not getattr(self, 'projects_dir', None):
			self.projects_dir = self.srcnode.make_node('')
			self.projects_dir.mkdir()

		# bind the classes to the object, so that subclass can provide custom generators
		if not getattr(self, 'vsnode_vsdir', None):
			self.vsnode_vsdir = vsnode_vsdir
		if not getattr(self, 'vsnode_target', None):
			self.vsnode_target = vsnode_target
		if not getattr(self, 'vsnode_build_all', None):
			self.vsnode_build_all = vsnode_build_all
		if not getattr(self, 'vsnode_install_all', None):
			self.vsnode_install_all = vsnode_install_all
		if not getattr(self, 'vsnode_project_view', None):
			self.vsnode_project_view = vsnode_project_view

		# version identifiers written to the solution template
		self.numver = '11.00'
		self.vsver = '2010'

	def execute(self):
		"""
		Entry point: restore the configuration state, then generate the
		project and workspace files instead of performing a build
		"""
		self.restore()
		if not self.all_envs:
			self.load_envs()
		self.recurse([self.run_dir])

		# user initialization
		self.init()

		# two phases for creating the solution
		self.collect_projects() # add project objects into "self.all_projects"
		self.write_files() # write the corresponding project and solution files

	def collect_projects(self):
		"""
		Fill the list self.all_projects with project objects
		Fill the list of build targets
		"""
		self.collect_targets()
		# aliases and directory nodes are disabled for CodeLite:
		#self.add_aliases()
		#self.collect_dirs()
		default_project = getattr(self, 'default_project', None)
		def sortfun(x):
			# sort the default project first, the others by path (or name for aliases)
			if x.name == default_project:
				return ''
			return getattr(x, 'path', None) and x.path.abspath() or x.name
		self.all_projects.sort(key=sortfun)

	def write_files(self):
		"""
		Write the project and solution files from the data collected
		so far. It is unlikely that you will want to change this
		"""
		for p in self.all_projects:
			p.write()

		# and finally write the solution file
		node = self.get_solution_node()
		node.parent.mkdir()
		Logs.warn('Creating %r', node)
		template1 = compile_template(SOLUTION_TEMPLATE)
		sln_str = template1(self)
		sln_str = rm_blank_lines(sln_str)
		node.stealth_write(sln_str)

	def get_solution_node(self):
		"""
		The solution filename is required when writing the .vcproj files
		return self.solution_node and if it does not exist, make one
		"""
		try:
			return self.solution_node
		except AttributeError:
			# not computed yet: fall through and create it below
			pass

		codelite_solution_name = getattr(self, 'codelite_solution_name', None)
		if not codelite_solution_name:
			codelite_solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '.workspace'
			setattr(self, 'codelite_solution_name', codelite_solution_name)
		if os.path.isabs(codelite_solution_name):
			self.solution_node = self.root.make_node(codelite_solution_name)
		else:
			self.solution_node = self.srcnode.make_node(codelite_solution_name)
		return self.solution_node

	def project_configurations(self):
		"""
		Helper that returns all the pairs (config,platform)
		"""
		return [(c, p) for c in self.configurations for p in self.platforms]

	def collect_targets(self):
		"""
		Process the list of task generators
		"""
		for g in self.groups:
			for tg in g:
				if not isinstance(tg, TaskGen.task_gen):
					continue

				if not hasattr(tg, 'codelite_includes'):
					tg.codelite_includes = tg.to_list(getattr(tg, 'includes', [])) + tg.to_list(getattr(tg, 'export_includes', []))
				tg.post()
				if not getattr(tg, 'link_task', None):
					continue

				p = self.vsnode_target(self, tg)
				p.collect_source() # delegate this processing
				p.collect_properties()
				self.all_projects.append(p)

	def add_aliases(self):
		"""
		Add a specific target that emulates the "make all" necessary for Visual studio when pressing F7
		We also add an alias for "make install" (disabled by default)
		"""
		base = getattr(self, 'projects_dir', None) or self.tg.path

		node_project = base.make_node('build_all_projects' + self.project_extension) # Node
		p_build = self.vsnode_build_all(self, node_project)
		p_build.collect_properties()
		self.all_projects.append(p_build)

		node_project = base.make_node('install_all_projects' + self.project_extension) # Node
		p_install = self.vsnode_install_all(self, node_project)
		p_install.collect_properties()
		self.all_projects.append(p_install)

		node_project = base.make_node('project_view' + self.project_extension) # Node
		p_view = self.vsnode_project_view(self, node_project)
		p_view.collect_source()
		p_view.collect_properties()
		self.all_projects.append(p_view)

		# group the three aliases under a single solution folder
		n = self.vsnode_vsdir(self, make_uuid(self.srcnode.abspath() + 'build_aliases'), "build_aliases")
		p_build.parent = p_install.parent = p_view.parent = n
		self.all_projects.append(n)

	def collect_dirs(self):
		"""
		Create the folder structure in the CodeLite project view
		"""
		seen = {}
		def make_parents(proj):
			# look at a project, try to make a parent
			if getattr(proj, 'parent', None):
				# aliases already have parents
				return
			x = proj.iter_path
			if x in seen:
				proj.parent = seen[x]
				return

			# there is no vsnode_vsdir for x yet,
			# so create a project representing the folder "x"
			n = proj.parent = seen[x] = self.vsnode_vsdir(self, make_uuid(x.abspath()), x.name)
			n.iter_path = x.parent
			self.all_projects.append(n)

			# recurse up to the project directory
			if x.height() > self.srcnode.height() + 1:
				make_parents(n)

		for p in self.all_projects[:]: # iterate over a copy of all projects
			if not getattr(p, 'tg', None):
				# but only projects that have a task generator
				continue

			# make a folder for each task generator
			p.iter_path = p.tg.path
			make_parents(p)


+ 39
- 0
waflib/extras/color_gcc.py View File

@@ -0,0 +1,39 @@
#!/usr/bin/env python
# encoding: utf-8

# Replaces the default formatter by one which understands GCC output and colorizes it.

__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
__copyright__ = "Jérôme Carretero, 2012"

import sys
from waflib import Logs

class ColorGCCFormatter(Logs.formatter):
	"""
	Logging formatter that colorizes gcc/g++ diagnostics: warnings in yellow,
	errors in red, notes in cyan; other records pass through unchanged.
	"""
	def __init__(self, colors):
		self.colors = colors
		Logs.formatter.__init__(self)
	def format(self, rec):
		# walk up the call stack to see whether this record was emitted from
		# exec_command while running a gcc/g++ command line
		frame = sys._getframe()
		while frame:
			func = frame.f_code.co_name
			if func == 'exec_command':
				cmd = frame.f_locals.get('cmd')
				if isinstance(cmd, list) and ('gcc' in cmd[0] or 'g++' in cmd[0]):
					lines = []
					for line in rec.msg.splitlines():
						if 'warning: ' in line:
							lines.append(self.colors.YELLOW + line)
						elif 'error: ' in line:
							lines.append(self.colors.RED + line)
						elif 'note: ' in line:
							lines.append(self.colors.CYAN + line)
						else:
							lines.append(line)
					rec.msg = "\n".join(lines)
			frame = frame.f_back
		return Logs.formatter.format(self, rec)

def options(opt):
	# install the colorizing formatter on the default log handler as soon as
	# the tool is loaded (option parsing runs before any compilation)
	Logs.log.handlers[0].setFormatter(ColorGCCFormatter(Logs.colors))


+ 51
- 0
waflib/extras/color_rvct.py View File

@@ -0,0 +1,51 @@
#!/usr/bin/env python
# encoding: utf-8

# Replaces the default formatter by one which understands RVCT output and colorizes it.

__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
__copyright__ = "Jérôme Carretero, 2012"

import sys
import atexit
from waflib import Logs

# error lines collected by the formatter, reported once at interpreter exit
errors = []

def show_errors():
	"""Print at most the first six recorded error lines."""
	for err in errors[:6]:
		print("Error: %s" % err)

atexit.register(show_errors)

class RcvtFormatter(Logs.formatter):
	"""
	Logging formatter that colorizes RVCT (armcc/armld) diagnostics and
	records error lines for the atexit summary printed by show_errors().
	"""
	def __init__(self, colors):
		Logs.formatter.__init__(self)
		self.colors = colors
	def format(self, rec):
		# walk up the call stack to see whether this record was emitted from
		# exec_command while running an armcc/armld command line
		frame = sys._getframe()
		while frame:
			func = frame.f_code.co_name
			if func == 'exec_command':
				# use .get(): 'cmd' may be absent from the frame locals, and a
				# KeyError inside a log formatter would break logging entirely
				cmd = frame.f_locals.get('cmd')
				if isinstance(cmd, list) and ('armcc' in cmd[0] or 'armld' in cmd[0]):
					lines = []
					for line in rec.msg.splitlines():
						if 'Warning: ' in line:
							lines.append(self.colors.YELLOW + line)
						elif 'Error: ' in line:
							lines.append(self.colors.RED + line)
							errors.append(line)
						elif 'note: ' in line:
							lines.append(self.colors.CYAN + line)
						else:
							lines.append(line)
					rec.msg = "\n".join(lines)
			frame = frame.f_back
		return Logs.formatter.format(self, rec)

def options(opt):
	# install the RVCT colorizing formatter on the default log handler at load time
	Logs.log.handlers[0].setFormatter(RcvtFormatter(Logs.colors))


+ 406
- 0
waflib/extras/compat15.py View File

@@ -0,0 +1,406 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2010 (ita)

"""
This file is provided to enable compatibility with waf 1.5
It was enabled by default in waf 1.6, but it is not used in waf 1.7
"""

import sys
from waflib import ConfigSet, Logs, Options, Scripting, Task, Build, Configure, Node, Runner, TaskGen, Utils, Errors, Context

# the following is to bring some compatibility with waf 1.5 "import waflib.Configure → import Configure"
sys.modules['Environment'] = ConfigSet
ConfigSet.Environment = ConfigSet.ConfigSet

sys.modules['Logs'] = Logs
sys.modules['Options'] = Options
sys.modules['Scripting'] = Scripting
sys.modules['Task'] = Task
sys.modules['Build'] = Build
sys.modules['Configure'] = Configure
sys.modules['Node'] = Node
sys.modules['Runner'] = Runner
sys.modules['TaskGen'] = TaskGen
sys.modules['Utils'] = Utils
sys.modules['Constants'] = Context
Context.SRCDIR = ''
Context.BLDDIR = ''

from waflib.Tools import c_preproc
sys.modules['preproc'] = c_preproc

from waflib.Tools import c_config
sys.modules['config_c'] = c_config

ConfigSet.ConfigSet.copy = ConfigSet.ConfigSet.derive
ConfigSet.ConfigSet.set_variant = Utils.nada

Utils.pproc = Utils.subprocess

Build.BuildContext.add_subdirs = Build.BuildContext.recurse
Build.BuildContext.new_task_gen = Build.BuildContext.__call__
Build.BuildContext.is_install = 0
Node.Node.relpath_gen = Node.Node.path_from

Utils.pproc = Utils.subprocess
Utils.get_term_cols = Logs.get_term_cols

def cmd_output(cmd, **kw):
	"""
	waf 1.5 compatibility helper: execute *cmd* and return its standard output.

	Extra keyword arguments consumed here:
	- 'silent': when true, a non-zero return code is not an error ('' is returned)
	- 'e': environment mapping, forwarded to Popen as 'env'
	All remaining keyword arguments are passed to subprocess.Popen.
	Raises ValueError when the process cannot be started or exits non-zero.
	"""

	silent = False
	if 'silent' in kw:
		silent = kw['silent']
		del(kw['silent'])

	if 'e' in kw:
		tmp = kw['e']
		del(kw['e'])
		kw['env'] = tmp

	# strings are executed through the shell, lists are executed directly
	kw['shell'] = isinstance(cmd, str)
	kw['stdout'] = Utils.subprocess.PIPE
	if silent:
		kw['stderr'] = Utils.subprocess.PIPE

	try:
		p = Utils.subprocess.Popen(cmd, **kw)
		output = p.communicate()[0]
	except OSError as e:
		raise ValueError(str(e))

	if p.returncode:
		if not silent:
			msg = "command execution failed: %s -> %r" % (cmd, str(output))
			raise ValueError(msg)
		output = ''
	return output
Utils.cmd_output = cmd_output

def name_to_obj(self, s, env=None):
	# compat: waf 1.5 accessor; the 'env' argument is accepted but ignored
	if Logs.verbose:
		Logs.warn('compat: change "name_to_obj(name, env)" by "get_tgen_by_name(name)"')
	return self.get_tgen_by_name(s)
Build.BuildContext.name_to_obj = name_to_obj

def env_of_name(self, name):
	# compat: waf 1.5 accessor for a named configuration set;
	# returns None (after logging an error) when the environment is unknown
	try:
		return self.all_envs[name]
	except KeyError:
		Logs.error('no such environment: '+name)
		return None
Build.BuildContext.env_of_name = env_of_name


def set_env_name(self, name, env):
self.all_envs[name] = env
return env
Configure.ConfigurationContext.set_env_name = set_env_name

def retrieve(self, name, fromenv=None):
try:
env = self.all_envs[name]
except KeyError:
env = ConfigSet.ConfigSet()
self.prepare_env(env)
self.all_envs[name] = env
else:
if fromenv:
Logs.warn('The environment %s may have been configured already', name)
return env
Configure.ConfigurationContext.retrieve = retrieve

Configure.ConfigurationContext.sub_config = Configure.ConfigurationContext.recurse
Configure.ConfigurationContext.check_tool = Configure.ConfigurationContext.load
Configure.conftest = Configure.conf
Configure.ConfigurationError = Errors.ConfigurationError
Utils.WafError = Errors.WafError

Options.OptionsContext.sub_options = Options.OptionsContext.recurse
Options.OptionsContext.tool_options = Context.Context.load
Options.Handler = Options.OptionsContext

Task.simple_task_type = Task.task_type_from_func = Task.task_factory
Task.Task.classes = Task.classes

def setitem(self, key, value):
	# compat: waf 1.5 used 'CC'-prefixed C flag variables (CCFLAGS...);
	# they were renamed to CFLAGS in 1.6, so strip the leading 'C' transparently
	if key.startswith('CCFLAGS'):
		key = key[1:]
	self.table[key] = value
ConfigSet.ConfigSet.__setitem__ = setitem

@TaskGen.feature('d')
@TaskGen.before('apply_incpaths')
def old_importpaths(self):
if getattr(self, 'importpaths', []):
self.includes = self.importpaths

from waflib import Context
eld = Context.load_tool
def load_tool(*k, **kw):
ret = eld(*k, **kw)
if 'set_options' in ret.__dict__:
if Logs.verbose:
Logs.warn('compat: rename "set_options" to options')
ret.options = ret.set_options
if 'detect' in ret.__dict__:
if Logs.verbose:
Logs.warn('compat: rename "detect" to "configure"')
ret.configure = ret.detect
return ret
Context.load_tool = load_tool

def get_curdir(self):
return self.path.abspath()
Context.Context.curdir = property(get_curdir, Utils.nada)

def get_srcdir(self):
return self.srcnode.abspath()
Configure.ConfigurationContext.srcdir = property(get_srcdir, Utils.nada)

def get_blddir(self):
return self.bldnode.abspath()
Configure.ConfigurationContext.blddir = property(get_blddir, Utils.nada)

Configure.ConfigurationContext.check_message_1 = Configure.ConfigurationContext.start_msg
Configure.ConfigurationContext.check_message_2 = Configure.ConfigurationContext.end_msg

rev = Context.load_module
def load_module(path, encoding=None):
ret = rev(path, encoding)
if 'set_options' in ret.__dict__:
if Logs.verbose:
Logs.warn('compat: rename "set_options" to "options" (%r)', path)
ret.options = ret.set_options
if 'srcdir' in ret.__dict__:
if Logs.verbose:
Logs.warn('compat: rename "srcdir" to "top" (%r)', path)
ret.top = ret.srcdir
if 'blddir' in ret.__dict__:
if Logs.verbose:
Logs.warn('compat: rename "blddir" to "out" (%r)', path)
ret.out = ret.blddir
Utils.g_module = Context.g_module
Options.launch_dir = Context.launch_dir
return ret
Context.load_module = load_module

old_post = TaskGen.task_gen.post
def post(self):
self.features = self.to_list(self.features)
if 'cc' in self.features:
if Logs.verbose:
Logs.warn('compat: the feature cc does not exist anymore (use "c")')
self.features.remove('cc')
self.features.append('c')
if 'cstaticlib' in self.features:
if Logs.verbose:
Logs.warn('compat: the feature cstaticlib does not exist anymore (use "cstlib" or "cxxstlib")')
self.features.remove('cstaticlib')
self.features.append(('cxx' in self.features) and 'cxxstlib' or 'cstlib')
if getattr(self, 'ccflags', None):
if Logs.verbose:
Logs.warn('compat: "ccflags" was renamed to "cflags"')
self.cflags = self.ccflags
return old_post(self)
TaskGen.task_gen.post = post

def waf_version(*k, **kw):
	# compat stub: waf_version() was removed in waf 1.6; warn instead of failing
	Logs.warn('wrong version (waf_version was removed in waf 1.6)')
Utils.waf_version = waf_version


import os
@TaskGen.feature('c', 'cxx', 'd')
@TaskGen.before('apply_incpaths', 'propagate_uselib_vars')
@TaskGen.after('apply_link', 'process_source')
def apply_uselib_local(self):
"""
process the uselib_local attribute
execute after apply_link because of the execution order set on 'link_task'
"""
env = self.env
from waflib.Tools.ccroot import stlink_task

# 1. the case of the libs defined in the project (visit ancestors first)
# the ancestors external libraries (uselib) will be prepended
self.uselib = self.to_list(getattr(self, 'uselib', []))
self.includes = self.to_list(getattr(self, 'includes', []))
names = self.to_list(getattr(self, 'uselib_local', []))
get = self.bld.get_tgen_by_name
seen = set()
seen_uselib = set()
tmp = Utils.deque(names) # consume a copy of the list of names
if tmp:
if Logs.verbose:
Logs.warn('compat: "uselib_local" is deprecated, replace by "use"')
while tmp:
lib_name = tmp.popleft()
# visit dependencies only once
if lib_name in seen:
continue

y = get(lib_name)
y.post()
seen.add(lib_name)

# object has ancestors to process (shared libraries): add them to the end of the list
if getattr(y, 'uselib_local', None):
for x in self.to_list(getattr(y, 'uselib_local', [])):
obj = get(x)
obj.post()
if getattr(obj, 'link_task', None):
if not isinstance(obj.link_task, stlink_task):
tmp.append(x)

# link task and flags
if getattr(y, 'link_task', None):

link_name = y.target[y.target.rfind(os.sep) + 1:]
if isinstance(y.link_task, stlink_task):
env.append_value('STLIB', [link_name])
else:
# some linkers can link against programs
env.append_value('LIB', [link_name])

# the order
self.link_task.set_run_after(y.link_task)

# for the recompilation
self.link_task.dep_nodes += y.link_task.outputs

# add the link path too
tmp_path = y.link_task.outputs[0].parent.bldpath()
if not tmp_path in env['LIBPATH']:
env.prepend_value('LIBPATH', [tmp_path])

# add ancestors uselib too - but only propagate those that have no staticlib defined
for v in self.to_list(getattr(y, 'uselib', [])):
if v not in seen_uselib:
seen_uselib.add(v)
if not env['STLIB_' + v]:
if not v in self.uselib:
self.uselib.insert(0, v)

# if the library task generator provides 'export_includes', add to the include path
# the export_includes must be a list of paths relative to the other library
if getattr(y, 'export_includes', None):
self.includes.extend(y.to_incnodes(y.export_includes))

@TaskGen.feature('cprogram', 'cxxprogram', 'cstlib', 'cxxstlib', 'cshlib', 'cxxshlib', 'dprogram', 'dstlib', 'dshlib')
@TaskGen.after('apply_link')
def apply_objdeps(self):
"add the .o files produced by some other object files in the same manner as uselib_local"
names = getattr(self, 'add_objects', [])
if not names:
return
names = self.to_list(names)

get = self.bld.get_tgen_by_name
seen = []
while names:
x = names[0]

# visit dependencies only once
if x in seen:
names = names[1:]
continue

# object does not exist ?
y = get(x)

# object has ancestors to process first ? update the list of names
if getattr(y, 'add_objects', None):
added = 0
lst = y.to_list(y.add_objects)
lst.reverse()
for u in lst:
if u in seen:
continue
added = 1
names = [u]+names
if added:
continue # list of names modified, loop

# safe to process the current object
y.post()
seen.append(x)

for t in getattr(y, 'compiled_tasks', []):
self.link_task.inputs.extend(t.outputs)

@TaskGen.after('apply_link')
def process_obj_files(self):
if not hasattr(self, 'obj_files'):
return
for x in self.obj_files:
node = self.path.find_resource(x)
self.link_task.inputs.append(node)

@TaskGen.taskgen_method
def add_obj_file(self, file):
"""Small example on how to link object files as if they were source
obj = bld.create_obj('cc')
obj.add_obj_file('foo.o')"""
if not hasattr(self, 'obj_files'):
self.obj_files = []
if not 'process_obj_files' in self.meths:
self.meths.append('process_obj_files')
self.obj_files.append(file)


old_define = Configure.ConfigurationContext.__dict__['define']

@Configure.conf
def define(self, key, val, quote=True, comment=''):
old_define(self, key, val, quote, comment)
if key.startswith('HAVE_'):
self.env[key] = 1

old_undefine = Configure.ConfigurationContext.__dict__['undefine']

@Configure.conf
def undefine(self, key, comment=''):
old_undefine(self, key, comment)
if key.startswith('HAVE_'):
self.env[key] = 0

# some people might want to use export_incdirs, but it was renamed
def set_incdirs(self, val):
Logs.warn('compat: change "export_incdirs" by "export_includes"')
self.export_includes = val
TaskGen.task_gen.export_incdirs = property(None, set_incdirs)

def install_dir(self, path):
if not path:
return []

destpath = Utils.subst_vars(path, self.env)

if self.is_install > 0:
Logs.info('* creating %s', destpath)
Utils.check_dir(destpath)
elif self.is_install < 0:
Logs.info('* removing %s', destpath)
try:
os.remove(destpath)
except OSError:
pass
Build.BuildContext.install_dir = install_dir

# before/after names: translate waf 1.5 task generator method names
repl = {
	'apply_core': 'process_source',
	'apply_lib_vars': 'process_source',
	'apply_obj_vars': 'propagate_uselib_vars',
	'exec_rule': 'process_rule',
}

def after(*k):
	"""compat: map waf 1.5 method names before delegating to TaskGen.after_method"""
	k = [repl.get(key, key) for key in k]
	return TaskGen.after_method(*k)
TaskGen.after = after # was missing: 'after' was defined but never bound to TaskGen

def before(*k):
	"""compat: map waf 1.5 method names before delegating to TaskGen.before_method"""
	k = [repl.get(key, key) for key in k]
	return TaskGen.before_method(*k)
TaskGen.before = before


+ 591
- 0
waflib/extras/cppcheck.py View File

@@ -0,0 +1,591 @@
#! /usr/bin/env python
# -*- encoding: utf-8 -*-
# Michel Mooij, michel.mooij7@gmail.com

"""
Tool Description
================
This module provides a waf wrapper (i.e. waftool) around the C/C++ source code
checking tool 'cppcheck'.

See http://cppcheck.sourceforge.net/ for more information on the cppcheck tool
itself.
Note that many linux distributions already provide a ready to install version
of cppcheck. On fedora, for instance, it can be installed using yum:

'sudo yum install cppcheck'


Usage
=====
In order to use this waftool simply add it to the 'options' and 'configure'
functions of your main waf script as shown in the example below:

def options(opt):
opt.load('cppcheck', tooldir='./waftools')

def configure(conf):
conf.load('cppcheck')

Note that example shown above assumes that the cppcheck waftool is located in
the sub directory named 'waftools'.

When configured as shown in the example above, cppcheck will automatically
perform a source code analysis on all C/C++ build tasks that have been
defined in your waf build system.

The example shown below for a C program will be used as input for cppcheck when
building the task.

def build(bld):
bld.program(name='foo', src='foobar.c')

The result of the source code analysis will be stored both as xml and html
files in the build location for the task. Should any error be detected by
cppcheck the build will be aborted and a link to the html report will be shown.
By default, one index.html file is created for each task generator. A global
index.html file can be obtained by setting the following variable
in the configuration section:

conf.env.CPPCHECK_SINGLE_HTML = False

When needed source code checking by cppcheck can be disabled per task, per
detected error or warning for a particular task. It can be also be disabled for
all tasks.

In order to exclude a task from source code checking add the skip option to the
task as shown below:

def build(bld):
bld.program(
name='foo',
src='foobar.c'
cppcheck_skip=True
)

When needed problems detected by cppcheck may be suppressed using a file
containing a list of suppression rules. The relative or absolute path to this
file can be added to the build task as shown in the example below:

bld.program(
name='bar',
src='foobar.c',
cppcheck_suppress='bar.suppress'
)

A cppcheck suppress file should contain one suppress rule per line. Each of
these rules will be passed as an '--suppress=<rule>' argument to cppcheck.

Dependencies
================
This waftool depends on the python pygments module, it is used for source code
syntax highlighting when creating the html reports. see http://pygments.org/ for
more information on this package.

Remarks
================
The generation of the html report is originally based on the cppcheck-htmlreport.py
script that comes shipped with the cppcheck tool.
"""

import sys
import xml.etree.ElementTree as ElementTree
from waflib import Task, TaskGen, Logs, Context, Options

PYGMENTS_EXC_MSG= '''
The required module 'pygments' could not be found. Please install it using your
platform package manager (e.g. apt-get or yum), using 'pip' or 'easy_install',
see 'http://pygments.org/download/' for installation instructions.
'''

try:
import pygments
from pygments import formatters, lexers
except ImportError as e:
Logs.warn(PYGMENTS_EXC_MSG)
raise e


def options(opt):
	"""
	Register the command-line options controlling the cppcheck tool.
	"""
	# (flag, dest, default, action, help) in registration order
	_opts = (
		('--cppcheck-skip', 'cppcheck_skip', False, 'store_true',
		 'do not check C/C++ sources (default=False)'),
		('--cppcheck-err-resume', 'cppcheck_err_resume', False, 'store_true',
		 'continue in case of errors (default=False)'),
		('--cppcheck-bin-enable', 'cppcheck_bin_enable',
		 'warning,performance,portability,style,unusedFunction', 'store',
		 "cppcheck option '--enable=' for binaries (default=warning,performance,portability,style,unusedFunction)"),
		('--cppcheck-lib-enable', 'cppcheck_lib_enable',
		 'warning,performance,portability,style', 'store',
		 "cppcheck option '--enable=' for libraries (default=warning,performance,portability,style)"),
		('--cppcheck-std-c', 'cppcheck_std_c', 'c99', 'store',
		 'cppcheck standard to use when checking C (default=c99)'),
		('--cppcheck-std-cxx', 'cppcheck_std_cxx', 'c++03', 'store',
		 'cppcheck standard to use when checking C++ (default=c++03)'),
		('--cppcheck-check-config', 'cppcheck_check_config', False, 'store_true',
		 'forced check for missing buildin include files, e.g. stdio.h (default=False)'),
		('--cppcheck-max-configs', 'cppcheck_max_configs', '20', 'store',
		 'maximum preprocessor (--max-configs) define iterations (default=20)'),
		('--cppcheck-jobs', 'cppcheck_jobs', '1', 'store',
		 'number of jobs (-j) to do the checking work (default=1)'),
	)
	for flag, dest, default, action, help_text in _opts:
		opt.add_option(flag, dest=dest, default=default, action=action, help=help_text)

def configure(conf):
	"""
	Transfer the cppcheck command-line options into the configuration
	environment and locate the cppcheck executable.
	"""
	opts = conf.options
	if opts.cppcheck_skip:
		conf.env.CPPCHECK_SKIP = [True]
	conf.env.CPPCHECK_STD_C = opts.cppcheck_std_c
	conf.env.CPPCHECK_STD_CXX = opts.cppcheck_std_cxx
	conf.env.CPPCHECK_MAX_CONFIGS = opts.cppcheck_max_configs
	conf.env.CPPCHECK_BIN_ENABLE = opts.cppcheck_bin_enable
	conf.env.CPPCHECK_LIB_ENABLE = opts.cppcheck_lib_enable
	conf.env.CPPCHECK_JOBS = opts.cppcheck_jobs
	# 'unusedFunction' (also implied by 'all') requires whole-program analysis,
	# which cppcheck cannot do when running multiple jobs
	enables = opts.cppcheck_bin_enable + ',' + opts.cppcheck_lib_enable
	if opts.cppcheck_jobs != '1' and ('unusedFunction' in enables or 'all' in enables):
		Logs.warn('cppcheck: unusedFunction cannot be used with multiple threads, cppcheck will disable it automatically')
	conf.find_program('cppcheck', var='CPPCHECK')

	# set to True to get a single index.html file
	conf.env.CPPCHECK_SINGLE_HTML = False

@TaskGen.feature('c')
@TaskGen.feature('cxx')
def cppcheck_execute(self):
if hasattr(self.bld, 'conf'):
return
if len(self.env.CPPCHECK_SKIP) or Options.options.cppcheck_skip:
return
if getattr(self, 'cppcheck_skip', False):
return
task = self.create_task('cppcheck')
task.cmd = _tgen_create_cmd(self)
task.fatal = []
if not Options.options.cppcheck_err_resume:
task.fatal.append('error')


def _tgen_create_cmd(self):
features = getattr(self, 'features', [])
std_c = self.env.CPPCHECK_STD_C
std_cxx = self.env.CPPCHECK_STD_CXX
max_configs = self.env.CPPCHECK_MAX_CONFIGS
bin_enable = self.env.CPPCHECK_BIN_ENABLE
lib_enable = self.env.CPPCHECK_LIB_ENABLE
jobs = self.env.CPPCHECK_JOBS

cmd = self.env.CPPCHECK
args = ['--inconclusive','--report-progress','--verbose','--xml','--xml-version=2']
args.append('--max-configs=%s' % max_configs)
args.append('-j %s' % jobs)

if 'cxx' in features:
args.append('--language=c++')
args.append('--std=%s' % std_cxx)
else:
args.append('--language=c')
args.append('--std=%s' % std_c)

if Options.options.cppcheck_check_config:
args.append('--check-config')

if set(['cprogram','cxxprogram']) & set(features):
args.append('--enable=%s' % bin_enable)
else:
args.append('--enable=%s' % lib_enable)

for src in self.to_list(getattr(self, 'source', [])):
if not isinstance(src, str):
src = repr(src)
args.append(src)
for inc in self.to_incnodes(self.to_list(getattr(self, 'includes', []))):
if not isinstance(inc, str):
inc = repr(inc)
args.append('-I%s' % inc)
for inc in self.to_incnodes(self.to_list(self.env.INCLUDES)):
if not isinstance(inc, str):
inc = repr(inc)
args.append('-I%s' % inc)
return cmd + args


class cppcheck(Task.Task):
quiet = True

def run(self):
stderr = self.generator.bld.cmd_and_log(self.cmd, quiet=Context.STDERR, output=Context.STDERR)
self._save_xml_report(stderr)
defects = self._get_defects(stderr)
index = self._create_html_report(defects)
self._errors_evaluate(defects, index)
return 0

def _save_xml_report(self, s):
'''use cppcheck xml result string, add the command string used to invoke cppcheck
and save as xml file.
'''
header = '%s\n' % s.splitlines()[0]
root = ElementTree.fromstring(s)
cmd = ElementTree.SubElement(root.find('cppcheck'), 'cmd')
cmd.text = str(self.cmd)
body = ElementTree.tostring(root).decode('us-ascii')
body_html_name = 'cppcheck-%s.xml' % self.generator.get_name()
if self.env.CPPCHECK_SINGLE_HTML:
body_html_name = 'cppcheck.xml'
node = self.generator.path.get_bld().find_or_declare(body_html_name)
node.write(header + body)

	def _get_defects(self, xml_string):
		'''evaluate the xml string returned by cppcheck (on stderr) and use it to create
		a list of defects.
		'''
		defects = []
		for error in ElementTree.fromstring(xml_string).iter('error'):
			defect = {}
			defect['id'] = error.get('id')
			defect['severity'] = error.get('severity')
			# escape '<' so the message can be embedded in the html report
			defect['msg'] = str(error.get('msg')).replace('<','&lt;')
			defect['verbose'] = error.get('verbose')
			for location in error.findall('location'):
				# NOTE(review): only the last <location> element is kept per defect;
				# the line number is converted to a 0-based index — presumably for
				# the source highlighter, confirm against _create_html_file
				defect['file'] = location.get('file')
				defect['line'] = str(int(location.get('line')) - 1)
			defects.append(defect)
		return defects

def _create_html_report(self, defects):
files, css_style_defs = self._create_html_files(defects)
index = self._create_html_index(files)
self._create_css_file(css_style_defs)
return index

def _create_html_files(self, defects):
sources = {}
defects = [defect for defect in defects if 'file' in defect]
for defect in defects:
name = defect['file']
if not name in sources:
sources[name] = [defect]
else:
sources[name].append(defect)

files = {}
css_style_defs = None
bpath = self.generator.path.get_bld().abspath()
names = list(sources.keys())
for i in range(0,len(names)):
name = names[i]
if self.env.CPPCHECK_SINGLE_HTML:
htmlfile = 'cppcheck/%i.html' % (i)
else:
htmlfile = 'cppcheck/%s%i.html' % (self.generator.get_name(),i)
errors = sources[name]
files[name] = { 'htmlfile': '%s/%s' % (bpath, htmlfile), 'errors': errors }
css_style_defs = self._create_html_file(name, htmlfile, errors)
return files, css_style_defs

def _create_html_file(self, sourcefile, htmlfile, errors):
	'''Render one annotated, syntax-highlighted source page of the report.

	:param sourcefile: path of the source file to highlight
	:param htmlfile: report-relative path of the page to write
	:param errors: defect dictionaries belonging to this source file
	:return: the pygments CSS style definitions used for highlighting
	'''
	name = self.generator.get_name()
	root = ElementTree.fromstring(CPPCHECK_HTML_FILE)
	title = root.find('head/title')
	title.text = 'cppcheck - report - %s' % name

	body = root.find('body')
	# NOTE(review): 'page' (and 'content' below) stay unbound if the
	# skeleton lacks the expected div ids; the CPPCHECK_HTML_FILE template
	# guarantees their presence
	for div in body.findall('div'):
		if div.get('id') == 'page':
			page = div
			break
	for div in page.findall('div'):
		if div.get('id') == 'header':
			h1 = div.find('h1')
			h1.text = 'cppcheck report - %s' % name
		if div.get('id') == 'menu':
			indexlink = div.find('a')
			# link back to the shared or per-task index, mirroring
			# the naming used by _create_html_index
			if self.env.CPPCHECK_SINGLE_HTML:
				indexlink.attrib['href'] = 'index.html'
			else:
				indexlink.attrib['href'] = 'index-%s.html' % name
		if div.get('id') == 'content':
			content = div
			srcnode = self.generator.bld.root.find_node(sourcefile)
			# only lines that carry a defect location are highlighted
			hl_lines = [e['line'] for e in errors if 'line' in e]
			formatter = CppcheckHtmlFormatter(linenos=True, style='colorful', hl_lines=hl_lines, lineanchors='line')
			formatter.errors = [e for e in errors if 'line' in e]
			css_style_defs = formatter.get_style_defs('.highlight')
			lexer = pygments.lexers.guess_lexer_for_filename(sourcefile, "")
			s = pygments.highlight(srcnode.read(), lexer, formatter)
			table = ElementTree.fromstring(s)
			content.append(table)

	s = ElementTree.tostring(root, method='html').decode('us-ascii')
	s = CCPCHECK_HTML_TYPE + s
	node = self.generator.path.get_bld().find_or_declare(htmlfile)
	node.write(s)
	return css_style_defs

def _create_html_index(self, files):
	'''Write the defect index page linking to all per-file report pages.

	:param files: mapping of source file name to report info, as returned
	              by _create_html_files
	:return: the waf node of the written index page
	'''
	name = self.generator.get_name()
	root = ElementTree.fromstring(CPPCHECK_HTML_FILE)
	title = root.find('head/title')
	title.text = 'cppcheck - report - %s' % name

	body = root.find('body')
	# NOTE(review): 'page' stays unbound if the skeleton lacks a div with
	# id "page"; the CPPCHECK_HTML_FILE template guarantees its presence
	for div in body.findall('div'):
		if div.get('id') == 'page':
			page = div
			break
	for div in page.findall('div'):
		if div.get('id') == 'header':
			h1 = div.find('h1')
			h1.text = 'cppcheck report - %s' % name
		if div.get('id') == 'content':
			content = div
			self._create_html_table(content, files)
		if div.get('id') == 'menu':
			indexlink = div.find('a')
			if self.env.CPPCHECK_SINGLE_HTML:
				indexlink.attrib['href'] = 'index.html'
			else:
				indexlink.attrib['href'] = 'index-%s.html' % name

	s = ElementTree.tostring(root, method='html').decode('us-ascii')
	s = CCPCHECK_HTML_TYPE + s
	# one shared index, or one index per task generator
	index_html_name = 'cppcheck/index-%s.html' % name
	if self.env.CPPCHECK_SINGLE_HTML:
		index_html_name = 'cppcheck/index.html'
	node = self.generator.path.get_bld().find_or_declare(index_html_name)
	node.write(s)
	return node

def _create_html_table(self, content, files):
	'''Append the defect summary table to the index page *content* element.

	:param content: the ElementTree element to append the table to
	:param files: mapping of source file name to report info, as returned
	              by _create_html_files
	'''
	table = ElementTree.fromstring(CPPCHECK_HTML_TABLE)
	for name, val in files.items():
		f = val['htmlfile']
		s = '<tr><td colspan="4"><a href="%s">%s</a></td></tr>\n' % (f,name)
		row = ElementTree.fromstring(s)
		table.append(row)

		# sort the defects by line number; defects without a source
		# location sort last. sys.maxint existed only in Python 2 and
		# raised AttributeError on Python 3 -- sys.maxsize works on both.
		errors = sorted(val['errors'], key=lambda e: int(e['line']) if 'line' in e else sys.maxsize)
		for e in errors:
			if 'line' not in e:
				s = '<tr><td></td><td>%s</td><td>%s</td><td>%s</td></tr>\n' % (e['id'], e['severity'], e['msg'])
			else:
				attr = ''
				if e['severity'] == 'error':
					attr = 'class="error"'
				s = '<tr><td><a href="%s#line-%s">%s</a></td>' % (f, e['line'], e['line'])
				s+= '<td>%s</td><td>%s</td><td %s>%s</td></tr>\n' % (e['id'], e['severity'], attr, e['msg'])
			row = ElementTree.fromstring(s)
			table.append(row)
	content.append(table)

def _create_css_file(self, css_style_defs):
	'''Write the report stylesheet, appending the pygments style
	definitions when they are available.
	'''
	css = str(CPPCHECK_CSS_FILE)
	if css_style_defs:
		css = "%s\n%s\n" % (css, css_style_defs)
	out = self.generator.path.get_bld().find_or_declare('cppcheck/style.css')
	out.write(css)

def _errors_evaluate(self, errors, http_index):
	'''Evaluate the detected defects: abort the build when a defect with a
	severity listed in self.fatal is present, otherwise only log the
	non-informational problems.

	:param errors: defect dictionaries as returned by _get_defects
	:param http_index: path of the generated HTML index page
	'''
	name = self.generator.get_name()
	fatal = self.fatal
	severity = [err['severity'] for err in errors]
	problems = [err for err in errors if err['severity'] != 'information']

	if set(fatal) & set(severity):
		exc = "\n"
		# message typo fixed: 'ccpcheck' -> 'cppcheck'
		exc += "\ncppcheck detected fatal error(s) in task '%s', see report for details:" % name
		exc += "\n file://%r" % (http_index)
		exc += "\n"
		self.generator.bld.fatal(exc)

	elif len(problems):
		msg = "\ncppcheck detected (possible) problem(s) in task '%s', see report for details:" % name
		msg += "\n file://%r" % http_index
		msg += "\n"
		Logs.error(msg)


class CppcheckHtmlFormatter(pygments.formatters.HtmlFormatter):
	# defects of the file being rendered; replaced per instance by
	# _create_html_file before highlighting starts
	errors = []

	def wrap(self, source, outfile):
		# append an inline error marker to every highlighted source line
		# that carries a cppcheck defect (pygments yields (1, text) pairs
		# for source lines, other values for markup)
		line_no = 1
		for i, t in super(CppcheckHtmlFormatter, self).wrap(source, outfile):
			# If this is a source code line we want to add a span tag at the end.
			if i == 1:
				for error in self.errors:
					if int(error['line']) == line_no:
						t = t.replace('\n', CPPCHECK_HTML_ERROR % error['msg'])
				line_no += 1
			yield i, t


# doctype line prepended to every generated report page
# NOTE(review): the name misspells 'CPPCHECK' but is referenced as-is
# elsewhere in this file, so it is kept unchanged
CCPCHECK_HTML_TYPE = \
	'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">\n'

# skeleton of every report page; the 'XXX' placeholders and the divs with
# ids page/header/menu/content are filled in by the _create_html_* methods
CPPCHECK_HTML_FILE = """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd" [<!ENTITY nbsp "&#160;">]>
<html>
<head>
<title>cppcheck - report - XXX</title>
<link href="style.css" rel="stylesheet" type="text/css" />
<style type="text/css">
</style>
</head>
<body class="body">
<div id="page-header">&nbsp;</div>
<div id="page">
<div id="header">
<h1>cppcheck report - XXX</h1>
</div>
<div id="menu">
<a href="index.html">Defect list</a>
</div>
<div id="content">
</div>
<div id="footer">
<div>cppcheck - a tool for static C/C++ code analysis</div>
<div>
Internet: <a href="http://cppcheck.sourceforge.net">http://cppcheck.sourceforge.net</a><br/>
Forum: <a href="http://apps.sourceforge.net/phpbb/cppcheck/">http://apps.sourceforge.net/phpbb/cppcheck/</a><br/>
IRC: #cppcheck at irc.freenode.net
</div>
&nbsp;
</div>
&nbsp;
</div>
<div id="page-footer">&nbsp;</div>
</body>
</html>
"""

# empty defect table that _create_html_table fills in on the index page
CPPCHECK_HTML_TABLE = """
<table>
<tr>
<th>Line</th>
<th>Id</th>
<th>Severity</th>
<th>Message</th>
</tr>
</table>
"""

# inline marker appended by CppcheckHtmlFormatter.wrap to each highlighted
# source line that carries a defect; %s receives the defect message
CPPCHECK_HTML_ERROR = \
	'<span style="background: #ffaaaa;padding: 3px;">&lt;--- %s</span>\n'

# base stylesheet of the report; the pygments style definitions are
# appended at build time by _create_css_file
CPPCHECK_CSS_FILE = """
body.body {
font-family: Arial;
font-size: 13px;
background-color: black;
padding: 0px;
margin: 0px;
}

.error {
font-family: Arial;
font-size: 13px;
background-color: #ffb7b7;
padding: 0px;
margin: 0px;
}

th, td {
min-width: 100px;
text-align: left;
}

#page-header {
clear: both;
width: 1200px;
margin: 20px auto 0px auto;
height: 10px;
border-bottom-width: 2px;
border-bottom-style: solid;
border-bottom-color: #aaaaaa;
}

#page {
width: 1160px;
margin: auto;
border-left-width: 2px;
border-left-style: solid;
border-left-color: #aaaaaa;
border-right-width: 2px;
border-right-style: solid;
border-right-color: #aaaaaa;
background-color: White;
padding: 20px;
}

#page-footer {
clear: both;
width: 1200px;
margin: auto;
height: 10px;
border-top-width: 2px;
border-top-style: solid;
border-top-color: #aaaaaa;
}

#header {
width: 100%;
height: 70px;
background-image: url(logo.png);
background-repeat: no-repeat;
background-position: left top;
border-bottom-style: solid;
border-bottom-width: thin;
border-bottom-color: #aaaaaa;
}

#menu {
margin-top: 5px;
text-align: left;
float: left;
width: 100px;
height: 300px;
}

#menu > a {
margin-left: 10px;
display: block;
}

#content {
float: left;
width: 1020px;
margin: 5px;
padding: 0px 10px 10px 10px;
border-left-style: solid;
border-left-width: thin;
border-left-color: #aaaaaa;
}

#footer {
padding-bottom: 5px;
padding-top: 5px;
border-top-style: solid;
border-top-width: thin;
border-top-color: #aaaaaa;
clear: both;
font-size: 10px;
}

#footer > div {
float: left;
width: 33%;
}

"""


+ 209
- 0
waflib/extras/cpplint.py View File

@@ -0,0 +1,209 @@
#! /usr/bin/env python
# encoding: utf-8
#
# written by Sylvain Rouquette, 2014

'''

This is an extra tool, not bundled with the default waf binary.
To add the cpplint tool to the waf file:
$ ./waf-light --tools=compat15,cpplint

this tool also requires cpplint for python.
If you have PIP, you can install it like this: pip install cpplint

When using this tool, the wscript will look like:

def options(opt):
opt.load('compiler_cxx cpplint')

def configure(conf):
conf.load('compiler_cxx cpplint')
# optional, you can also specify them on the command line
conf.env.CPPLINT_FILTERS = ','.join((
'-whitespace/newline', # c++11 lambda
'-readability/braces', # c++11 constructor
'-whitespace/braces', # c++11 constructor
'-build/storage_class', # c++11 for-range
'-whitespace/blank_line', # user pref
'-whitespace/labels' # user pref
))

def build(bld):
bld(features='cpplint', source='main.cpp', target='app')
# add include files, because they aren't usually built
bld(features='cpplint', source=bld.path.ant_glob('**/*.hpp'))
'''

from __future__ import absolute_import
import sys, re
import logging
from waflib import Errors, Task, TaskGen, Logs, Options, Node, Utils


# number of diagnostics at or above the --cpplint-break threshold; the build
# fails when the cpplint task returns a non-zero value
critical_errors = 0
# waf-flavoured presentation of one cpplint diagnostic
CPPLINT_FORMAT = '[CPPLINT] %(filename)s:\nline %(linenum)s, severity %(confidence)s, category: %(category)s\n%(message)s\n'
# regexes below are now raw strings: '\d' and '\[' are invalid escape
# sequences in ordinary strings (DeprecationWarning since Python 3.6)
RE_EMACS = re.compile(r'(?P<filename>.*):(?P<linenum>\d+): (?P<message>.*) \[(?P<category>.*)\] \[(?P<confidence>\d+)\]')
# one parser per cpplint --output flavour; 'waf' re-uses the emacs format
CPPLINT_RE = {
	'waf': RE_EMACS,
	'emacs': RE_EMACS,
	'vs7': re.compile(r'(?P<filename>.*)\((?P<linenum>\d+)\): (?P<message>.*) \[(?P<category>.*)\] \[(?P<confidence>\d+)\]'),
	'eclipse': re.compile(r'(?P<filename>.*):(?P<linenum>\d+): warning: (?P<message>.*) \[(?P<category>.*)\] \[(?P<confidence>\d+)\]'),
}
# command template expanded with the CPPLINT_* environment values
CPPLINT_STR = ('${CPPLINT} '
	'--verbose=${CPPLINT_LEVEL} '
	'--output=${CPPLINT_OUTPUT} '
	'--filter=${CPPLINT_FILTERS} '
	'--root=${CPPLINT_ROOT} '
	'--linelength=${CPPLINT_LINE_LENGTH} ')


def options(opt):
	"""Register the command-line options of the cpplint tool."""
	opt.add_option('--cpplint-filters', type='string', default='',
		dest='CPPLINT_FILTERS', help='add filters to cpplint')
	opt.add_option('--cpplint-length', type='int', default=80,
		dest='CPPLINT_LINE_LENGTH', help='specify the line length (default: 80)')
	opt.add_option('--cpplint-level', type='int', default=1,
		dest='CPPLINT_LEVEL', help='specify the log level (default: 1)')
	opt.add_option('--cpplint-break', type='int', default=5,
		dest='CPPLINT_BREAK', help='break the build if error >= level (default: 5)')
	opt.add_option('--cpplint-root', type='string', default='',
		dest='CPPLINT_ROOT', help='root directory used to derive header guard')
	opt.add_option('--cpplint-skip', action='store_true', default=False,
		dest='CPPLINT_SKIP', help='skip cpplint during build')
	opt.add_option('--cpplint-output', type='string', default='waf',
		dest='CPPLINT_OUTPUT', help='select output format (waf, emacs, vs7, eclipse)')


def configure(conf):
	"""Locate the cpplint executable; when it cannot be found, mark the
	tool as skipped instead of failing the configuration."""
	try:
		conf.find_program('cpplint', var='CPPLINT')
	except Errors.ConfigurationError:
		conf.env.CPPLINT_SKIP = True


class cpplint_formatter(Logs.formatter, object):
	'''Log formatter that reshapes raw cpplint diagnostics into the waf
	presentation and colors informational records.
	'''
	def __init__(self, fmt):
		# NOTE(review): deliberately bypasses Logs.formatter.__init__ to
		# force the cpplint record layout -- confirm Logs.formatter sets
		# no extra state that is needed here
		logging.Formatter.__init__(self, CPPLINT_FORMAT)
		self.fmt = fmt

	def format(self, rec):
		# only the 'waf' flavour rewrites the message; the other output
		# formats are passed through unchanged
		if self.fmt == 'waf':
			result = CPPLINT_RE[self.fmt].match(rec.msg).groupdict()
			rec.msg = CPPLINT_FORMAT % result
		# informational records are rendered cyan by waf's log handler
		if rec.levelno <= logging.INFO:
			rec.c1 = Logs.colors.CYAN
		return super(cpplint_formatter, self).format(rec)


class cpplint_handler(Logs.log_handler, object):
	'''Stream handler that always writes to the stream given at creation
	time instead of re-resolving stdout/stderr for every record.
	'''
	def __init__(self, stream=sys.stderr, **kw):
		super(cpplint_handler, self).__init__(stream, **kw)
		self.stream = stream

	def emit(self, rec):
		# pin the target stream, then delegate to waflib.Logs.log_handler
		rec.stream = self.stream
		self.emit_override(rec)
		self.flush()


class cpplint_wrapper(object):
	'''Context manager that captures the output of a failed cpplint
	subprocess call and routes each diagnostic line to the logger.
	'''
	def __init__(self, logger, threshold, fmt):
		self.logger = logger
		# confidence level at or above which a diagnostic counts as critical
		self.threshold = threshold
		self.fmt = fmt

	def __enter__(self):
		return self

	def __exit__(self, exc_type, exc_value, traceback):
		# cpplint exits non-zero when it finds problems; swallow the
		# CalledProcessError (return True) and log its captured output
		if isinstance(exc_value, Utils.subprocess.CalledProcessError):
			# NOTE(review): on Python 3, exc_value.output is bytes, so the
			# 'in' tests against str literals would raise TypeError --
			# confirm the caller decodes the output, or decode it here
			messages = [m for m in exc_value.output.splitlines()
				if 'Done processing' not in m
				and 'Total errors found' not in m]
			for message in messages:
				self.write(message)
			return True

	def write(self, message):
		'''Log one diagnostic line at a level derived from its confidence,
		counting lines at or above the threshold in critical_errors.
		'''
		global critical_errors
		result = CPPLINT_RE[self.fmt].match(message)
		if not result:
			return
		level = int(result.groupdict()['confidence'])
		if level >= self.threshold:
			critical_errors += 1
		# confidence 1-2 -> info, 3-4 -> warning, 5 -> error
		if level <= 2:
			self.logger.info(message)
		elif level <= 4:
			self.logger.warning(message)
		else:
			self.logger.error(message)


# lazily created singleton logger shared by all cpplint tasks
cpplint_logger = None
def get_cpplint_logger(fmt):
	"""Return the shared cpplint logger, creating it on first use."""
	global cpplint_logger
	if cpplint_logger is None:
		logger = logging.getLogger('cpplint')
		handler = cpplint_handler()
		handler.setFormatter(cpplint_formatter(fmt))
		logger.addHandler(handler)
		logger.setLevel(logging.DEBUG)
		cpplint_logger = logger
	return cpplint_logger


class cpplint(Task.Task):
	'''Task that runs cpplint on one source file and logs its diagnostics.
	The task returns the number of critical errors, failing the build when
	it is non-zero.
	'''
	color = 'PINK'

	def __init__(self, *k, **kw):
		super(cpplint, self).__init__(*k, **kw)

	def run(self):
		global critical_errors
		with cpplint_wrapper(get_cpplint_logger(self.env.CPPLINT_OUTPUT), self.env.CPPLINT_BREAK, self.env.CPPLINT_OUTPUT):
			params = {key: str(self.env[key]) for key in self.env if 'CPPLINT_' in key}
			# 'waf' is a presentation flavour of this tool; cpplint itself
			# only understands 'emacs'. The original code compared with
			# 'is', which tests object identity and only worked through
			# CPython string interning -- use equality instead.
			if params['CPPLINT_OUTPUT'] == 'waf':
				params['CPPLINT_OUTPUT'] = 'emacs'
			params['CPPLINT'] = self.env.get_flat('CPPLINT')
			cmd = Utils.subst_vars(CPPLINT_STR, params)
			env = self.env.env or None
			Utils.subprocess.check_output(cmd + self.inputs[0].abspath(),
				stderr=Utils.subprocess.STDOUT,
				env=env, shell=True)
		return critical_errors

@TaskGen.extension('.h', '.hh', '.hpp', '.hxx')
def cpplint_includes(self, node):
	# header files need an extension handler so waf accepts them as
	# sources; the cpplint tasks themselves are created by post_cpplint
	pass

@TaskGen.feature('cpplint')
@TaskGen.before_method('process_source')
def post_cpplint(self):
	'''Create one cpplint task per source file of the task generator.

	On first use the CPPLINT_* command-line option values are copied into
	the environment (without overriding values already set by configure).
	Nothing is done when cpplint is skipped or the output format is not
	one of the supported flavours.
	'''
	if not self.env.CPPLINT_INITIALIZED:
		for key, value in Options.options.__dict__.items():
			if not key.startswith('CPPLINT_') or self.env[key]:
				continue
			self.env[key] = value
		self.env.CPPLINT_INITIALIZED = True

	if self.env.CPPLINT_SKIP:
		return

	# idiom: 'not in' instead of 'not x in'
	if self.env.CPPLINT_OUTPUT not in CPPLINT_RE:
		return

	for src in self.to_list(getattr(self, 'source', [])):
		if isinstance(src, Node.Node):
			node = src
		else:
			node = self.path.find_or_declare(src)
		if not node:
			self.bld.fatal('Could not find %r' % src)
		self.create_task('cpplint', node)

+ 227
- 0
waflib/extras/cross_gnu.py View File

@@ -0,0 +1,227 @@
#!/usr/bin/python
# -*- coding: utf-8 vi:ts=4:noexpandtab
# Tool to provide dedicated variables for cross-compilation

__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
__copyright__ = "Jérôme Carretero, 2014"

"""
This tool allows to use environment variables to define cross-compilation
variables intended for build variants.

The variables are obtained from the environment in 3 ways:

1. By defining CHOST, they can be derived as ${CHOST}-${TOOL}
2. By defining HOST_x
3. By defining ${CHOST//-/_}_x

else one can set ``cfg.env.CHOST`` in ``wscript`` before loading ``cross_gnu``.

Usage:

- In your build script::

def configure(cfg):
...
for variant in x_variants:
setenv(variant)
conf.load('cross_gnu')
conf.xcheck_host_var('POUET')
...


- Then::

CHOST=arm-hardfloat-linux-gnueabi waf configure
env arm-hardfloat-linux-gnueabi-CC="clang -..." waf configure
CFLAGS=... CHOST=arm-hardfloat-linux-gnueabi HOST_CFLAGS=-g waf configure
HOST_CC="clang -..." waf configure

This example ``wscript`` compiles to Microchip PIC (xc16-gcc-xyz must be in PATH):

.. code:: python

from waflib import Configure

#from https://gist.github.com/rpuntaie/2bddfb5d7b77db26415ee14371289971
import waf_variants

variants='pc fw/variant1 fw/variant2'.split()

top = "."
out = "../build"

PIC = '33FJ128GP804' #dsPICxxx

@Configure.conf
def gcc_modifier_xc16(cfg):
v = cfg.env
v.cprogram_PATTERN = '%s.elf'
v.LINKFLAGS_cprogram = ','.join(['-Wl','','','--defsym=__MPLAB_BUILD=0','','--script=p'+PIC+'.gld',
'--stack=16','--check-sections','--data-init','--pack-data','--handles','--isr','--no-gc-sections',
'--fill-upper=0','--stackguard=16','--no-force-link','--smart-io']) #,'--report-mem'])
v.CFLAGS_cprogram=['-mcpu='+PIC,'-omf=elf','-mlarge-code','-msmart-io=1',
'-msfr-warn=off','-mno-override-inline','-finline','-Winline']

def configure(cfg):
if 'fw' in cfg.variant: #firmware
cfg.env.DEST_OS = 'xc16' #cfg.env.CHOST = 'xc16' #works too
cfg.load('c cross_gnu') #cfg.env.CHOST becomes ['xc16']
...
else: #configure for pc SW
...

def build(bld):
if 'fw' in bld.variant: #firmware
bld.program(source='maintst.c', target='maintst');
bld(source='maintst.elf', target='maintst.hex', rule="xc16-bin2hex ${SRC} -a -omf=elf")
else: #build for pc SW
...

"""

import os
from waflib import Utils, Configure
from waflib.Tools import ccroot, gcc

try:
from shlex import quote
except ImportError:
from pipes import quote

def get_chost_stuff(conf):
	"""
	Return a tuple (chost, chost_envar) derived from conf.env.CHOST:
	the target triplet and its environment-variable form with dashes
	replaced by underscores. Both are None when CHOST is not set.
	"""
	if not conf.env.CHOST:
		return None, None
	chost = conf.env.CHOST[0]
	return chost, chost.replace('-', '_')


@Configure.conf
def xcheck_var(conf, name, wafname=None, cross=False):
	'''Import the environment variable *name* into conf.env[wafname]
	(default: same name), normalizing a string value to a list. Existing
	conf.env values take precedence over the process environment.
	'''
	wafname = wafname or name

	if wafname in conf.env:
		value = conf.env[wafname]
		if isinstance(value, str):
			value = [value]
	else:
		envar = os.environ.get(name)
		if not envar:
			return
		# the guard above already rules out the empty string, so the value
		# can be split unconditionally (the former "if envar != ''"
		# conditional was dead code)
		value = Utils.to_list(envar)

	conf.env[wafname] = value
	if cross:
		pretty = 'cross-compilation %s' % wafname
	else:
		pretty = wafname
	conf.msg('Will use %s' % pretty, " ".join(quote(x) for x in value))

@Configure.conf
def xcheck_host_prog(conf, name, tool, wafname=None):
	'''Set the cross-compilation program conf.env[wafname] (default: *name*).

	Lookup order:
	1. the ${CHOST//-/_}_<name> environment variable (appended to conf.env)
	2. the HOST_<name> environment variable (replaces conf.env)
	3. an existing conf.env value (kept as-is)
	4. derived from CHOST as "<chost>-<tool>"
	'''
	wafname = wafname or name

	chost, chost_envar = get_chost_stuff(conf)

	specific = None
	if chost:
		specific = os.environ.get('%s_%s' % (chost_envar, name))

	if specific:
		value = Utils.to_list(specific)
		# NOTE(review): the CHOST-specific variable appends while
		# HOST_<name> below replaces -- confirm the asymmetry is intended
		conf.env[wafname] += value
		conf.msg('Will use cross-compilation %s from %s_%s' % (name, chost_envar, name),
		 " ".join(quote(x) for x in value))
		return
	else:
		envar = os.environ.get('HOST_%s' % name)
		if envar is not None:
			value = Utils.to_list(envar)
			conf.env[wafname] = value
			conf.msg('Will use cross-compilation %s from HOST_%s' % (name, name),
			 " ".join(quote(x) for x in value))
			return

	# keep a value already present in the environment (e.g. from a wscript)
	if conf.env[wafname]:
		return

	value = None
	if chost:
		value = '%s-%s' % (chost, tool)

	if value:
		conf.env[wafname] = value
		conf.msg('Will use cross-compilation %s from CHOST' % wafname, value)

@Configure.conf
def xcheck_host_envar(conf, name, wafname=None):
	'''Import a flag-style variable such as CFLAGS for cross-compilation.

	The ${CHOST//-/_}_<name> environment variable takes precedence and is
	appended to conf.env[wafname]; otherwise HOST_<name> replaces it.
	'''
	wafname = wafname or name

	chost, chost_envar = get_chost_stuff(conf)

	specific = None
	if chost:
		specific = os.environ.get('%s_%s' % (chost_envar, name))

	if specific:
		value = Utils.to_list(specific)
		conf.env[wafname] += value
		conf.msg('Will use cross-compilation %s from %s_%s' \
		 % (name, chost_envar, name),
		 " ".join(quote(x) for x in value))
		return


	envar = os.environ.get('HOST_%s' % name)
	if envar is None:
		return

	# a set-but-empty variable is kept as [''] so that an explicit
	# "HOST_x=" still overrides any default
	value = Utils.to_list(envar) if envar != '' else [envar]

	conf.env[wafname] = value
	conf.msg('Will use cross-compilation %s from HOST_%s' % (name, name),
	 " ".join(quote(x) for x in value))


@Configure.conf
def xcheck_host(conf):
	'''Gather all cross-compilation variables from the environment:
	resolve CHOST first, then the toolchain programs, then the flags.
	'''
	conf.xcheck_var('CHOST', cross=True)
	# fall back to DEST_OS so a wscript can set conf.env.DEST_OS instead
	conf.env.CHOST = conf.env.CHOST or [conf.env.DEST_OS]
	conf.env.DEST_OS = conf.env.CHOST[0].replace('-','_')
	conf.xcheck_host_prog('CC', 'gcc')
	conf.xcheck_host_prog('CXX', 'g++')
	conf.xcheck_host_prog('LINK_CC', 'gcc')
	conf.xcheck_host_prog('LINK_CXX', 'g++')
	conf.xcheck_host_prog('AR', 'ar')
	conf.xcheck_host_prog('AS', 'as')
	conf.xcheck_host_prog('LD', 'ld')
	conf.xcheck_host_envar('CFLAGS')
	conf.xcheck_host_envar('CXXFLAGS')
	conf.xcheck_host_envar('LDFLAGS', 'LINKFLAGS')
	conf.xcheck_host_envar('LIB')
	conf.xcheck_host_envar('PKG_CONFIG_LIBDIR')
	conf.xcheck_host_envar('PKG_CONFIG_PATH')

	# child processes (e.g. pkg-config) must see the chosen search paths
	if not conf.env.env:
		conf.env.env = {}
	conf.env.env.update(os.environ)
	if conf.env.PKG_CONFIG_LIBDIR:
		conf.env.env['PKG_CONFIG_LIBDIR'] = conf.env.PKG_CONFIG_LIBDIR[0]
	if conf.env.PKG_CONFIG_PATH:
		conf.env.env['PKG_CONFIG_PATH'] = conf.env.PKG_CONFIG_PATH[0]

def configure(conf):
	"""
	Configuration example for gcc, it will not work for g++/clang/clang++
	"""
	# resolve the cross toolchain, then run the standard gcc setup steps
	conf.xcheck_host()
	conf.gcc_common_flags()
	conf.gcc_modifier_platform()
	conf.cc_load_tools()
	conf.cc_add_flags()
	conf.link_add_flags()

+ 146
- 0
waflib/extras/cython.py View File

@@ -0,0 +1,146 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2010-2015

import re
from waflib import Task, Logs
from waflib.TaskGen import extension

cy_api_pat = re.compile(r'\s*?cdef\s*?(public|api)\w*')
re_cyt = re.compile(r"""
(?:from\s+(\w+)\s+)? # optionally match "from foo" and capture foo
c?import\s(\w+|[*]) # require "import bar" and capture bar
""", re.M | re.VERBOSE)

@extension('.pyx')
def add_cython_file(self, node):
	"""
	Process a *.pyx* file given in the list of source files. No additional
	feature is required::

		def build(bld):
			bld(features='c cshlib pyext', source='main.c foo.pyx', target='app')
	"""
	# C++ task generators get C++ output and the --cplus flag
	ext = '.cc' if 'cxx' in self.features else '.c'
	if ext == '.cc':
		self.env.append_unique('CYTHONFLAGS', '--cplus')

	for inc in getattr(self, 'cython_includes', []):
		# TODO re-use these nodes in "scan" below
		node_dir = self.path.find_dir(inc)
		if node_dir:
			self.env.append_unique('CYTHONFLAGS', '-I%s' % node_dir.abspath())

	tsk = self.create_task('cython', node, node.change_ext(ext))
	self.source += tsk.outputs

class cython(Task.Task):
	# translate a .pyx file into C/C++ code
	run_str = '${CYTHON} ${CYTHONFLAGS} -o ${TGT[0].abspath()} ${SRC}'
	color = 'GREEN'

	vars = ['INCLUDES']
	"""
	Rebuild whenever the INCLUDES change. The variables such as CYTHONFLAGS will be appended
	by the metaclass.
	"""

	ext_out = ['.h']
	"""
	The creation of a .h file is known only after the build has begun, so it is not
	possible to compute a build order just by looking at the task inputs/outputs.
	"""

	def runnable_status(self):
		"""
		Perform a double-check to add the headers created by cython
		to the output nodes. The scanner is executed only when the cython task
		must be executed (optimization).
		"""
		ret = super(cython, self).runnable_status()
		if ret == Task.ASK_LATER:
			return ret
		# 'header:'-prefixed entries were stored by scan() below
		for x in self.generator.bld.raw_deps[self.uid()]:
			if x.startswith('header:'):
				self.outputs.append(self.inputs[0].parent.find_or_declare(x.replace('header:', '')))
		return super(cython, self).runnable_status()

	def post_run(self):
		# cython may not create a predicted header (e.g. no public/api
		# declarations remained); create an empty one so the build graph
		# stays consistent
		for x in self.outputs:
			if x.name.endswith('.h'):
				if not x.exists():
					if Logs.verbose:
						Logs.warn('Expected %r', x.abspath())
					x.write('')
		return Task.Task.post_run(self)

	def scan(self):
		"""
		Return the dependent files (.pxd) by looking in the include folders.
		Put the headers to generate in the custom list "bld.raw_deps".
		To inspect the scanner results use::

			$ waf clean build --zones=deps
		"""
		node = self.inputs[0]
		txt = node.read()

		# collect the imported module names ("from foo cimport"/"import bar")
		mods = []
		for m in re_cyt.finditer(txt):
			if m.group(1): # matches "from foo import bar"
				mods.append(m.group(1))
			else:
				mods.append(m.group(2))

		Logs.debug('cython: mods %r', mods)
		incs = getattr(self.generator, 'cython_includes', [])
		incs = [self.generator.path.find_dir(x) for x in incs]
		incs.append(node.parent)

		# resolve each module to a .pxd file in the include folders
		found = []
		missing = []
		for x in mods:
			for y in incs:
				k = y.find_resource(x + '.pxd')
				if k:
					found.append(k)
					break
			else:
				missing.append(x)

		# the cython file implicitly depends on a pxd file that might be present
		implicit = node.parent.find_resource(node.name[:-3] + 'pxd')
		if implicit:
			found.append(implicit)

		Logs.debug('cython: found %r', found)

		# Now the .h created - store them in bld.raw_deps for later use
		has_api = False
		has_public = False
		for l in txt.splitlines():
			if cy_api_pat.match(l):
				if ' api ' in l:
					has_api = True
				if ' public ' in l:
					has_public = True
		name = node.name.replace('.pyx', '')
		if has_api:
			missing.append('header:%s_api.h' % name)
		if has_public:
			missing.append('header:%s.h' % name)

		return (found, missing)

def options(ctx):
	"""Register the --cython-flags command-line option."""
	ctx.add_option('--cython-flags', action='store', default='',
		help='space separated list of flags to pass to cython')

def configure(ctx):
	"""Ensure a compiler and the python tool are loaded, then locate the
	cython binary and pick up the command-line flags."""
	if not (ctx.env.CC or ctx.env.CXX):
		ctx.fatal('Load a C/C++ compiler first')
	if not ctx.env.PYTHON:
		ctx.fatal('Load the python tool first!')
	ctx.find_program('cython', var='CYTHON')
	if hasattr(ctx.options, 'cython_flags'):
		ctx.env.CYTHONFLAGS = ctx.options.cython_flags


Some files were not shown because too many files changed in this diff

Loading…
Cancel
Save