# ==== waflib ConfigSet module (diff hunk header removed) ====
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2005-2010 (ita) | |||||
""" | |||||
ConfigSet: a special dict | |||||
The values put in :py:class:`ConfigSet` must be lists | |||||
""" | |||||
import copy, re, os | |||||
from waflib import Logs, Utils | |||||
# Pattern for parsing serialized ConfigSet files: an optional run of '#'
# (commented-out entry), the key, ' = ', then the repr() of the value.
# Raw string literal: the original non-raw '\ ' is an invalid escape
# (DeprecationWarning since Python 3.6, a SyntaxError in newer versions);
# inside a raw string, the regex escape '\ ' simply matches a space.
re_imp = re.compile(r'^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)
class ConfigSet(object):
	"""
	A dict that honor serialization and parent relationships. The serialization format
	is human-readable (python-like) and performed by using eval() and repr().
	For high performance prefer pickle. Do not store functions as they are not serializable.
	The values can be accessed by attributes or by keys::
	from waflib.ConfigSet import ConfigSet
	env = ConfigSet()
	env.FOO = 'test'
	env['FOO'] = 'test'
	"""
	__slots__ = ('table', 'parent')
	def __init__(self, filename=None):
		"""
		:param filename: optional file to load the values from (see :py:meth:`ConfigSet.load`)
		:type filename: string
		"""
		self.table = {}
		"""
		Internal dict holding the object values
		"""
		#self.parent = None
		# NOTE: 'parent' is deliberately left unset; the AttributeError raised
		# when it is missing is how the rest of this class detects "no parent".
		if filename:
			self.load(filename)
	def __contains__(self, key):
		"""
		Enable the *in* syntax::
		if 'foo' in env:
		print(env['foo'])
		"""
		if key in self.table: return True
		try: return self.parent.__contains__(key)
		except AttributeError: return False # parent may not exist
	def keys(self):
		"""Dict interface: sorted union of the keys of self and of all its parents"""
		keys = set()
		cur = self
		while cur:
			keys.update(cur.table.keys())
			cur = getattr(cur, 'parent', None)
		keys = list(keys)
		keys.sort()
		return keys
	def __str__(self):
		"""Text representation of the ConfigSet (for debugging purposes)"""
		return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in self.keys()])
	def __getitem__(self, key):
		"""
		Dictionary interface: get value from key. Returns an empty list (never
		raises KeyError) when the key is absent from self and every parent::
		def configure(conf):
		conf.env['foo'] = {}
		print(env['foo'])
		"""
		try:
			while 1:
				x = self.table.get(key, None)
				if not x is None:
					return x
				# walk up the parent chain; a missing parent ends the loop
				self = self.parent
		except AttributeError:
			return []
	def __setitem__(self, key, value):
		"""
		Dictionary interface: set a value (always stored locally, never in a parent)
		"""
		self.table[key] = value
	def __delitem__(self, key):
		"""
		Dictionary interface: 'delete' a value by storing an empty list locally,
		which also masks any value defined in a parent
		"""
		self[key] = []
	def __getattr__(self, name):
		"""
		Attribute access provided for convenience. The following forms are equivalent::
		def configure(conf):
		conf.env.value
		conf.env['value']
		"""
		if name in self.__slots__:
			# only reached when 'table'/'parent' is not set on the instance;
			# this call raises AttributeError, which callers depend on
			return object.__getattr__(self, name)
		else:
			return self[name]
	def __setattr__(self, name, value):
		"""
		Attribute access provided for convenience. The following forms are equivalent::
		def configure(conf):
		conf.env.value = x
		env['value'] = x
		"""
		if name in self.__slots__:
			object.__setattr__(self, name, value)
		else:
			self[name] = value
	def __delattr__(self, name):
		"""
		Attribute access provided for convenience. The following forms are equivalent::
		def configure(conf):
		del env.value
		del env['value']
		"""
		if name in self.__slots__:
			object.__delattr__(self, name)
		else:
			del self[name]
	def derive(self):
		"""
		Returns a new ConfigSet deriving from self. The copy returned
		will be a shallow copy::
		from waflib.ConfigSet import ConfigSet
		env = ConfigSet()
		env.append_value('CFLAGS', ['-O2'])
		child = env.derive()
		child.CFLAGS.append('test') # warning! this will modify 'env'
		child.CFLAGS = ['-O3'] # new list, ok
		child.append_value('CFLAGS', ['-O3']) # ok
		Use :py:func:`ConfigSet.detach` to detach the child from the parent.
		"""
		newenv = ConfigSet()
		newenv.parent = self
		return newenv
	def detach(self):
		"""
		Detach self from its parent (if existing)
		Modifying the parent :py:class:`ConfigSet` will not change the current object
		Modifying this :py:class:`ConfigSet` will not modify the parent one.
		"""
		tbl = self.get_merged_dict()
		try:
			delattr(self, 'parent')
		except AttributeError:
			pass
		else:
			keys = tbl.keys()
			for x in keys:
				# deep copies so that the parent's values are never shared
				tbl[x] = copy.deepcopy(tbl[x])
			self.table = tbl
	def get_flat(self, key):
		"""
		Return a value as a string. If the input is a list, the value returned is space-separated.
		:param key: key to use
		:type key: string
		"""
		s = self[key]
		if isinstance(s, str): return s
		return ' '.join(s)
	def _get_list_value_for_modification(self, key):
		"""
		Return a list value for further modification.
		The list may be modified inplace and there is no need to do this afterwards::
		self.table[var] = value
		"""
		try:
			value = self.table[key]
		except KeyError:
			# not set locally: copy the parent's value (if any) so that
			# in-place modifications do not leak into the parent
			try: value = self.parent[key]
			except AttributeError: value = []
			if isinstance(value, list):
				value = value[:]
			else:
				value = [value]
		else:
			# set locally: promote scalars to a one-element list in place
			if not isinstance(value, list):
				value = [value]
		self.table[key] = value
		return value
	def append_value(self, var, val):
		"""
		Appends a value to the specified config key::
		def build(bld):
		bld.env.append_value('CFLAGS', ['-O2'])
		The value must be a list or a tuple
		"""
		if isinstance(val, str): # if there were string everywhere we could optimize this
			val = [val]
		current_value = self._get_list_value_for_modification(var)
		current_value.extend(val)
	def prepend_value(self, var, val):
		"""
		Prepends a value to the specified item::
		def configure(conf):
		conf.env.prepend_value('CFLAGS', ['-O2'])
		The value must be a list or a tuple
		"""
		if isinstance(val, str):
			val = [val]
		self.table[var] = val + self._get_list_value_for_modification(var)
	def append_unique(self, var, val):
		"""
		Append a value to the specified item only if it's not already present::
		def build(bld):
		bld.env.append_unique('CFLAGS', ['-O2', '-g'])
		The value must be a list or a tuple
		"""
		if isinstance(val, str):
			val = [val]
		current_value = self._get_list_value_for_modification(var)
		for x in val:
			if x not in current_value:
				current_value.append(x)
	def get_merged_dict(self):
		"""
		Compute the merged dictionary from the fusion of self and all its parent
		:rtype: dict
		"""
		table_list = []
		env = self
		# collect tables from the root parent down to self, so that values
		# in self override those of the parents during the update() pass
		while 1:
			table_list.insert(0, env.table)
			try: env = env.parent
			except AttributeError: break
		merged_table = {}
		for table in table_list:
			merged_table.update(table)
		return merged_table
	def store(self, filename):
		"""
		Write the :py:class:`ConfigSet` data into a file. See :py:meth:`ConfigSet.load` for reading such files.
		:param filename: file to use
		:type filename: string
		"""
		try:
			os.makedirs(os.path.split(filename)[0])
		except OSError:
			# the target directory may already exist
			pass
		buf = []
		merged_table = self.get_merged_dict()
		keys = list(merged_table.keys())
		keys.sort()
		try:
			fun = ascii
		except NameError:
			# Python 2 has no ascii(); repr() is the equivalent there
			fun = repr
		for k in keys:
			if k != 'undo_stack':
				# the stash/revert history is transient and not serialized
				buf.append('%s = %s\n' % (k, fun(merged_table[k])))
		Utils.writef(filename, ''.join(buf))
	def load(self, filename):
		"""
		Retrieve the :py:class:`ConfigSet` data from a file. See :py:meth:`ConfigSet.store` for writing such files
		:param filename: file to use
		:type filename: string
		"""
		tbl = self.table
		code = Utils.readf(filename, m='rU')
		# NOTE: values are restored with eval(); only load files written by
		# ConfigSet.store (or otherwise trusted), never untrusted input
		for m in re_imp.finditer(code):
			g = m.group
			tbl[g(2)] = eval(g(3))
		Logs.debug('env: %s' % str(self.table))
	def update(self, d):
		"""
		Dictionary interface: replace values from another dict
		:param d: object to use the value from
		:type d: dict-like object
		"""
		for k, v in d.items():
			self[k] = v
	def stash(self):
		"""
		Store the object state, to provide a kind of transaction support::
		env = ConfigSet()
		env.stash()
		try:
		env.append_value('CFLAGS', '-O3')
		call_some_method(env)
		finally:
		env.revert()
		The history is kept in a stack, and is lost during the serialization by :py:meth:`ConfigSet.store`
		"""
		orig = self.table
		tbl = self.table = self.table.copy()
		for x in tbl.keys():
			# deep copies so later in-place edits cannot corrupt the snapshot
			tbl[x] = copy.deepcopy(tbl[x])
		self.undo_stack = self.undo_stack + [orig]
	def revert(self):
		"""
		Reverts the object to a previous state. See :py:meth:`ConfigSet.stash`
		"""
		self.table = self.undo_stack.pop(-1)
# ==== waflib Configure module (diff hunk header removed) ====
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2005-2010 (ita) | |||||
""" | |||||
Configuration system | |||||
A :py:class:`waflib.Configure.ConfigurationContext` instance is created when ``waf configure`` is called, it is used to: | |||||
* create data dictionaries (ConfigSet instances) | |||||
* store the list of modules to import | |||||
* hold configuration routines such as ``find_program``, etc | |||||
""" | |||||
import os, shlex, sys, time, re, shutil | |||||
from waflib import ConfigSet, Utils, Options, Logs, Context, Build, Errors | |||||
BREAK = 'break'
"""In case of a configuration error, break"""
CONTINUE = 'continue'
"""In case of a configuration error, continue"""
WAF_CONFIG_LOG = 'config.log'
"""Name of the configuration log file"""
autoconfig = False
"""Execute the configuration automatically"""
# header written at the top of the configuration log; the %(...)s fields are
# filled from the local variables of ConfigurationContext.execute() via vars()
conf_template = '''# project %(app)s configured on %(now)s by
# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
# using %(args)s
#'''
class ConfigurationContext(Context.Context):
	'''configures the project'''
	cmd = 'configure'
	error_handlers = []
	"""
	Additional functions to handle configuration errors
	"""
	def __init__(self, **kw):
		super(ConfigurationContext, self).__init__(**kw)
		self.environ = dict(os.environ)
		self.all_envs = {}
		self.top_dir = None
		self.out_dir = None
		self.tools = [] # tools loaded in the configuration, and that will be loaded when building
		self.hash = 0
		self.files = []
		self.tool_cache = []
		self.setenv('')
	def setenv(self, name, env=None):
		"""
		Set a new config set for conf.env. If a config set of that name already exists,
		recall it without modification.
		The name is the filename prefix to save to ``c4che/NAME_cache.py``, and it
		is also used as *variants* by the build commands.
		Though related to variants, whatever kind of data may be stored in the config set::
		def configure(cfg):
		cfg.env.ONE = 1
		cfg.setenv('foo')
		cfg.env.ONE = 2
		def build(bld):
		2 == bld.env_of_name('foo').ONE
		:param name: name of the configuration set
		:type name: string
		:param env: ConfigSet to copy, or an empty ConfigSet is created
		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
		"""
		if name not in self.all_envs or env:
			if not env:
				env = ConfigSet.ConfigSet()
				self.prepare_env(env)
			else:
				# derive so that the caller's ConfigSet is not modified
				env = env.derive()
			self.all_envs[name] = env
		self.variant = name
	def get_env(self):
		"""Getter for the env property"""
		return self.all_envs[self.variant]
	def set_env(self, val):
		"""Setter for the env property"""
		self.all_envs[self.variant] = val
	env = property(get_env, set_env)
	def init_dirs(self):
		"""
		Initialize the project directory and the build directory
		"""
		# resolution order for 'top': explicit attribute, command line,
		# wscript global, then the directory of the current wscript
		top = self.top_dir
		if not top:
			top = Options.options.top
		if not top:
			top = getattr(Context.g_module, Context.TOP, None)
		if not top:
			top = self.path.abspath()
		top = os.path.abspath(top)
		self.srcnode = (os.path.isabs(top) and self.root or self.path).find_dir(top)
		assert(self.srcnode)
		out = self.out_dir
		if not out:
			out = Options.options.out
		if not out:
			out = getattr(Context.g_module, Context.OUT, None)
		if not out:
			out = Options.lockfile.replace('.lock-waf_%s_' % sys.platform, '').replace('.lock-waf', '')
		# someone can be messing with symlinks
		out = os.path.realpath(out)
		self.bldnode = (os.path.isabs(out) and self.root or self.path).make_node(out)
		self.bldnode.mkdir()
		if not os.path.isdir(self.bldnode.abspath()):
			# bug fix: this used to call conf.fatal(), but no name 'conf'
			# exists in this scope (NameError); report through self instead
			self.fatal('Could not create the build directory %s' % self.bldnode.abspath())
	def execute(self):
		"""
		See :py:func:`waflib.Context.Context.execute`
		"""
		self.init_dirs()
		self.cachedir = self.bldnode.make_node(Build.CACHE_DIR)
		self.cachedir.mkdir()
		path = os.path.join(self.bldnode.abspath(), WAF_CONFIG_LOG)
		self.logger = Logs.make_logger(path, 'cfg')
		app = getattr(Context.g_module, 'APPNAME', '')
		if app:
			ver = getattr(Context.g_module, 'VERSION', '')
			if ver:
				app = "%s (%s)" % (app, ver)
		# the local variables below are interpolated into conf_template via vars()
		now = time.ctime()
		pyver = sys.hexversion
		systype = sys.platform
		args = " ".join(sys.argv)
		wafver = Context.WAFVERSION
		abi = Context.ABI
		self.to_log(conf_template % vars())
		self.msg('Setting top to', self.srcnode.abspath())
		self.msg('Setting out to', self.bldnode.abspath())
		if id(self.srcnode) == id(self.bldnode):
			Logs.warn('Setting top == out (remember to use "update_outputs")')
		elif id(self.path) != id(self.srcnode):
			if self.srcnode.is_child_of(self.path):
				Logs.warn('Are you certain that you do not want to set top="." ?')
		super(ConfigurationContext, self).execute()
		self.store()
		Context.top_dir = self.srcnode.abspath()
		Context.out_dir = self.bldnode.abspath()
		# this will write a configure lock so that subsequent builds will
		# consider the current path as the root directory (see prepare_impl).
		# to remove: use 'waf distclean'
		env = ConfigSet.ConfigSet()
		env['argv'] = sys.argv
		env['options'] = Options.options.__dict__
		env.run_dir = Context.run_dir
		env.top_dir = Context.top_dir
		env.out_dir = Context.out_dir
		# conf.hash & conf.files hold wscript files paths and hash
		# (used only by Configure.autoconfig)
		env['hash'] = self.hash
		env['files'] = self.files
		env['environ'] = dict(self.environ)
		if not self.env.NO_LOCK_IN_RUN:
			env.store(os.path.join(Context.run_dir, Options.lockfile))
		if not self.env.NO_LOCK_IN_TOP:
			env.store(os.path.join(Context.top_dir, Options.lockfile))
		if not self.env.NO_LOCK_IN_OUT:
			env.store(os.path.join(Context.out_dir, Options.lockfile))
	def prepare_env(self, env):
		"""
		Insert *PREFIX*, *BINDIR* and *LIBDIR* values into ``env``
		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
		:param env: a ConfigSet, usually ``conf.env``
		"""
		if not env.PREFIX:
			if Options.options.prefix or Utils.is_win32:
				env.PREFIX = os.path.abspath(os.path.expanduser(Options.options.prefix))
			else:
				env.PREFIX = ''
		if not env.BINDIR:
			if Options.options.bindir:
				env.BINDIR = os.path.abspath(os.path.expanduser(Options.options.bindir))
			else:
				env.BINDIR = Utils.subst_vars('${PREFIX}/bin', env)
		if not env.LIBDIR:
			if Options.options.libdir:
				env.LIBDIR = os.path.abspath(os.path.expanduser(Options.options.libdir))
			else:
				env.LIBDIR = Utils.subst_vars('${PREFIX}/lib%s' % Utils.lib64(), env)
	def store(self):
		"""Save the config results into the cache file"""
		n = self.cachedir.make_node('build.config.py')
		n.write('version = 0x%x\ntools = %r\n' % (Context.HEXVERSION, self.tools))
		if not self.all_envs:
			self.fatal('nothing to store in the configuration context!')
		for key in self.all_envs:
			tmpenv = self.all_envs[key]
			tmpenv.store(os.path.join(self.cachedir.abspath(), key + Build.CACHE_SUFFIX))
	def load(self, input, tooldir=None, funs=None):
		"""
		Load Waf tools, which will be imported whenever a build is started.
		:param input: waf tools to import
		:type input: list of string
		:param tooldir: paths for the imports
		:type tooldir: list of string
		:param funs: functions to execute from the waf tools
		:type funs: list of string
		"""
		tools = Utils.to_list(input)
		if tooldir: tooldir = Utils.to_list(tooldir)
		for tool in tools:
			# avoid loading the same tool more than once with the same functions
			# used by composite projects
			mag = (tool, id(self.env), funs)
			if mag in self.tool_cache:
				self.to_log('(tool %s is already loaded, skipping)' % tool)
				continue
			self.tool_cache.append(mag)
			module = None
			try:
				module = Context.load_tool(tool, tooldir, ctx=self)
			except ImportError as e:
				self.fatal('Could not load the Waf tool %r from %r\n%s' % (tool, sys.path, e))
			except Exception as e:
				self.to_log('imp %r (%r & %r)' % (tool, tooldir, funs))
				self.to_log(Utils.ex_stack())
				raise
			if funs is not None:
				self.eval_rules(funs)
			else:
				func = getattr(module, 'configure', None)
				if func:
					# plain functions are called directly; anything else
					# (e.g. a string of method names) goes through eval_rules
					if type(func) is type(Utils.readf): func(self)
					else: self.eval_rules(func)
			self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
	def post_recurse(self, node):
		"""
		Records the path and a hash of the scripts visited, see :py:meth:`waflib.Context.Context.post_recurse`
		:param node: script
		:type node: :py:class:`waflib.Node.Node`
		"""
		super(ConfigurationContext, self).post_recurse(node)
		self.hash = Utils.h_list((self.hash, node.read('rb')))
		self.files.append(node.abspath())
	def eval_rules(self, rules):
		"""
		Execute the configuration tests. The method :py:meth:`waflib.Configure.ConfigurationContext.err_handler`
		is used to process the eventual exceptions
		:param rules: list of configuration method names
		:type rules: list of string
		"""
		self.rules = Utils.to_list(rules)
		for x in self.rules:
			f = getattr(self, x)
			if not f: self.fatal("No such method '%s'." % x)
			try:
				f()
			except Exception as e:
				# err_handler decides: break, continue, or re-raise
				ret = self.err_handler(x, e)
				if ret == BREAK:
					break
				elif ret == CONTINUE:
					continue
				else:
					raise
	def err_handler(self, fun, error):
		"""
		Error handler for the configuration tests, the default is to let the exception raise
		:param fun: configuration test
		:type fun: method
		:param error: exception
		:type error: exception
		"""
		pass
def conf(f):
	"""
	Decorator: attach new configuration functions to :py:class:`waflib.Build.BuildContext` and
	:py:class:`waflib.Configure.ConfigurationContext`. The methods bound will accept a parameter
	named 'mandatory' to disable the configuration errors::
	def configure(conf):
	conf.find_program('abc', mandatory=False)
	:param f: method to bind
	:type f: function
	"""
	def fun(*k, **kw):
		# remove 'mandatory' so that it is not forwarded to the wrapped function
		mandatory = kw.pop('mandatory', True)
		try:
			return f(*k, **kw)
		except Errors.ConfigurationError:
			# non-mandatory tests swallow configuration errors
			if mandatory:
				raise
	for cls in (ConfigurationContext, Build.BuildContext):
		setattr(cls, f.__name__, fun)
	return f
@conf
def add_os_flags(self, var, dest=None):
	"""
	Import operating system environment values into ``conf.env`` dict::
	def configure(conf):
	conf.add_os_flags('CFLAGS')
	:param var: variable to use
	:type var: string
	:param dest: destination variable, by default the same as var
	:type dest: string
	"""
	# do not use 'get' to make certain the variable is not defined
	try:
		flags = shlex.split(self.environ[var])
	except KeyError:
		# the environment variable is simply not set: nothing to import
		pass
	else:
		self.env.append_value(dest or var, flags)
@conf
def cmd_to_list(self, cmd):
	"""
	Detect if a command is written in pseudo shell like ``ccache g++`` and return a list.
	:param cmd: command
	:type cmd: a string or a list of string
	:return: the command as a list of strings (non-string inputs are returned unchanged)
	"""
	if isinstance(cmd, str):
		# bug fix: the previous test 'cmd.find(' ')' was truthy for -1 (no
		# space found) and falsy for a *leading* space (index 0), so a
		# command such as ' gcc' was returned as a plain string instead of
		# a list; always take the stat/split path for strings instead
		try:
			# an existing file path (possibly containing spaces) is used verbatim
			os.stat(cmd)
		except OSError:
			# not a plain file: split like a shell would, 'ccache g++' -> ['ccache', 'g++']
			return shlex.split(cmd)
		else:
			return [cmd]
	return cmd
@conf
def check_waf_version(self, mini='1.7.99', maxi='1.9.0', **kw):
	"""
	Raise a Configuration error if the Waf version does not strictly match the given bounds::
	conf.check_waf_version(mini='1.8.0', maxi='1.9.0')
	:type mini: number, tuple or string
	:param mini: Minimum required version
	:type maxi: number, tuple or string
	:param maxi: Maximum allowed version
	"""
	self.start_msg('Checking for waf version in %s-%s' % (str(mini), str(maxi)), **kw)
	current = Context.HEXVERSION
	minimum = Utils.num2ver(mini)
	maximum = Utils.num2ver(maxi)
	if minimum > current:
		self.fatal('waf version should be at least %r (%r found)' % (minimum, current))
	if maximum < current:
		self.fatal('waf version should be at most %r (%r found)' % (maximum, current))
	self.end_msg('ok', **kw)
@conf
def find_file(self, filename, path_list=[]):
	"""
	Find a file in a list of paths
	:param filename: name of the file to search for (a string or list of names to try)
	:param path_list: list of directories to search
	:return: the first occurrence filename (a configuration error is raised
		through self.fatal when the file cannot be found; '' is never returned)
	"""
	for n in Utils.to_list(filename):
		for d in Utils.to_list(path_list):
			p = os.path.join(d, n)
			if os.path.exists(p):
				return p
	self.fatal('Could not find %r' % filename)
@conf
def find_program(self, filename, **kw):
	"""
	Search for a program on the operating system
	When var is used, you may set os.environ[var] to help find a specific program version, for example::
	$ CC='ccache gcc' waf configure
	:param path_list: paths to use for searching
	:type param_list: list of string
	:param var: store the result to conf.env[var], by default use filename.upper()
	:type var: string
	:param ext: list of extensions for the binary (do not add an extension for portability)
	:type ext: list of string
	:param msg: name to display in the log, by default filename is used
	:type msg: string
	:param interpreter: interpreter for the program
	:type interpreter: ConfigSet variable key
	:raises: a configuration error (self.fatal) when the program cannot be found
	"""
	# candidate suffixes: windows executable extensions vs unix script suffixes
	# (the leading '' entry means "try the bare name first")
	exts = kw.get('exts', Utils.is_win32 and '.exe,.com,.bat,.cmd' or ',.sh,.pl,.py')
	environ = kw.get('environ', getattr(self, 'environ', os.environ))
	ret = ''
	filename = Utils.to_list(filename)
	msg = kw.get('msg', ', '.join(filename))
	var = kw.get('var', '')
	if not var:
		# derive the ConfigSet variable name, e.g. 'gcc-ar' -> 'GCC_AR'
		var = re.sub(r'[-.]', '_', filename[0].upper())
	path_list = kw.get('path_list', '')
	if path_list:
		path_list = Utils.to_list(path_list)
	else:
		path_list = environ.get('PATH', '').split(os.pathsep)
	# resolution priority: environment variable, then conf.env[var],
	# then a search of path_list (plus the win32 registry as a fallback)
	if var in environ:
		filename = environ[var]
		if os.path.isfile(filename):
			# typical CC=/usr/bin/gcc waf configure build
			ret = [filename]
		else:
			# case CC='ccache gcc' waf configure build
			ret = self.cmd_to_list(filename)
	elif self.env[var]:
		# set by the user in the wscript file
		ret = self.env[var]
		ret = self.cmd_to_list(ret)
	else:
		if not ret:
			ret = self.find_binary(filename, exts.split(','), path_list)
		if not ret and Utils.winreg:
			ret = Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER, filename)
		if not ret and Utils.winreg:
			ret = Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE, filename)
		ret = self.cmd_to_list(ret)
	# single-element results are displayed as a plain string
	if ret:
		if len(ret) == 1:
			retmsg = ret[0]
		else:
			retmsg = ret
	else:
		retmsg = False
	self.msg("Checking for program '%s'" % msg, retmsg, **kw)
	if not kw.get('quiet', None):
		self.to_log('find program=%r paths=%r var=%r -> %r' % (filename, path_list, var, ret))
	if not ret:
		self.fatal(kw.get('errmsg', '') or 'Could not find the program %r' % filename)
	interpreter = kw.get('interpreter', None)
	if interpreter is None:
		if not Utils.check_exe(ret[0], env=environ):
			self.fatal('Program %r is not executable' % ret)
		self.env[var] = ret
	else:
		# scripts are prefixed with the configured interpreter command
		self.env[var] = self.env[interpreter] + ret
	return ret
@conf
def find_binary(self, filenames, exts, paths):
	"""
	Try each name/extension combination, either as an absolute path or
	joined to each directory of *paths*; return the first existing file,
	or None when nothing matches.
	"""
	for base in filenames:
		for ext in exts:
			candidate = base + ext
			if os.path.isabs(candidate):
				# absolute candidates are checked directly, not joined to paths
				if os.path.isfile(candidate):
					return candidate
				continue
			for folder in paths:
				full = os.path.expanduser(os.path.join(folder, candidate))
				if os.path.isfile(full):
					return full
	return None
@conf
def run_build(self, *k, **kw):
	"""
	Create a temporary build context to execute a build. A reference to that build
	context is kept on self.test_bld for debugging purposes, and you should not rely
	on it too much (read the note on the cache below).
	The parameters given in the arguments to this function are passed as arguments for
	a single task generator created in the build. Only three parameters are obligatory:
	:param features: features to pass to a task generator created in the build
	:type features: list of string
	:param compile_filename: file to create for the compilation (default: *test.c*)
	:type compile_filename: string
	:param code: code to write in the filename to compile
	:type code: string
	Though this function returns *0* by default, the build may set an attribute named *retval* on the
	build context object to return a particular value. See :py:func:`waflib.Tools.c_config.test_exec_fun` for example.
	This function also provides a limited cache. To use it, provide the following option::
	def options(opt):
	opt.add_option('--confcache', dest='confcache', default=0,
	action='count', help='Use a configuration cache')
	And execute the configuration with the following command-line::
	$ waf configure --confcache
	"""
	# hash the keyword arguments (except 'env') to obtain a folder name that
	# is unique per test, so that cached results can be matched to the inputs
	lst = [str(v) for (p, v) in kw.items() if p != 'env']
	h = Utils.h_list(lst)
	# on non-windows platforms the folder is hidden (leading dot)
	dir = self.bldnode.abspath() + os.sep + (not Utils.is_win32 and '.' or '') + 'conf_check_' + Utils.to_hex(h)
	try:
		os.makedirs(dir)
	except OSError:
		# already present (e.g. from a previous cached run)
		pass
	try:
		os.stat(dir)
	except OSError:
		self.fatal('cannot use the configuration test folder %r' % dir)
	cachemode = getattr(Options.options, 'confcache', None)
	if cachemode == 1:
		# try to reuse the result of a previous identical test
		try:
			proj = ConfigSet.ConfigSet(os.path.join(dir, 'cache_run_build'))
		except OSError:
			pass
		except IOError:
			pass
		else:
			ret = proj['cache_run_build']
			# a cached failure is re-raised as a configuration error
			if isinstance(ret, str) and ret.startswith('Test does not build'):
				self.fatal(ret)
			return ret
	bdir = os.path.join(dir, 'testbuild')
	if not os.path.exists(bdir):
		os.makedirs(bdir)
	self.test_bld = bld = Build.BuildContext(top_dir=dir, out_dir=bdir)
	bld.init_dirs()
	bld.progress_bar = 0
	bld.targets = '*'
	bld.logger = self.logger
	bld.all_envs.update(self.all_envs) # not really necessary
	bld.env = kw['env']
	# OMG huge hack
	bld.kw = kw
	bld.conf = self
	# the caller-provided function populates the temporary build
	kw['build_fun'](bld)
	ret = -1
	try:
		try:
			bld.compile()
		except Errors.WafError:
			ret = 'Test does not build: %s' % Utils.ex_stack()
			self.fatal(ret)
		else:
			ret = getattr(bld, 'retval', 0)
	finally:
		# without the cache option, the test folder is removed even on failure
		if cachemode == 1:
			# cache the results each time
			proj = ConfigSet.ConfigSet()
			proj['cache_run_build'] = ret
			proj.store(os.path.join(dir, 'cache_run_build'))
		else:
			shutil.rmtree(dir)
	return ret
@conf
def ret_msg(self, msg, args):
	"""Return *msg* unchanged when it is a string, otherwise call it with *args*."""
	return msg if isinstance(msg, str) else msg(args)
@conf
def test(self, *k, **kw):
	"""
	Run a configuration test through :py:func:`waflib.Configure.run_build`,
	printing a start/end message pair and converting build failures into
	configuration errors.
	Expected keyword arguments include ``msg`` (displayed before the test),
	``okmsg``/``errmsg`` (displayed after), and optionally ``env`` (a
	ConfigSet, derived from conf.env by default), ``validate`` (a callable
	given kw before the run) and ``post_check`` (a callable given kw after
	a successful run; a truthy return value marks the test as failed).
	"""
	if not 'env' in kw:
		kw['env'] = self.env.derive()
	# validate_c for example
	if kw.get('validate', None):
		kw['validate'](kw)
	self.start_msg(kw['msg'], **kw)
	ret = None
	try:
		ret = self.run_build(*k, **kw)
	except self.errors.ConfigurationError:
		self.end_msg(kw['errmsg'], 'YELLOW', **kw)
		# in verbose mode the original exception is propagated for debugging
		if Logs.verbose > 1:
			raise
		else:
			self.fatal('The configuration failed')
	else:
		kw['success'] = ret
	if kw.get('post_check', None):
		# a truthy return value from post_check marks the test as failed
		ret = kw['post_check'](kw)
	if ret:
		self.end_msg(kw['errmsg'], 'YELLOW', **kw)
		self.fatal('The configuration failed %r' % ret)
	else:
		self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw)
	return ret
# ==== waflib Context module (diff hunk header removed) ====
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2010 (ita) | |||||
""" | |||||
Classes and functions required for waf commands | |||||
""" | |||||
import os, re, imp, sys | |||||
from waflib import Utils, Errors, Logs | |||||
import waflib.Node | |||||
# the following 3 constants are updated on each new release (do not touch)
HEXVERSION=0x1080900
"""Constant updated on new releases"""

WAFVERSION="1.8.9"
"""Constant updated on new releases"""

WAFREVISION="06e49b2a82166aeb14dde8357c58387f252fc722"
"""Git revision when the waf version is updated"""

ABI = 98
"""Version of the build data cache file format (used in :py:const:`waflib.Context.DBFILE`)"""

DBFILE = '.wafpickle-%s-%d-%d' % (sys.platform, sys.hexversion, ABI)
"""Name of the pickle file for storing the build data"""

APPNAME = 'APPNAME'
"""Default application name (used by ``waf dist``)"""

VERSION = 'VERSION'
"""Default application version (used by ``waf dist``)"""

TOP = 'top'
"""The variable name for the top-level directory in wscript files"""

OUT = 'out'
"""The variable name for the output directory in wscript files"""

WSCRIPT_FILE = 'wscript'
"""Name of the waf script files"""

launch_dir = ''
"""Directory from which waf has been called"""
run_dir = ''
"""Location of the wscript file to use as the entry point"""
top_dir = ''
"""Location of the project directory (top), if the project was configured"""
out_dir = ''
"""Location of the build directory (out), if the project was configured"""
waf_dir = ''
"""Directory containing the waf modules"""

local_repo = ''
"""Local repository containing additional Waf tools (plugins)"""
remote_repo = 'http://waf.googlecode.com/git/'
"""
Remote directory containing downloadable waf tools. The missing tools can be downloaded by using::

	$ waf configure --download
"""

remote_locs = ['waflib/extras', 'waflib/Tools']
"""
Remote directories for use with :py:const:`waflib.Context.remote_repo`
"""

g_module = None
"""
Module representing the main wscript file (see :py:const:`waflib.Context.run_dir`)
"""

STDOUT = 1   # cmd_and_log(): capture/return the standard output only
STDERR = -1  # cmd_and_log(): capture/return the standard error only
BOTH = 0     # cmd_and_log(): return the tuple (stdout, stderr)

classes = []
"""
List of :py:class:`waflib.Context.Context` subclasses that can be used as waf commands. The classes
are added automatically by a metaclass.
"""
def create_context(cmd_name, *k, **kw):
	"""
	Create a new :py:class:`waflib.Context.Context` instance corresponding to the given command.
	Used in particular by :py:func:`waflib.Scripting.run_command`. When no registered
	class matches, a plain Context is returned with its 'fun' attribute set.

	:param cmd_name: command
	:type cmd_name: string
	:param k: arguments to give to the context class initializer
	:type k: list
	:param kw: keyword arguments to give to the context class initializer
	:type kw: dict
	"""
	global classes
	for cls in classes:
		if cls.cmd == cmd_name:
			return cls(*k, **kw)
	# fallback: generic context executing the user function 'cmd_name'
	ctx = Context(*k, **kw)
	ctx.fun = cmd_name
	return ctx
class store_context(type):
	"""
	Metaclass registering command classes into the list :py:const:`waflib.Context.classes`.
	Context classes must provide an attribute 'cmd' representing the command to execute.
	"""
	def __init__(cls, name, bases, dct):
		super(store_context, cls).__init__(name, bases, dct)
		name = cls.__name__

		# the two base classes themselves are not commands
		if name in ('ctx', 'Context'):
			return

		try:
			cls.cmd
		except AttributeError:
			raise Errors.WafError('Missing command for the context class %r (cmd)' % name)

		if not getattr(cls, 'fun', None):
			cls.fun = cls.cmd

		# newest classes first, so subclasses override their parents
		global classes
		classes.insert(0, cls)
# Instantiating the metaclass directly yields a base class that works on both
# Python 2 and Python 3 (equivalent to declaring `metaclass=store_context`).
ctx = store_context('ctx', (object,), {})
"""Base class for the :py:class:`waflib.Context.Context` classes"""
class Context(ctx):
	"""
	Default context for waf commands, and base class for new command contexts.

	Context objects are passed to top-level functions::

		def foo(ctx):
			print(ctx.__class__.__name__) # waflib.Context.Context

	Subclasses must define the attribute 'cmd':

	:param cmd: command to execute as in ``waf cmd``
	:type cmd: string
	:param fun: function name to execute when the command is called
	:type fun: string

	.. inheritance-diagram:: waflib.Context.Context waflib.Build.BuildContext waflib.Build.InstallContext waflib.Build.UninstallContext waflib.Build.StepContext waflib.Build.ListContext waflib.Configure.ConfigurationContext waflib.Scripting.Dist waflib.Scripting.DistCheck waflib.Build.CleanContext
	"""

	errors = Errors
	"""
	Shortcut to :py:mod:`waflib.Errors` provided for convenience
	"""

	tools = {}
	"""
	A cache for modules (wscript files) read by :py:meth:`Context.Context.load`
	"""

	def __init__(self, **kw):
		try:
			rd = kw['run_dir']
		except KeyError:
			global run_dir
			rd = run_dir

		# binds the context to the nodes in use to avoid a context singleton
		self.node_class = type("Nod3", (waflib.Node.Node,), {})
		self.node_class.__module__ = "waflib.Node"
		self.node_class.ctx = self

		self.root = self.node_class('', None)
		self.cur_script = None
		self.path = self.root.find_dir(rd)

		self.stack_path = []
		# names made visible to wscript code executed through exec()
		self.exec_dict = {'ctx':self, 'conf':self, 'bld':self, 'opt':self}
		self.logger = None

	def __hash__(self):
		"""
		Return a hash value for storing context objects in dicts or sets. The value is not persistent.

		:return: hash value
		:rtype: int
		"""
		return id(self)

	def finalize(self):
		"""
		Use to free resources such as open files potentially held by the logger
		"""
		try:
			logger = self.logger
		except AttributeError:
			pass
		else:
			Logs.free_logger(logger)
			delattr(self, 'logger')

	def load(self, tool_list, *k, **kw):
		"""
		Load a Waf tool as a module, and try calling the function named :py:const:`waflib.Context.Context.fun` from it.
		A ``tooldir`` value may be provided as a list of module paths.

		:type tool_list: list of string or space-separated string
		:param tool_list: list of Waf tools to use
		"""
		tools = Utils.to_list(tool_list)
		path = Utils.to_list(kw.get('tooldir', ''))

		for t in tools:
			module = load_tool(t, path)
			fun = getattr(module, kw.get('name', self.fun), None)
			if fun:
				fun(self)

	def execute(self):
		"""
		Execute the command. Redefine this method in subclasses.
		"""
		global g_module
		self.recurse([os.path.dirname(g_module.root_path)])

	def pre_recurse(self, node):
		"""
		Method executed immediately before a folder is read by :py:meth:`waflib.Context.Context.recurse`. The node given is set
		as an attribute ``self.cur_script``, and as the current path ``self.path``

		:param node: script
		:type node: :py:class:`waflib.Node.Node`
		"""
		self.stack_path.append(self.cur_script)
		self.cur_script = node
		self.path = node.parent

	def post_recurse(self, node):
		"""
		Restore ``self.cur_script`` and ``self.path`` right after :py:meth:`waflib.Context.Context.recurse` terminates.

		:param node: script
		:type node: :py:class:`waflib.Node.Node`
		"""
		self.cur_script = self.stack_path.pop()
		if self.cur_script:
			self.path = self.cur_script.parent

	def recurse(self, dirs, name=None, mandatory=True, once=True, encoding=None):
		"""
		Run user code from the supplied list of directories.
		The directories can be either absolute, or relative to the directory
		of the wscript file. The methods :py:meth:`waflib.Context.Context.pre_recurse` and :py:meth:`waflib.Context.Context.post_recurse`
		are called immediately before and after a script has been executed.

		:param dirs: List of directories to visit
		:type dirs: list of string or space-separated string
		:param name: Name of function to invoke from the wscript
		:type name: string
		:param mandatory: whether sub wscript files are required to exist
		:type mandatory: bool
		:param once: read the script file once for a particular context
		:type once: bool
		"""
		try:
			cache = self.recurse_cache
		except AttributeError:
			cache = self.recurse_cache = {}

		for d in Utils.to_list(dirs):
			if not os.path.isabs(d):
				# absolute paths only
				d = os.path.join(self.path.abspath(), d)

			WSCRIPT = os.path.join(d, WSCRIPT_FILE)
			WSCRIPT_FUN = WSCRIPT + '_' + (name or self.fun)

			# a file 'wscript_<fun>' takes precedence: its whole content is the function body
			node = self.root.find_node(WSCRIPT_FUN)
			if node and (not once or node not in cache):
				cache[node] = True
				self.pre_recurse(node)
				try:
					function_code = node.read('rU', encoding)
					exec(compile(function_code, node.abspath(), 'exec'), self.exec_dict)
				finally:
					self.post_recurse(node)
			elif not node:
				node = self.root.find_node(WSCRIPT)
				tup = (node, name or self.fun)
				if node and (not once or tup not in cache):
					cache[tup] = True
					self.pre_recurse(node)
					try:
						wscript_module = load_module(node.abspath(), encoding=encoding)
						user_function = getattr(wscript_module, (name or self.fun), None)
						if not user_function:
							if not mandatory:
								continue
							raise Errors.WafError('No function %s defined in %s' % (name or self.fun, node.abspath()))
						user_function(self)
					finally:
						self.post_recurse(node)
				elif not node:
					if not mandatory:
						continue
					raise Errors.WafError('No wscript file in directory %s' % d)

	def exec_command(self, cmd, **kw):
		"""
		Execute a command and return the exit status. If the context has the attribute 'log',
		capture and log the process stderr/stdout for logging purposes::

			def run(tsk):
				ret = tsk.generator.bld.exec_command('touch foo.txt')
				return ret

		This method captures the standard/error outputs (Issue 1101), but it does not return the values
		unlike :py:meth:`waflib.Context.Context.cmd_and_log`

		:param cmd: command argument for subprocess.Popen
		:param kw: keyword arguments for subprocess.Popen
		"""
		subprocess = Utils.subprocess
		kw['shell'] = isinstance(cmd, str)
		Logs.debug('runner: %r' % cmd)
		Logs.debug('runner_env: kw=%s' % kw)

		if self.logger:
			self.logger.info(cmd)

		if 'stdout' not in kw:
			kw['stdout'] = subprocess.PIPE
		if 'stderr' not in kw:
			kw['stderr'] = subprocess.PIPE

		if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
			raise Errors.WafError("Program %s not found!" % cmd[0])

		try:
			if kw['stdout'] or kw['stderr']:
				p = subprocess.Popen(cmd, **kw)
				(out, err) = p.communicate()
				ret = p.returncode
			else:
				# caller explicitly passed stdout=None/stderr=None: no capture
				out, err = (None, None)
				ret = subprocess.Popen(cmd, **kw).wait()
		except Exception as e:
			raise Errors.WafError('Execution failure: %s' % str(e), ex=e)

		if out:
			if not isinstance(out, str):
				out = out.decode(sys.stdout.encoding or 'iso8859-1')
			if self.logger:
				self.logger.debug('out: %s' % out)
			else:
				Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
		if err:
			if not isinstance(err, str):
				err = err.decode(sys.stdout.encoding or 'iso8859-1')
			if self.logger:
				self.logger.error('err: %s' % err)
			else:
				Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})

		return ret

	def cmd_and_log(self, cmd, **kw):
		"""
		Execute a command and return stdout if the execution is successful.
		An exception is thrown when the exit status is non-0. In that case, both stderr and stdout
		will be bound to the WafError object::

			def configure(conf):
				out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH)
				(out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH)
				try:
					conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH)
				except Exception as e:
					print(e.stdout, e.stderr)

		:param cmd: args for subprocess.Popen
		:param kw: keyword arguments for subprocess.Popen
		"""
		subprocess = Utils.subprocess
		kw['shell'] = isinstance(cmd, str)
		Logs.debug('runner: %r' % cmd)

		if 'quiet' in kw:
			quiet = kw['quiet']
			del kw['quiet']
		else:
			quiet = None

		if 'output' in kw:
			to_ret = kw['output']
			del kw['output']
		else:
			to_ret = STDOUT

		if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
			raise Errors.WafError("Program %s not found!" % cmd[0])

		kw['stdout'] = kw['stderr'] = subprocess.PIPE
		if quiet is None:
			self.to_log(cmd)
		try:
			p = subprocess.Popen(cmd, **kw)
			(out, err) = p.communicate()
		except Exception as e:
			raise Errors.WafError('Execution failure: %s' % str(e), ex=e)

		if not isinstance(out, str):
			out = out.decode(sys.stdout.encoding or 'iso8859-1')
		if not isinstance(err, str):
			err = err.decode(sys.stdout.encoding or 'iso8859-1')

		if out and quiet != STDOUT and quiet != BOTH:
			self.to_log('out: %s' % out)
		if err and quiet != STDERR and quiet != BOTH:
			self.to_log('err: %s' % err)

		if p.returncode:
			e = Errors.WafError('Command %r returned %r' % (cmd, p.returncode))
			e.returncode = p.returncode
			e.stderr = err
			e.stdout = out
			raise e

		if to_ret == BOTH:
			return (out, err)
		elif to_ret == STDERR:
			return err
		return out

	def fatal(self, msg, ex=None):
		"""
		Raise a configuration error to interrupt the execution immediately::

			def configure(conf):
				conf.fatal('a requirement is missing')

		:param msg: message to display
		:type msg: string
		:param ex: optional exception object
		:type ex: exception
		"""
		if self.logger:
			self.logger.info('from %s: %s' % (self.path.abspath(), msg))
		try:
			msg = '%s\n(complete log in %s)' % (msg, self.logger.handlers[0].baseFilename)
		except Exception:
			pass
		raise self.errors.ConfigurationError(msg, ex=ex)

	def to_log(self, msg):
		"""
		Log some information to the logger (if present), or to stderr. If the message is empty,
		it is not printed::

			def build(bld):
				bld.to_log('starting the build')

		When in doubt, override this method, or provide a logger on the context class.

		:param msg: message
		:type msg: string
		"""
		if not msg:
			return
		if self.logger:
			self.logger.info(msg)
		else:
			sys.stderr.write(str(msg))
			sys.stderr.flush()

	def msg(self, *k, **kw):
		"""
		Print a configuration message of the form ``msg: result``.
		The second part of the message will be in colors. The output
		can be disabled easly by setting ``in_msg`` to a positive value::

			def configure(conf):
				self.in_msg = 1
				conf.msg('Checking for library foo', 'ok')
				# no output

		:param msg: message to display to the user
		:type msg: string
		:param result: result to display
		:type result: string or boolean
		:param color: color to use, see :py:const:`waflib.Logs.colors_lst`
		:type color: string
		"""
		try:
			msg = kw['msg']
		except KeyError:
			msg = k[0]

		self.start_msg(msg, **kw)

		try:
			result = kw['result']
		except KeyError:
			result = k[1]

		color = kw.get('color', None)
		if not isinstance(color, str):
			color = result and 'GREEN' or 'YELLOW'

		self.end_msg(result, color, **kw)

	def start_msg(self, *k, **kw):
		"""
		Print the beginning of a 'Checking for xxx' message. See :py:meth:`waflib.Context.Context.msg`
		"""
		if kw.get('quiet', None):
			return

		msg = kw.get('msg', None) or k[0]
		try:
			# nested start_msg calls are counted but not printed
			if self.in_msg:
				self.in_msg += 1
				return
		except AttributeError:
			self.in_msg = 0
		self.in_msg += 1

		try:
			self.line_just = max(self.line_just, len(msg))
		except AttributeError:
			self.line_just = max(40, len(msg))
		for x in (self.line_just * '-', msg):
			self.to_log(x)
		Logs.pprint('NORMAL', "%s :" % msg.ljust(self.line_just), sep='')

	def end_msg(self, *k, **kw):
		"""Print the end of a 'Checking for' message. See :py:meth:`waflib.Context.Context.msg`"""
		if kw.get('quiet', None):
			return
		self.in_msg -= 1
		if self.in_msg:
			return

		result = kw.get('result', None) or k[0]

		defcolor = 'GREEN'
		if result == True:
			msg = 'ok'
		elif result == False:
			msg = 'not found'
			defcolor = 'YELLOW'
		else:
			msg = str(result)

		self.to_log(msg)
		try:
			color = kw['color']
		except KeyError:
			if len(k) > 1 and k[1] in Logs.colors_lst:
				# compatibility waf 1.7
				color = k[1]
			else:
				color = defcolor
		Logs.pprint(color, msg)

	def load_special_tools(self, var, ban=[]):
		"""
		Load third-party extensions from waflib/extras, either from the
		directory on disk or from the waf zip archive.

		:param var: glob expression for the tool names, e.g. ``'*.py'``
		:type var: string
		:param ban: list of glob expressions for tools to skip
		:type ban: list of string
		"""
		global waf_dir
		if os.path.isdir(waf_dir):
			lst = self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var)
			for x in lst:
				if not x.name in ban:
					load_tool(x.name.replace('.py', ''))
		else:
			from zipfile import PyZipFile
			waflibs = PyZipFile(waf_dir)
			lst = waflibs.namelist()
			for x in lst:
				# FIX: match against the archive entry x, not against var
				# (matching the pattern against var itself could never succeed,
				# so no tool was ever loaded from a zip distribution)
				if not re.match("waflib/extras/%s" % var.replace("*", ".*"), x):
					continue
				f = os.path.basename(x)
				doban = False
				for b in ban:
					r = b.replace("*", ".*")
					# FIX: use the translated regex r, not the raw glob b
					if re.match(r, f):
						doban = True
				if not doban:
					f = f.replace('.py', '')
					load_tool(f)
# Populated and consumed exclusively by load_module() below.
cache_modules = {}
"""
Dictionary holding already loaded modules (wscript files), keyed by their absolute path.
The modules are added automatically by :py:func:`waflib.Context.load_module`
"""
def load_module(path, encoding=None):
	"""
	Load a source file as a python module. Results are cached in
	:py:const:`waflib.Context.cache_modules` so each wscript runs only once.

	:param path: file path
	:type path: string
	:param encoding: encoding used to read the file (optional)
	:return: Loaded Python module
	:rtype: module
	:raises: :py:class:`waflib.Errors.WafError` when the file cannot be read
	"""
	try:
		return cache_modules[path]
	except KeyError:
		pass

	module = imp.new_module(WSCRIPT_FILE)
	try:
		code = Utils.readf(path, m='rU', encoding=encoding)
	except EnvironmentError:
		raise Errors.WafError('Could not read the file %r' % path)

	module_dir = os.path.dirname(path)
	sys.path.insert(0, module_dir)
	try:
		exec(compile(code, path, 'exec'), module.__dict__)
	finally:
		# FIX: restore sys.path even when the wscript raises; previously a
		# failing wscript left module_dir permanently on sys.path
		sys.path.remove(module_dir)

	cache_modules[path] = module
	return module
def load_tool(tool, tooldir=None, ctx=None):
	"""
	Import a Waf tool (python module), and store it in the dict :py:const:`waflib.Context.Context.tools`

	:type tool: string
	:param tool: Name of the tool
	:type tooldir: list
	:param tooldir: List of directories to search for the tool module
	:param ctx: unused, kept for signature compatibility
	"""
	if tool == 'java':
		tool = 'javaw' # jython
	else:
		tool = tool.replace('++', 'xx')

	if tooldir:
		assert isinstance(tooldir, list)
		sys.path = tooldir + sys.path
		try:
			__import__(tool)
			ret = sys.modules[tool]
			Context.tools[tool] = ret
			return ret
		finally:
			for d in tooldir:
				sys.path.remove(d)
	else:
		# try the waf namespaces first, then a plain import
		for x in ('waflib.Tools.%s', 'waflib.extras.%s', 'waflib.%s', '%s'):
			try:
				__import__(x % tool)
				break
			except ImportError:
				x = None
		if x is None:
			# either raise the original ImportError...
			__import__(tool)
			# FIX: ...or, if this fallback import somehow succeeds, use it;
			# previously `x % tool` crashed with TypeError (x was None)
			x = '%s'
		ret = sys.modules[x % tool]
		Context.tools[tool] = ret
		return ret
@@ -0,0 +1,70 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2010 (ita) | |||||
""" | |||||
Exceptions used in the Waf code | |||||
""" | |||||
import traceback, sys | |||||
class WafError(Exception):
	"""Base class for all Waf errors"""

	def __init__(self, msg='', ex=None):
		"""
		:param msg: error message
		:type msg: string
		:param ex: exception causing this error (optional)
		:type ex: exception
		"""
		assert not isinstance(msg, Exception)
		self.msg = msg
		if not ex:
			self.stack = []
		else:
			if not msg:
				self.msg = str(ex)
			if isinstance(ex, WafError):
				# NOTE: the list is deliberately shared with (and extended below
				# into) the wrapped error, exactly like the original code
				self.stack = ex.stack
			else:
				self.stack = traceback.extract_tb(sys.exc_info()[2])
		self.stack += traceback.extract_stack()[:-1]
		self.verbose_msg = ''.join(traceback.format_list(self.stack))

	def __str__(self):
		return str(self.msg)
class BuildError(WafError):
	"""
	Errors raised during the build and install phases
	"""
	def __init__(self, error_tasks=[]):
		"""
		:param error_tasks: tasks that could not complete normally
		:type error_tasks: list of task objects
		"""
		self.tasks = error_tasks
		WafError.__init__(self, self.format_error())

	def format_error(self):
		"""Concatenate 'Build failed' with the non-empty error messages of the failed tasks."""
		messages = (tsk.format_error() for tsk in self.tasks)
		return '\n'.join(['Build failed'] + [txt for txt in messages if txt])
class ConfigurationError(WafError):
	"""
	Configuration exception raised in particular by :py:meth:`waflib.Context.Context.fatal`
	"""
	# no extra behavior: the type alone signals a configuration failure
	pass
class TaskRescan(WafError):
	"""Task-specific exception type signalling that a signature recomputation is needed"""
	pass
class TaskNotReady(WafError):
	"""Task-specific exception type, raised when the task signature cannot be computed yet"""
	pass
@@ -0,0 +1,338 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2005-2010 (ita) | |||||
""" | |||||
logging, colors, terminal width and pretty-print | |||||
""" | |||||
import os, re, traceback, sys, types | |||||
from waflib import Utils, ansiterm | |||||
if not os.environ.get('NOSYNC', False):
	# synchronized output is nearly mandatory to prevent garbled output
	# (only wrap the real interpreter streams, not user-provided replacements)
	if sys.stdout.isatty() and id(sys.stdout) == id(sys.__stdout__):
		sys.stdout = ansiterm.AnsiTerm(sys.stdout)
	if sys.stderr.isatty() and id(sys.stderr) == id(sys.__stderr__):
		sys.stderr = ansiterm.AnsiTerm(sys.stderr)

# import the logging module after since it holds a reference on sys.stderr
# in case someone uses the root logger
import logging

LOG_FORMAT = "%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
HOUR_FORMAT = "%H:%M:%S"

# zone names selected with 'waf --zones=...' (see log_filter below)
zones = ''
# global verbosity level, set from the -v/-vv/-vvv command-line flags
verbose = 0

# ANSI escape sequences keyed by color name; 'USE' toggles colorized output
colors_lst = {
'USE' : True,
'BOLD' :'\x1b[01;1m',
'RED' :'\x1b[01;31m',
'GREEN' :'\x1b[32m',
'YELLOW':'\x1b[33m',
'PINK' :'\x1b[35m',
'BLUE' :'\x1b[01;34m',
'CYAN' :'\x1b[36m',
'NORMAL':'\x1b[0m',
'cursor_on' :'\x1b[?25h',
'cursor_off' :'\x1b[?25l',
}

# carriage return + erase-line, used for progress-bar style updates
indicator = '\r\x1b[K%s%s%s'
def enable_colors(use):
	"""
	Set the color usage in :py:const:`colors_lst` ('USE' key).

	:param use: 0 to disable, 1 to enable when the terminal supports it, 2 to force
	:type use: int
	"""
	if use == 1:
		# auto mode: disable when neither stream is a tty or the terminal is too dumb
		if not (sys.stderr.isatty() or sys.stdout.isatty()):
			use = 0
		if Utils.is_win32 and os.name != 'java':
			term = os.environ.get('TERM', '') # has ansiterm
		else:
			term = os.environ.get('TERM', 'dumb')
		if term in ('dumb', 'emacs'):
			use = 0
	if use >= 1:
		# child processes (compilers, etc.) may emit colors too
		os.environ['TERM'] = 'vt100'
	colors_lst['USE'] = use
# If console packages are available, replace the dummy function with a real
# implementation
try:
	get_term_cols = ansiterm.get_term_cols
except AttributeError:
	# fallback: assume a classic 80-column terminal
	def get_term_cols():
		return 80

get_term_cols.__doc__ = """
Get the console width in characters.

:return: the number of characters per line
:rtype: int
"""
def get_color(cl):
	"""Return the ANSI escape sequence for the color name *cl*, or '' when colors are disabled or the name is unknown."""
	return colors_lst.get(cl, '') if colors_lst['USE'] else ''
class color_dict(object):
	"""Attribute-based color access, eg: colors.PINK (also callable: colors('PINK'))"""
	def __getattr__(self, name):
		return get_color(name)
	# calling the object behaves exactly like attribute access
	__call__ = __getattr__
# module-level singleton: Logs.colors.RED / Logs.colors('RED')
colors = color_dict()

# splits 'zone: message' records emitted through Logs.debug (multi-line mode)
re_log = re.compile(r'(\w+): (.*)', re.M)
class log_filter(logging.Filter):
	"""
	The waf logs are of the form 'name: message', and can be filtered by 'waf --zones=name'.

	For example, the following::

		from waflib import Logs
		Logs.debug('test: here is a message')

	Will be displayed only when executing::

		$ waf --zones=test
	"""
	def __init__(self, name=None):
		# no name-based filtering: the base constructor is bypassed on purpose
		pass

	def filter(self, rec):
		"""
		Filter a record, adding the colors automatically

		* error: red
		* warning: yellow

		:param rec: message to record
		"""
		rec.zone = rec.module
		# INFO and above always pass through
		if rec.levelno >= logging.INFO:
			return True

		# extract the 'zone: message' prefix when present
		parsed = re_log.match(rec.msg)
		if parsed:
			rec.zone = parsed.group(1)
			rec.msg = parsed.group(2)

		if zones:
			return '*' in zones or getattr(rec, 'zone', '') in zones
		# no zone selection: only show debug records in very verbose mode
		return verbose > 2
class log_handler(logging.StreamHandler):
	"""Dispatches messages to stderr/stdout depending on the severity level"""
	def emit(self, record):
		# default implementation
		try:
			try:
				# a record may carry its own target stream (see Context.exec_command)
				self.stream = record.stream
			except AttributeError:
				# otherwise route by severity: warnings and errors go to stderr
				if record.levelno >= logging.WARNING:
					record.stream = self.stream = sys.stderr
				else:
					record.stream = self.stream = sys.stdout
			self.emit_override(record)
			self.flush()
		except (KeyboardInterrupt, SystemExit):
			raise
		except: # from the python library -_-
			self.handleError(record)
	def emit_override(self, record, **kw):
		# honor a per-record terminator so progress lines can suppress the newline
		self.terminator = getattr(record, 'terminator', '\n')
		stream = self.stream
		if hasattr(types, "UnicodeType"):
			# python2: write unicode safely, falling back through the stream
			# encoding and finally UTF-8 (mirrors logging.StreamHandler.emit)
			msg = self.formatter.format(record)
			fs = '%s' + self.terminator
			try:
				if (isinstance(msg, unicode) and getattr(stream, 'encoding', None)):
					fs = fs.decode(stream.encoding)
					try:
						stream.write(fs % msg)
					except UnicodeEncodeError:
						stream.write((fs % msg).encode(stream.encoding))
				else:
					stream.write(fs % msg)
			except UnicodeError:
				stream.write((fs % msg).encode("UTF-8"))
		else:
			# python3: the standard implementation is already correct
			logging.StreamHandler.emit(self, record)
class formatter(logging.Formatter):
	"""Simple log formatter which handles colors"""
	def __init__(self):
		logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT)
	def format(self, rec):
		"""Messages in warning, error or info mode are displayed in color by default"""
		try:
			msg = rec.msg.decode('utf-8')
		except Exception:
			msg = rec.msg

		use = colors_lst['USE']
		# NOTE(review): rec.stream is expected to be set by log_handler.emit
		# before formatting; records formatted by other handlers may lack it — confirm
		if (use == 1 and rec.stream.isatty()) or use == 2:
			c1 = getattr(rec, 'c1', None)
			if c1 is None:
				# pick a default color from the severity
				c1 = ''
				if rec.levelno >= logging.ERROR:
					c1 = colors.RED
				elif rec.levelno >= logging.WARNING:
					c1 = colors.YELLOW
				elif rec.levelno >= logging.INFO:
					c1 = colors.GREEN
			c2 = getattr(rec, 'c2', colors.NORMAL)
			msg = '%s%s%s' % (c1, msg, c2)
		else:
			# colors disabled: strip carriage returns and ANSI escape sequences
			msg = msg.replace('\r', '\n')
			msg = re.sub(r'\x1B\[(K|.*?(m|h|l))', '', msg)

		if rec.levelno >= logging.INFO: # ??
			return msg

		rec.msg = msg
		rec.c1 = colors.PINK
		rec.c2 = colors.NORMAL
		return logging.Formatter.format(self, rec)
# set by init_log(); callers must initialize before using debug/error/warn/info
log = None
"""global logger for Logs.debug, Logs.error, etc"""
def debug(*k, **kw):
	"""
	Wrap logging.debug; messages are dropped entirely unless verbose mode is enabled.
	"""
	global log
	if not verbose:
		return
	args = list(k)
	# keep each debug record on a single line
	args[0] = args[0].replace('\n', ' ')
	log.debug(*args, **kw)
def error(*k, **kw):
	"""
	Wrap logging.errors; when '-vv' is set (verbose > 2), also log where the call came from.
	"""
	global log
	log.error(*k, **kw)
	if verbose <= 2:
		return
	st = traceback.extract_stack()
	if not st:
		return
	buf = []
	# drop the last frame (this function) and format the remaining call stack
	for filename, lineno, name, line in st[:-1]:
		buf.append(' File "%s", line %d, in %s' % (filename, lineno, name))
		if line:
			buf.append(' %s' % line.strip())
	if buf:
		log.error("\n".join(buf))
def warn(*k, **kw):
	"""
	Wrap logging.warn; arguments are forwarded unchanged to the global 'waflib' logger
	"""
	global log
	log.warn(*k, **kw)
def info(*k, **kw):
	"""
	Wrap logging.info; arguments are forwarded unchanged to the global 'waflib' logger
	"""
	global log
	log.info(*k, **kw)
def init_log():
	"""
	Initialize the global 'waflib' logger used by debug/error/warn/info:
	reset its handlers and filters, attach the waf handler/formatter/filter.
	"""
	global log
	log = logging.getLogger('waflib')
	# loggers are process-wide singletons: clear any previous configuration
	log.handlers = []
	log.filters = []
	log.setLevel(logging.DEBUG)
	handler = log_handler()
	handler.setFormatter(formatter())
	log.addHandler(handler)
	log.addFilter(log_filter())
def make_logger(path, name):
	"""
	Create a simple logger, which is often used to redirect the context command output::

		from waflib import Logs
		bld.logger = Logs.make_logger('test.log', 'build')
		bld.check(header_name='sadlib.h', features='cxx cprogram', mandatory=False)

		# have the file closed immediately
		Logs.free_logger(bld.logger)

		# stop logging
		bld.logger = None

	The method finalize() of the command will try to free the logger, if any

	:param path: file name to write the log output to
	:type path: string
	:param name: logger name (loggers are reused)
	:type name: string
	"""
	logger = logging.getLogger(name)
	handler = logging.FileHandler(path, 'w')
	# 'handler'/'message_only' locals avoid shadowing the module-level formatter class
	message_only = logging.Formatter('%(message)s')
	handler.setFormatter(message_only)
	logger.addHandler(handler)
	logger.setLevel(logging.DEBUG)
	return logger
def make_mem_logger(name, to_log, size=8192):
	"""
	Create a memory logger to avoid writing concurrently to the main logger

	:param name: logger name (loggers are reused)
	:param to_log: target logger receiving the buffered records
	:param size: buffer capacity in records
	"""
	from logging.handlers import MemoryHandler
	logger = logging.getLogger(name)
	handler = MemoryHandler(size, target=to_log)
	handler.setFormatter(logging.Formatter('%(message)s'))
	logger.addHandler(handler)
	# keep a reference so callers can flush/inspect the buffer
	logger.memhandler = handler
	logger.setLevel(logging.DEBUG)
	return logger
def free_logger(logger):
	"""
	Free the resources held by the loggers created through make_logger or make_mem_logger:
	close each handler (flushing/releasing its file, if any) and detach it, so that the
	re-used logger object starts clean the next time.

	:param logger: logger to clean up
	:type logger: logging.Logger

	Errors are swallowed on purpose: this cleanup is best-effort.
	"""
	try:
		# iterate over a copy: removeHandler() mutates logger.handlers, and
		# mutating the list while iterating it would skip every other handler
		for x in list(logger.handlers):
			x.close()
			logger.removeHandler(x)
	except Exception:
		pass
def pprint(col, msg, label='', sep='\n'):
	"""
	Print a colored message immediately on stderr::

		from waflib import Logs
		Logs.pprint('RED', 'Something bad just happened')

	:param col: color name to use in :py:const:`Logs.colors_lst`
	:type col: string
	:param msg: message to display
	:type msg: string or a value that can be printed by %s
	:param label: a message to add after the colored output
	:type label: string
	:param sep: a string to append at the end (line separator)
	:type sep: string
	"""
	text = "%s%s%s %s" % (colors(col), msg, colors.NORMAL, label)
	info(text, extra={'terminator':sep})
@@ -0,0 +1,788 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2005-2010 (ita) | |||||
""" | |||||
Node: filesystem structure, contains lists of nodes | |||||
#. Each file/folder is represented by exactly one node. | |||||
#. Some potential class properties are stored on :py:class:`waflib.Build.BuildContext` : nodes to depend on, etc. | |||||
Unused class members can increase the `.wafpickle` file size sensibly. | |||||
#. Node objects should never be created directly, use | |||||
the methods :py:func:`Node.make_node` or :py:func:`Node.find_node` | |||||
#. The methods :py:func:`Node.find_resource`, :py:func:`Node.find_dir` :py:func:`Node.find_or_declare` should be | |||||
used when a build context is present | |||||
#. Each instance of :py:class:`waflib.Context.Context` has a unique :py:class:`Node` subclass. | |||||
(:py:class:`waflib.Node.Nod3`, see the :py:class:`waflib.Context.Context` initializer). A reference to the context owning a node is held as self.ctx | |||||
""" | |||||
import os, re, sys, shutil | |||||
from waflib import Utils, Errors | |||||
# Default Ant-style exclusion patterns: editor backup/lock files, version
# control metadata (CVS, SCCS, svn, BitKeeper, git, bzr, hg, monotone, arch,
# darcs) and OS/desktop cruft such as .DS_Store.
exclude_regs = '''
**/*~
**/#*#
**/.#*
**/%*%
**/._*
**/CVS
**/CVS/**
**/.cvsignore
**/SCCS
**/SCCS/**
**/vssver.scc
**/.svn
**/.svn/**
**/BitKeeper
**/.git
**/.git/**
**/.gitignore
**/.bzr
**/.bzrignore
**/.bzr/**
**/.hg
**/.hg/**
**/_MTN
**/_MTN/**
**/.arch-ids
**/{arch}
**/_darcs
**/_darcs/**
**/.intlcache
**/.DS_Store'''
"""
Ant patterns for files and folders to exclude while doing the
recursive traversal in :py:meth:`waflib.Node.Node.ant_glob`
"""
# TODO waf 1.9
# Pick the path-splitting helper matching the host platform; cygwin and native
# win32 need drive-letter/UNC handling that the plain unix splitter lacks.
split_path = Utils.split_path_unix
split_path_cygwin = Utils.split_path_cygwin
split_path_win32 = Utils.split_path_win32
if sys.platform == 'cygwin':
	split_path = split_path_cygwin
elif Utils.is_win32:
	split_path = split_path_win32
class Node(object):
	"""
	This class is organized in two parts:

	* The basic methods meant for filesystem access (compute paths, create folders, etc)
	* The methods bound to a :py:class:`waflib.Build.BuildContext` (require ``bld.srcnode`` and ``bld.bldnode``)

	The Node objects are not thread safe in any way.
	"""
	# mapping type used for self.children; kept as a class attribute so that
	# subclasses may substitute another dict-like class
	dict_class = dict
	__slots__ = ('name', 'sig', 'children', 'parent', 'cache_abspath', 'cache_isdir', 'cache_sig')
	def __init__(self, name, parent):
		"""
		:param name: file or folder name (no path separator)
		:type name: string
		:param parent: parent node, or None for a root node
		:type parent: :py:class:`waflib.Node.Node` or None
		:raises: :py:class:`waflib.Errors.WafError` if the parent already holds a child of that name
		"""
		self.name = name
		self.parent = parent
		if parent:
			if name in parent.children:
				raise Errors.WafError('node %s exists in the parent files %r already' % (name, parent))
			parent.children[name] = self
	def __setstate__(self, data):
		"Deserializes from data"
		self.name = data[0]
		self.parent = data[1]
		if data[2] is not None:
			# Issue 1480: only folder nodes carry a 'children' attribute
			self.children = self.dict_class(data[2])
		if data[3] is not None:
			self.sig = data[3]
	def __getstate__(self):
		"Serialize the node info"
		return (self.name, self.parent, getattr(self, 'children', None), getattr(self, 'sig', None))
	def __str__(self):
		"String representation (name), for debugging purposes"
		return self.name
	def __repr__(self):
		"String representation (abspath), for debugging purposes"
		return self.abspath()
	def __hash__(self):
		"Node hash, used for storage in dicts. This hash is not persistent."
		return id(self)
	def __eq__(self, node):
		"Node comparison, based on the IDs"
		return id(self) == id(node)
	def __copy__(self):
		"Implemented to prevent nodes from being copied (raises an exception)"
		raise Errors.WafError('nodes are not supposed to be copied')
	def read(self, flags='r', encoding='ISO8859-1'):
		"""
		Return the contents of the file represented by this node::

			def build(bld):
				bld.path.find_node('wscript').read()

		:type flags: string
		:param flags: Open mode (passed to the file open call)
		:type encoding: string
		:param encoding: text encoding used when reading in text mode
		:rtype: string
		:return: File contents
		"""
		return Utils.readf(self.abspath(), flags, encoding)
	def write(self, data, flags='w', encoding='ISO8859-1'):
		"""
		Write some text to the physical file represented by this node::

			def build(bld):
				bld.path.make_node('foo.txt').write('Hello, world!')

		:type data: string
		:param data: data to write
		:type flags: string
		:param flags: Write mode
		:type encoding: string
		:param encoding: text encoding used when writing in text mode
		"""
		Utils.writef(self.abspath(), data, flags, encoding)
	def chmod(self, val):
		"""
		Change file/dir permissions::

			def build(bld):
				bld.path.chmod(493) # 0755
		"""
		os.chmod(self.abspath(), val)
	def delete(self):
		"""Delete the file/folder, and remove this node from the tree. Do not use this object after calling this method."""
		try:
			try:
				if hasattr(self, 'children'):
					shutil.rmtree(self.abspath())
				else:
					os.remove(self.abspath())
			except OSError as e:
				# removing something already gone is not an error
				if os.path.exists(self.abspath()):
					raise e
		finally:
			# always detach from the tree, even if the filesystem removal failed
			self.evict()
	def evict(self):
		"""Internal - called when a node is removed"""
		del self.parent.children[self.name]
	def suffix(self):
		"""Return the file extension including the leading dot ('' when there is none)"""
		k = max(0, self.name.rfind('.'))
		return self.name[k:]
	def height(self):
		"""Depth in the folder hierarchy from the filesystem root or from all the file drives"""
		d = self
		val = -1
		while d:
			d = d.parent
			val += 1
		return val
	def listdir(self):
		"""List the folder contents (sorted names)"""
		lst = Utils.listdir(self.abspath())
		lst.sort()
		return lst
	def mkdir(self):
		"""
		Create a folder represented by this node, creating intermediate nodes as needed
		An exception will be raised only when the folder cannot possibly exist there
		"""
		if getattr(self, 'cache_isdir', None):
			return
		try:
			self.parent.mkdir()
		except OSError:
			pass
		if self.name:
			try:
				os.makedirs(self.abspath())
			except OSError:
				# may already exist; verified just below
				pass
			if not os.path.isdir(self.abspath()):
				raise Errors.WafError('Could not create the directory %s' % self.abspath())
			try:
				self.children
			except AttributeError:
				self.children = self.dict_class()
		self.cache_isdir = True
	def find_node(self, lst):
		"""
		Find a node on the file system (files or folders), create intermediate nodes as needed

		:param lst: path
		:type lst: string or list of string
		:return: the node, or None if nothing exists at that path
		"""
		if isinstance(lst, str):
			lst = [x for x in split_path(lst) if x and x != '.']
		cur = self
		for x in lst:
			if x == '..':
				cur = cur.parent or cur
				continue
			try:
				ch = cur.children
			except AttributeError:
				cur.children = self.dict_class()
			else:
				try:
					cur = cur.children[x]
					continue
				except KeyError:
					pass
			# optimistic: create the node first then look if it was correct to do so
			cur = self.__class__(x, cur)
			try:
				os.stat(cur.abspath())
			except OSError:
				cur.evict()
				return None
		ret = cur
		try:
			os.stat(ret.abspath())
		except OSError:
			ret.evict()
			return None
		# everything above the found node necessarily exists as a directory
		try:
			while not getattr(cur.parent, 'cache_isdir', None):
				cur = cur.parent
				cur.cache_isdir = True
		except AttributeError:
			pass
		return ret
	def make_node(self, lst):
		"""
		Find or create a node without looking on the filesystem

		:param lst: path
		:type lst: string or list of string
		"""
		if isinstance(lst, str):
			lst = [x for x in split_path(lst) if x and x != '.']
		cur = self
		for x in lst:
			if x == '..':
				cur = cur.parent or cur
				continue
			# note: an *empty* children dict also takes the else branch; the
			# re-assignment is harmless since the dict had no entries
			if getattr(cur, 'children', {}):
				if x in cur.children:
					cur = cur.children[x]
					continue
			else:
				cur.children = self.dict_class()
			cur = self.__class__(x, cur)
		return cur
	def search_node(self, lst):
		"""
		Search for a node without looking on the filesystem

		:param lst: path
		:type lst: string or list of string
		:return: the node, or None if it was never created
		"""
		if isinstance(lst, str):
			lst = [x for x in split_path(lst) if x and x != '.']
		cur = self
		for x in lst:
			if x == '..':
				cur = cur.parent or cur
			else:
				try:
					cur = cur.children[x]
				except (AttributeError, KeyError):
					return None
		return cur
	def path_from(self, node):
		"""
		Path of this node seen from the other::

			def build(bld):
				n1 = bld.path.find_node('foo/bar/xyz.txt')
				n2 = bld.path.find_node('foo/stuff/')
				n1.path_from(n2) # '../bar/xyz.txt'

		:param node: path to use as a reference
		:type node: :py:class:`waflib.Node.Node`
		"""
		c1 = self
		c2 = node
		c1h = c1.height()
		c2h = c2.height()
		lst = []
		up = 0
		# climb both nodes to the same height, then to their common ancestor
		while c1h > c2h:
			lst.append(c1.name)
			c1 = c1.parent
			c1h -= 1
		while c2h > c1h:
			up += 1
			c2 = c2.parent
			c2h -= 1
		while id(c1) != id(c2):
			lst.append(c1.name)
			up += 1
			c1 = c1.parent
			c2 = c2.parent
		if c1.parent:
			for i in range(up):
				lst.append('..')
		else:
			# common ancestor is a filesystem root: emit an absolute-like path
			if lst and not Utils.is_win32:
				lst.append('')
		lst.reverse()
		return os.sep.join(lst) or '.'
	def abspath(self):
		"""
		Absolute path. A cache is kept in the context as ``cache_node_abspath``
		"""
		try:
			return self.cache_abspath
		except AttributeError:
			pass
		# think twice before touching this (performance + complexity + correctness)
		if not self.parent:
			val = os.sep
		elif not self.parent.name:
			val = os.sep + self.name
		else:
			val = self.parent.abspath() + os.sep + self.name
		self.cache_abspath = val
		return val
	if Utils.is_win32:
		def abspath(self):
			# win32 variant: the root node maps to '' and drive letters keep
			# their trailing separator (C:\)
			try:
				return self.cache_abspath
			except AttributeError:
				pass
			if not self.parent:
				val = ''
			elif not self.parent.name:
				val = self.name + os.sep
			else:
				val = self.parent.abspath().rstrip(os.sep) + os.sep + self.name
			self.cache_abspath = val
			return val
	def is_child_of(self, node):
		"""
		Does this node belong to the subtree node?::

			def build(bld):
				node = bld.path.find_node('wscript')
				node.is_child_of(bld.path) # True

		:param node: path to use as a reference
		:type node: :py:class:`waflib.Node.Node`
		"""
		p = self
		diff = self.height() - node.height()
		while diff > 0:
			diff -= 1
			p = p.parent
		return id(p) == id(node)
	def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True):
		"""
		Semi-private and recursive method used by ant_glob.

		:param accept: function used for accepting/rejecting a node, returns the patterns that can be still accepted in recursion
		:type accept: function
		:param maxdepth: maximum depth in the filesystem (25)
		:type maxdepth: int
		:param pats: list of patterns to accept and list of patterns to exclude
		:type pats: tuple
		:param dir: return folders too (False by default)
		:type dir: bool
		:param src: return files (True by default)
		:type src: bool
		:param remove: remove files/folders that do not exist (True by default)
		:type remove: bool
		"""
		dircont = self.listdir()
		dircont.sort()
		try:
			lst = set(self.children.keys())
		except AttributeError:
			self.children = self.dict_class()
		else:
			if remove:
				# evict nodes whose files/folders no longer exist on disk
				for x in lst - set(dircont):
					self.children[x].evict()
		for name in dircont:
			npats = accept(name, pats)
			if npats and npats[0]:
				# an empty pattern list among the accepted ones means a full match
				accepted = [] in npats[0]
				node = self.make_node([name])
				isdir = os.path.isdir(node.abspath())
				if accepted:
					if isdir:
						if dir:
							yield node
					else:
						if src:
							yield node
				if getattr(node, 'cache_isdir', None) or isdir:
					node.cache_isdir = True
					if maxdepth:
						for k in node.ant_iter(accept=accept, maxdepth=maxdepth - 1, pats=npats, dir=dir, src=src, remove=remove):
							yield k
		# bug fix: this generator used to end with 'raise StopIteration', which
		# PEP 479 (Python 3.7+) converts into a RuntimeError; simply falling off
		# the end of the function terminates the iteration cleanly
	def ant_glob(self, *k, **kw):
		"""
		This method is used for finding files across folders. It behaves like ant patterns:

		* ``**/*`` find all files recursively
		* ``**/*.class`` find all files ending by .class
		* ``..`` find files having two dot characters

		For example::

			def configure(cfg):
				cfg.path.ant_glob('**/*.cpp') # find all .cpp files
				cfg.root.ant_glob('etc/*.txt') # using the filesystem root can be slow
				cfg.path.ant_glob('*.cpp', excl=['*.c'], src=True, dir=False)

		For more information see http://ant.apache.org/manual/dirtasks.html

		The nodes that correspond to files and folders that do not exist will be removed. To prevent this
		behaviour, pass 'remove=False'

		:param incl: ant patterns or list of patterns to include
		:type incl: string or list of strings
		:param excl: ant patterns or list of patterns to exclude
		:type excl: string or list of strings
		:param dir: return folders too (False by default)
		:type dir: bool
		:param src: return files (True by default)
		:type src: bool
		:param remove: remove files/folders that do not exist (True by default)
		:type remove: bool
		:param maxdepth: maximum depth of recursion
		:type maxdepth: int
		:param ignorecase: ignore case while matching (False by default)
		:type ignorecase: bool
		"""
		src = kw.get('src', True)
		dir = kw.get('dir', False)
		excl = kw.get('excl', exclude_regs)
		incl = k and k[0] or kw.get('incl', '**')
		reflags = kw.get('ignorecase', 0) and re.I
		def to_pat(s):
			# translate ant patterns into lists of ['**' markers and compiled regexps]
			lst = Utils.to_list(s)
			ret = []
			for x in lst:
				x = x.replace('\\', '/').replace('//', '/')
				if x.endswith('/'):
					x += '**'
				lst2 = x.split('/')
				accu = []
				for k in lst2:
					if k == '**':
						accu.append(k)
					else:
						k = k.replace('.', '[.]').replace('*','.*').replace('?', '.').replace('+', '\\+')
						k = '^%s$' % k
						try:
							#print "pattern", k
							accu.append(re.compile(k, flags=reflags))
						except Exception as e:
							raise Errors.WafError("Invalid pattern: %s" % k, e)
				ret.append(accu)
			return ret
		def filtre(name, nn):
			# advance each pattern list by one path component; an empty list means
			# the pattern has matched completely
			ret = []
			for lst in nn:
				if not lst:
					pass
				elif lst[0] == '**':
					ret.append(lst)
					if len(lst) > 1:
						if lst[1].match(name):
							ret.append(lst[2:])
					else:
						ret.append([])
				elif lst[0].match(name):
					ret.append(lst[1:])
			return ret
		def accept(name, pats):
			nacc = filtre(name, pats[0])
			nrej = filtre(name, pats[1])
			if [] in nrej:
				# a completed exclusion pattern vetoes any inclusion
				nacc = []
			return [nacc, nrej]
		ret = [x for x in self.ant_iter(accept=accept, pats=[to_pat(incl), to_pat(excl)], maxdepth=kw.get('maxdepth', 25), dir=dir, src=src, remove=kw.get('remove', True))]
		if kw.get('flat', False):
			return ' '.join([x.path_from(self) for x in ret])
		return ret
	# --------------------------------------------------------------------------------
	# the following methods require the source/build folders (bld.srcnode/bld.bldnode)
	# using a subclass is a possibility, but is that really necessary?
	# --------------------------------------------------------------------------------
	def is_src(self):
		"""
		True if the node is below the source directory
		note: !is_src does not imply is_bld()

		:rtype: bool
		"""
		cur = self
		x = id(self.ctx.srcnode)
		y = id(self.ctx.bldnode)
		while cur.parent:
			if id(cur) == y:
				return False
			if id(cur) == x:
				return True
			cur = cur.parent
		return False
	def is_bld(self):
		"""
		True if the node is below the build directory
		note: !is_bld does not imply is_src

		:rtype: bool
		"""
		cur = self
		y = id(self.ctx.bldnode)
		while cur.parent:
			if id(cur) == y:
				return True
			cur = cur.parent
		return False
	def get_src(self):
		"""
		Return the equivalent src node (or self if not possible)

		:rtype: :py:class:`waflib.Node.Node`
		"""
		cur = self
		x = id(self.ctx.srcnode)
		y = id(self.ctx.bldnode)
		lst = []
		while cur.parent:
			if id(cur) == y:
				lst.reverse()
				return self.ctx.srcnode.make_node(lst)
			if id(cur) == x:
				return self
			lst.append(cur.name)
			cur = cur.parent
		return self
	def get_bld(self):
		"""
		Return the equivalent bld node (or self if not possible)

		:rtype: :py:class:`waflib.Node.Node`
		"""
		cur = self
		x = id(self.ctx.srcnode)
		y = id(self.ctx.bldnode)
		lst = []
		while cur.parent:
			if id(cur) == y:
				return self
			if id(cur) == x:
				lst.reverse()
				return self.ctx.bldnode.make_node(lst)
			lst.append(cur.name)
			cur = cur.parent
		# the file is external to the current project, make a fake root in the current build directory
		lst.reverse()
		if lst and Utils.is_win32 and len(lst[0]) == 2 and lst[0].endswith(':'):
			lst[0] = lst[0][0]
		return self.ctx.bldnode.make_node(['__root__'] + lst)
	def find_resource(self, lst):
		"""
		Try to find a declared build node or a source file

		:param lst: path
		:type lst: string or list of string
		"""
		if isinstance(lst, str):
			lst = [x for x in split_path(lst) if x and x != '.']
		node = self.get_bld().search_node(lst)
		if not node:
			self = self.get_src()
			node = self.find_node(lst)
		if node:
			if os.path.isdir(node.abspath()):
				return None
		return node
	def find_or_declare(self, lst):
		"""
		if 'self' is in build directory, try to return an existing node
		if no node is found, go to the source directory
		try to find an existing node in the source directory
		if no node is found, create it in the build directory

		:param lst: path
		:type lst: string or list of string
		"""
		if isinstance(lst, str):
			lst = [x for x in split_path(lst) if x and x != '.']
		node = self.get_bld().search_node(lst)
		if node:
			if not os.path.isfile(node.abspath()):
				# the file is gone: reset the signature and make sure the folder exists
				node.sig = None
				node.parent.mkdir()
			return node
		self = self.get_src()
		node = self.find_node(lst)
		if node:
			if not os.path.isfile(node.abspath()):
				node.sig = None
				node.parent.mkdir()
			return node
		node = self.get_bld().make_node(lst)
		node.parent.mkdir()
		return node
	def find_dir(self, lst):
		"""
		Search for a folder in the filesystem

		:param lst: path
		:type lst: string or list of string
		"""
		if isinstance(lst, str):
			lst = [x for x in split_path(lst) if x and x != '.']
		node = self.find_node(lst)
		try:
			if not os.path.isdir(node.abspath()):
				return None
		except (OSError, AttributeError):
			# the node might be None, and raise an AttributeError
			return None
		return node
	# helpers for building things
	def change_ext(self, ext, ext_in=None):
		"""
		:return: A build node of the same path, but with a different extension
		:rtype: :py:class:`waflib.Node.Node`
		"""
		name = self.name
		if ext_in is None:
			k = name.rfind('.')
			if k >= 0:
				name = name[:k] + ext
			else:
				name = name + ext
		else:
			name = name[:- len(ext_in)] + ext
		return self.parent.find_or_declare([name])
	def bldpath(self):
		"Path seen from the build directory default/src/foo.cpp"
		return self.path_from(self.ctx.bldnode)
	def srcpath(self):
		"Path seen from the source directory ../src/foo.cpp"
		return self.path_from(self.ctx.srcnode)
	def relpath(self):
		"If a file in the build directory, bldpath, else srcpath"
		cur = self
		x = id(self.ctx.bldnode)
		while cur.parent:
			if id(cur) == x:
				return self.bldpath()
			cur = cur.parent
		return self.srcpath()
	def bld_dir(self):
		"Build path without the file name"
		return self.parent.bldpath()
	def get_bld_sig(self):
		"""
		Node signature, assuming the file is in the build directory
		"""
		try:
			return self.cache_sig
		except AttributeError:
			pass
		if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
			self.sig = Utils.h_file(self.abspath())
		# NOTE(review): for genuine build outputs, self.sig is presumably set by
		# the task that produced the file - confirm against Task/Build
		self.cache_sig = ret = self.sig
		return ret
# Module-level lock; acquired by callers around node (de)serialization so that
# Nod3.ctx can be set consistently during pickling.
pickle_lock = Utils.threading.Lock()
"""Lock mandatory for thread-safe node serialization"""

class Nod3(Node):
	"""Mandatory subclass for thread-safe node serialization"""
	pass # do not remove
@@ -0,0 +1,269 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Scott Newton, 2005 (scottn) | |||||
# Thomas Nagy, 2006-2010 (ita) | |||||
""" | |||||
Support for waf command-line options | |||||
Provides default command-line options, | |||||
as well as custom ones, used by the ``options`` wscript function. | |||||
""" | |||||
import os, tempfile, optparse, sys, re | |||||
from waflib import Logs, Utils, Context | |||||
# Default command names, used for the --help output.
cmds = 'distclean configure build install clean uninstall check dist distcheck'.split()
"""
Constant representing the default waf commands displayed in::

	$ waf --help
"""

# Filled by OptionsContext.parse_args with the optparse Values object.
options = {}
"""
A dictionary representing the command-line options::

	$ waf --foo=bar
"""

commands = []
"""
List of commands to execute extracted from the command-line. This list is consumed during the execution, see :py:func:`waflib.Scripting.run_commands`.
"""

envvars = []
"""
List of environment variable declarations placed after the Waf executable name.
These are detected by searching for "=" in the rest arguments.
"""

# Name of the lock file written in the project directory; overridable through
# the WAFLOCK environment variable.
lockfile = os.environ.get('WAFLOCK', '.lock-waf_%s_build' % sys.platform)

# Normalized platform string (see Utils.unversioned_sys_platform).
platform = Utils.unversioned_sys_platform()
class opt_parser(optparse.OptionParser):
	"""
	Command-line options parser.
	"""
	def __init__(self, ctx):
		# 'resolve' lets later option definitions silently override earlier ones
		optparse.OptionParser.__init__(self, conflict_handler="resolve", version='waf %s (%s)' % (Context.WAFVERSION, Context.WAFREVISION))
		# wrap the help output to the terminal width
		self.formatter.width = Logs.get_term_cols()
		# back-reference to the OptionsContext owning this parser
		self.ctx = ctx
	def print_usage(self, file=None):
		# waf prints the full help where optparse would print just the usage line
		return self.print_help(file)
	def get_usage(self):
		"""
		Return the message to print on ``waf --help``
		"""
		# collect command -> docstring, first from the registered context classes...
		cmds_str = {}
		for cls in Context.classes:
			if not cls.cmd or cls.cmd == 'options' or cls.cmd.startswith( '_' ):
				continue
			s = cls.__doc__ or ''
			cmds_str[cls.cmd] = s
		if Context.g_module:
			# ...then from plain functions defined in the top-level wscript
			for (k, v) in Context.g_module.__dict__.items():
				if k in ('options', 'init', 'shutdown'):
					continue
				if type(v) is type(Context.create_context):
					if v.__doc__ and not k.startswith('_'):
						cmds_str[k] = v.__doc__
		# left-justify the command names to the longest one
		just = 0
		for k in cmds_str:
			just = max(just, len(k))
		lst = [' %s: %s' % (k.ljust(just), v) for (k, v) in cmds_str.items()]
		lst.sort()
		ret = '\n'.join(lst)
		return '''waf [commands] [options]
Main commands (example: ./waf build -j4)
%s
''' % ret
class OptionsContext(Context.Context): | |||||
""" | |||||
Collect custom options from wscript files and parses the command line. | |||||
Set the global :py:const:`waflib.Options.commands` and :py:const:`waflib.Options.options` values. | |||||
""" | |||||
cmd = 'options' | |||||
fun = 'options' | |||||
def __init__(self, **kw): | |||||
super(OptionsContext, self).__init__(**kw) | |||||
self.parser = opt_parser(self) | |||||
"""Instance of :py:class:`waflib.Options.opt_parser`""" | |||||
self.option_groups = {} | |||||
jobs = self.jobs() | |||||
p = self.add_option | |||||
color = os.environ.get('NOCOLOR', '') and 'no' or 'auto' | |||||
p('-c', '--color', dest='colors', default=color, action='store', help='whether to use colors (yes/no/auto) [default: auto]', choices=('yes', 'no', 'auto')) | |||||
p('-j', '--jobs', dest='jobs', default=jobs, type='int', help='amount of parallel jobs (%r)' % jobs) | |||||
p('-k', '--keep', dest='keep', default=0, action='count', help='continue despite errors (-kk to try harder)') | |||||
p('-v', '--verbose', dest='verbose', default=0, action='count', help='verbosity level -v -vv or -vvv [default: 0]') | |||||
p('--zones', dest='zones', default='', action='store', help='debugging zones (task_gen, deps, tasks, etc)') | |||||
gr = self.add_option_group('Configuration options') | |||||
self.option_groups['configure options'] = gr | |||||
gr.add_option('-o', '--out', action='store', default='', help='build dir for the project', dest='out') | |||||
gr.add_option('-t', '--top', action='store', default='', help='src dir for the project', dest='top') | |||||
default_prefix = getattr(Context.g_module, 'default_prefix', os.environ.get('PREFIX')) | |||||
if not default_prefix: | |||||
if platform == 'win32': | |||||
d = tempfile.gettempdir() | |||||
default_prefix = d[0].upper() + d[1:] | |||||
# win32 preserves the case, but gettempdir does not | |||||
else: | |||||
default_prefix = '/usr/local/' | |||||
gr.add_option('--prefix', dest='prefix', default=default_prefix, help='installation prefix [default: %r]' % default_prefix) | |||||
gr.add_option('--bindir', dest='bindir', help='bindir') | |||||
gr.add_option('--libdir', dest='libdir', help='libdir') | |||||
gr = self.add_option_group('Build and installation options') | |||||
self.option_groups['build and install options'] = gr | |||||
gr.add_option('-p', '--progress', dest='progress_bar', default=0, action='count', help= '-p: progress bar; -pp: ide output') | |||||
gr.add_option('--targets', dest='targets', default='', action='store', help='task generators, e.g. "target1,target2"') | |||||
gr = self.add_option_group('Step options') | |||||
self.option_groups['step options'] = gr | |||||
gr.add_option('--files', dest='files', default='', action='store', help='files to process, by regexp, e.g. "*/main.c,*/test/main.o"') | |||||
default_destdir = os.environ.get('DESTDIR', '') | |||||
gr = self.add_option_group('Installation and uninstallation options') | |||||
self.option_groups['install/uninstall options'] = gr | |||||
gr.add_option('--destdir', help='installation root [default: %r]' % default_destdir, default=default_destdir, dest='destdir') | |||||
gr.add_option('-f', '--force', dest='force', default=False, action='store_true', help='force file installation') | |||||
gr.add_option('--distcheck-args', metavar='ARGS', help='arguments to pass to distcheck', default=None, action='store') | |||||
def jobs(self): | |||||
""" | |||||
Find the amount of cpu cores to set the default amount of tasks executed in parallel. At | |||||
runtime the options can be obtained from :py:const:`waflib.Options.options` :: | |||||
from waflib.Options import options | |||||
njobs = options.jobs | |||||
:return: the amount of cpu cores | |||||
:rtype: int | |||||
""" | |||||
count = int(os.environ.get('JOBS', 0)) | |||||
if count < 1: | |||||
if 'NUMBER_OF_PROCESSORS' in os.environ: | |||||
# on Windows, use the NUMBER_OF_PROCESSORS environment variable | |||||
count = int(os.environ.get('NUMBER_OF_PROCESSORS', 1)) | |||||
else: | |||||
# on everything else, first try the POSIX sysconf values | |||||
if hasattr(os, 'sysconf_names'): | |||||
if 'SC_NPROCESSORS_ONLN' in os.sysconf_names: | |||||
count = int(os.sysconf('SC_NPROCESSORS_ONLN')) | |||||
elif 'SC_NPROCESSORS_CONF' in os.sysconf_names: | |||||
count = int(os.sysconf('SC_NPROCESSORS_CONF')) | |||||
if not count and os.name not in ('nt', 'java'): | |||||
try: | |||||
tmp = self.cmd_and_log(['sysctl', '-n', 'hw.ncpu'], quiet=0) | |||||
except Exception: | |||||
pass | |||||
else: | |||||
if re.match('^[0-9]+$', tmp): | |||||
count = int(tmp) | |||||
if count < 1: | |||||
count = 1 | |||||
elif count > 1024: | |||||
count = 1024 | |||||
return count | |||||
def add_option(self, *k, **kw): | |||||
""" | |||||
Wrapper for optparse.add_option:: | |||||
def options(ctx): | |||||
ctx.add_option('-u', '--use', dest='use', default=False, action='store_true', | |||||
help='a boolean option') | |||||
""" | |||||
return self.parser.add_option(*k, **kw) | |||||
def add_option_group(self, *k, **kw): | |||||
""" | |||||
Wrapper for optparse.add_option_group:: | |||||
def options(ctx): | |||||
gr = ctx.add_option_group('some options') | |||||
gr.add_option('-u', '--use', dest='use', default=False, action='store_true') | |||||
""" | |||||
try: | |||||
gr = self.option_groups[k[0]] | |||||
except KeyError: | |||||
gr = self.parser.add_option_group(*k, **kw) | |||||
self.option_groups[k[0]] = gr | |||||
return gr | |||||
def get_option_group(self, opt_str):
	"""
	Wrapper for optparse.get_option_group: look the group up first by the key
	used in :py:meth:`add_option_group`, then by the optparse group title::

		def options(ctx):
			gr = ctx.get_option_group('configure options')
			gr.add_option('-o', '--out', action='store', default='',
				help='build dir for the project', dest='out')

	:return: the group, or None when no group matches
	"""
	if opt_str in self.option_groups:
		return self.option_groups[opt_str]
	# fall back to a linear scan over the groups known to optparse
	for group in self.parser.option_groups:
		if group.title == opt_str:
			return group
	return None
def parse_args(self, _args=None):
	"""
	Parse arguments from a list (not bound to the command-line).

	:param _args: arguments
	:type _args: list of strings
	"""
	global options, commands, envvars
	options, leftover_args = self.parser.parse_args(args=_args)
	for arg in leftover_args:
		# NAME=VALUE tokens are environment variable overrides, the rest are commands
		target = envvars if '=' in arg else commands
		target.append(arg)
	if options.destdir:
		options.destdir = os.path.abspath(os.path.expanduser(options.destdir))
	if options.verbose >= 1:
		# load the error-checking tool in verbose mode
		self.load('errcheck')
	Logs.enable_colors({'yes': 2, 'auto': 1, 'no': 0}[options.colors])
def execute(self):
	"""
	See :py:func:`waflib.Context.Context.execute`.

	Runs the base-class execution (presumably recursing into the wscript
	'options' functions — defined outside this file), then parses the
	command-line arguments.
	"""
	super(OptionsContext, self).execute()
	self.parse_args()
@@ -0,0 +1,366 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2005-2010 (ita) | |||||
""" | |||||
Runner.py: Task scheduling and execution | |||||
""" | |||||
import random, atexit | |||||
try: | |||||
from queue import Queue | |||||
except ImportError: | |||||
from Queue import Queue | |||||
from waflib import Utils, Task, Errors, Logs | |||||
GAP = 10 | |||||
""" | |||||
Wait for free tasks if there are at least ``GAP * njobs`` in queue | |||||
""" | |||||
class TaskConsumer(Utils.threading.Thread):
	"""
	Task consumers belong to a pool of workers

	They wait for tasks in the queue and then use ``task.process(...)``
	"""
	def __init__(self):
		Utils.threading.Thread.__init__(self)
		self.ready = Queue()
		"""
		Obtain :py:class:`waflib.Task.TaskBase` instances from this queue.
		"""
		# daemon thread: do not block interpreter exit
		# (the deprecated setDaemon(1) spelling was replaced by the attribute)
		self.daemon = True
		self.start()

	def run(self):
		"""
		Loop over the tasks to execute
		"""
		try:
			self.loop()
		except Exception:
			# worker threads must never propagate exceptions (for example the
			# TypeError triggered by the None sentinel used at shutdown)
			pass

	def loop(self):
		"""
		Obtain tasks from :py:attr:`waflib.Runner.TaskConsumer.ready` and call
		:py:meth:`waflib.Task.TaskBase.process`. If the object is a function, execute it.
		"""
		while 1:
			tsk = self.ready.get()
			if not isinstance(tsk, Task.TaskBase):
				# callables (e.g. the queue-swapping closures in Parallel) run in this thread
				tsk(self)
			else:
				tsk.process()
# module-level pool shared by all Parallel instances; consumer threads are
# recycled across builds through get_pool()/put_pool()
pool = Queue()
"""
Pool of task consumer objects
"""
def get_pool():
	"""
	Obtain a task consumer from :py:attr:`waflib.Runner.pool`, creating a fresh
	one when none is available.

	Do not forget to put it back by using :py:func:`waflib.Runner.put_pool`
	and reset properly (original waiting queue).

	:rtype: :py:class:`waflib.Runner.TaskConsumer`
	"""
	try:
		return pool.get(block=False)
	except Exception:
		# empty (or already freed) pool: spawn a new consumer thread
		return TaskConsumer()
def put_pool(x):
	"""
	Return a task consumer to the thread pool :py:attr:`waflib.Runner.pool`
	so that it can be reused later (see :py:func:`waflib.Runner.get_pool`).

	:param x: task consumer object
	:type x: :py:class:`waflib.Runner.TaskConsumer`
	"""
	pool.put(x)
def _free_resources():
	# Interpreter-exit hook: drain the consumer pool and terminate the threads.
	global pool
	drained = []
	while pool.qsize():
		drained.append(pool.get())
	for consumer in drained:
		# None is neither a TaskBase nor callable: the worker loop raises,
		# run() swallows the exception and the thread finishes
		consumer.ready.put(None)
	for consumer in drained:
		consumer.join()
	pool = None
atexit.register(_free_resources)
class Parallel(object):
	"""
	Schedule the tasks obtained from the build context for execution.

	The main thread acts as the producer: it pulls groups of tasks from
	``biter``, hands runnable ones to the consumer threads, and collects the
	results through the queue ``out``.
	"""
	def __init__(self, bld, j=2):
		"""
		The initialization requires a build context reference
		for computing the total number of jobs.
		"""
		self.numjobs = j
		"""
		Number of consumers in the pool
		"""
		self.bld = bld
		"""
		Instance of :py:class:`waflib.Build.BuildContext`
		"""
		self.outstanding = []
		"""List of :py:class:`waflib.Task.TaskBase` that may be ready to be executed"""
		self.frozen = []
		"""List of :py:class:`waflib.Task.TaskBase` that cannot be executed immediately"""
		self.out = Queue(0)
		"""List of :py:class:`waflib.Task.TaskBase` returned by the task consumers"""
		self.count = 0
		"""Amount of tasks that may be processed by :py:class:`waflib.Runner.TaskConsumer`"""
		self.processed = 1
		"""Amount of tasks processed"""
		self.stop = False
		"""Error flag to stop the build"""
		self.error = []
		"""Tasks that could not be executed"""
		self.biter = None
		"""Task iterator which must give groups of parallelizable tasks when calling ``next()``"""
		self.dirty = False
		"""Flag to indicate that tasks have been executed, and that the build cache must be saved (call :py:meth:`waflib.Build.BuildContext.store`)"""
	def get_next_task(self):
		"""
		Obtain the next task to execute, or None when ``outstanding`` is empty.

		:rtype: :py:class:`waflib.Task.TaskBase`
		"""
		if not self.outstanding:
			return None
		return self.outstanding.pop(0)
	def postpone(self, tsk):
		"""
		A task cannot be executed at this point, put it in the list :py:attr:`waflib.Runner.Parallel.frozen`.

		:param tsk: task
		:type tsk: :py:class:`waflib.Task.TaskBase`
		"""
		# insert at a random end to avoid pathological orderings when the
		# same tasks keep being postponed
		if random.randint(0, 1):
			self.frozen.insert(0, tsk)
		else:
			self.frozen.append(tsk)
	def refill_task_list(self):
		"""
		Put the next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`.
		"""
		# throttle the producer: wait for results while too many tasks are in flight
		while self.count > self.numjobs * GAP:
			self.get_out()
		while not self.outstanding:
			if self.count:
				self.get_out()
			elif self.frozen:
				# deadlock detection: if no task was processed since the last
				# visit and tasks are still frozen, the dependencies cannot be
				# satisfied (self.deadlock is created lazily on first use)
				try:
					cond = self.deadlock == self.processed
				except AttributeError:
					pass
				else:
					if cond:
						msg = 'check the build order for the tasks'
						for tsk in self.frozen:
							if not tsk.run_after:
								msg = 'check the methods runnable_status'
								break
						lst = []
						for tsk in self.frozen:
							lst.append('%s\t-> %r' % (repr(tsk), [id(x) for x in tsk.run_after]))
						raise Errors.WafError('Deadlock detected: %s%s' % (msg, ''.join(lst)))
				self.deadlock = self.processed
			if self.frozen:
				# retry the frozen tasks: their predecessors may have completed
				self.outstanding += self.frozen
				self.frozen = []
			elif not self.count:
				# nothing running and nothing frozen: fetch the next task group
				# (next() raises StopIteration when the build is complete)
				self.outstanding.extend(next(self.biter))
				self.total = self.bld.total()
				break
	def add_more_tasks(self, tsk):
		"""
		Tasks may be added dynamically during the build by binding them to the task :py:attr:`waflib.Task.TaskBase.more_tasks`

		:param tsk: task
		:type tsk: :py:attr:`waflib.Task.TaskBase`
		"""
		if getattr(tsk, 'more_tasks', None):
			self.outstanding += tsk.more_tasks
			self.total += len(tsk.more_tasks)
	def get_out(self):
		"""
		Obtain one task returned from the task consumers, and update the task count. Add more tasks if necessary through
		:py:attr:`waflib.Runner.Parallel.add_more_tasks`.

		:rtype: :py:attr:`waflib.Task.TaskBase`
		"""
		tsk = self.out.get()
		if not self.stop:
			self.add_more_tasks(tsk)
		# one fewer task in flight; remember that the build cache is stale
		self.count -= 1
		self.dirty = True
		return tsk
	def add_task(self, tsk):
		"""
		Pass a task to a consumer.

		:param tsk: task
		:type tsk: :py:attr:`waflib.Task.TaskBase`
		"""
		try:
			self.pool
		except AttributeError:
			# the consumer pool is created on first use
			self.init_task_pool()
		self.ready.put(tsk)
	def init_task_pool(self):
		# lazy creation, and set a common pool for all task consumers
		pool = self.pool = [get_pool() for i in range(self.numjobs)]
		self.ready = Queue(0)
		def setq(consumer):
			# executed by the consumer thread itself (see TaskConsumer.loop):
			# switch the consumer to the shared queue
			consumer.ready = self.ready
		for x in pool:
			x.ready.put(setq)
		return pool
	def free_task_pool(self):
		# return the consumers, setting a different queue for each of them
		def setq(consumer):
			# executed by the consumer thread: detach it from the shared queue
			# and post an acknowledgement to self.out so that get_out() below
			# can wait for every consumer to finish detaching
			consumer.ready = Queue(0)
			self.out.put(self)
		try:
			pool = self.pool
		except AttributeError:
			# no task was ever dispatched, so no pool was created
			pass
		else:
			for x in pool:
				self.ready.put(setq)
			for x in pool:
				self.get_out()
			for x in pool:
				put_pool(x)
			self.pool = []
	def skip(self, tsk):
		"""Mark the task as skipped (up to date)"""
		tsk.hasrun = Task.SKIPPED
	def error_handler(self, tsk):
		"""
		Called when a task cannot be executed. The flag :py:attr:`waflib.Runner.Parallel.stop` is set, unless
		the build is executed with::

			$ waf build -k

		:param tsk: task
		:type tsk: :py:attr:`waflib.Task.TaskBase`
		"""
		if not self.bld.keep:
			self.stop = True
		self.error.append(tsk)
	def task_status(self, tsk):
		"""
		Return the runnable status of the task; exceptions raised by
		``runnable_status`` are recorded and converted into ``Task.EXCEPTION``.
		"""
		try:
			return tsk.runnable_status()
		except Exception:
			self.processed += 1
			tsk.err_msg = Utils.ex_stack()
			if not self.stop and self.bld.keep:
				self.skip(tsk)
				if self.bld.keep == 1:
					# if -k stop at the first exception, if -kk try to go as far as possible
					if Logs.verbose > 1 or not self.error:
						self.error.append(tsk)
					self.stop = True
				else:
					if Logs.verbose > 1:
						self.error.append(tsk)
				return Task.EXCEPTION
			tsk.hasrun = Task.EXCEPTION
			self.error_handler(tsk)
			return Task.EXCEPTION
	def start(self):
		"""
		Give tasks to :py:class:`waflib.Runner.TaskConsumer` instances until the build finishes or the ``stop`` flag is set.
		If only one job is used, then execute the tasks one by one, without consumers.
		"""
		self.total = self.bld.total()
		while not self.stop:
			self.refill_task_list()
			# consider the next task
			tsk = self.get_next_task()
			if not tsk:
				if self.count:
					# tasks may add new ones after they are run
					continue
				else:
					# no tasks to run, no tasks running, time to exit
					break
			if tsk.hasrun:
				# if the task is marked as "run", just skip it
				self.processed += 1
				continue
			if self.stop: # stop immediately after a failure was detected
				break
			st = self.task_status(tsk)
			if st == Task.RUN_ME:
				tsk.position = (self.processed, self.total)
				self.count += 1
				tsk.master = self
				self.processed += 1
				if self.numjobs == 1:
					# single job: run inline, no consumer threads
					tsk.process()
				else:
					self.add_task(tsk)
			if st == Task.ASK_LATER:
				self.postpone(tsk)
			elif st == Task.SKIP_ME:
				self.processed += 1
				self.skip(tsk)
				self.add_more_tasks(tsk)
		# self.count represents the tasks that have been made available to the consumer threads
		# collect all the tasks after an error else the message may be incomplete
		while self.error and self.count:
			self.get_out()
		#print loop
		assert (self.count == 0 or self.stop)
		# free the task pool, if any
		self.free_task_pool()
@@ -0,0 +1,605 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2005-2010 (ita) | |||||
"Module called for configuring, compiling and installing targets" | |||||
import os, shlex, shutil, traceback, errno, sys, stat | |||||
from waflib import Utils, Configure, Logs, Options, ConfigSet, Context, Errors, Build, Node | |||||
# NOTE(review): not referenced in this chunk — presumably set elsewhere to force
# an output directory; confirm against Configure/Context before relying on it
build_dir_override = None
# commands that prevent climbing up the directory tree in waf_entry_point
no_climb_commands = ['configure']
# command executed when none is given on the command line (see parse_options)
default_cmd = "build"
def waf_entry_point(current_directory, version, wafdir):
	"""
	This is the main entry point, all Waf execution starts here.

	Locates the project root (climbing up from the current directory unless a
	no-climb command is given), loads the top-level wscript, and executes the
	requested commands. Exits the process directly on fatal errors.

	:param current_directory: absolute path representing the current directory
	:type current_directory: string
	:param version: version number
	:type version: string
	:param wafdir: absolute path representing the directory of the waf library
	:type wafdir: string
	"""
	Logs.init_log()

	if Context.WAFVERSION != version:
		# refuse to run against a mismatched waf library
		Logs.error('Waf script %r and library %r do not match (directory %r)' % (version, Context.WAFVERSION, wafdir))
		sys.exit(1)

	if '--version' in sys.argv:
		# print the version without requiring a project to be configured
		Context.run_dir = current_directory
		ctx = Context.create_context('options')
		ctx.curdir = current_directory
		ctx.parse_args()
		sys.exit(0)

	if len(sys.argv) > 1:
		# os.path.join handles absolute paths in sys.argv[1] accordingly (it discards the previous ones)
		# if sys.argv[1] is not an absolute path, then it is relative to the current working directory
		potential_wscript = os.path.join(current_directory, sys.argv[1])
		# maybe check if the file is executable
		# perhaps extract 'wscript' as a constant
		if os.path.basename(potential_wscript) == 'wscript' and os.path.isfile(potential_wscript):
			# need to explicitly normalize the path, as it may contain extra '/.'
			current_directory = os.path.normpath(os.path.dirname(potential_wscript))
			sys.argv.pop(1)

	Context.waf_dir = wafdir
	Context.launch_dir = current_directory

	# if 'configure' is in the commands, do not search any further
	no_climb = os.environ.get('NOCLIMB', None)
	if not no_climb:
		for k in no_climb_commands:
			# NOTE(review): the 'break' below only exits the inner loop over sys.argv
			for y in sys.argv:
				if y.startswith(k):
					no_climb = True
					break

	# try to find a lock file (if the project was configured)
	# at the same time, store the first wscript file seen
	cur = current_directory
	while cur:
		lst = os.listdir(cur)
		if Options.lockfile in lst:
			env = ConfigSet.ConfigSet()
			try:
				env.load(os.path.join(cur, Options.lockfile))
				ino = os.stat(cur)[stat.ST_INO]
			except Exception:
				# unreadable or corrupted lock file: keep climbing
				pass
			else:
				# check if the folder was not moved
				for x in (env.run_dir, env.top_dir, env.out_dir):
					if Utils.is_win32:
						if cur == x:
							load = True
							break
					else:
						# if the filesystem features symlinks, compare the inode numbers
						try:
							ino2 = os.stat(x)[stat.ST_INO]
						except OSError:
							pass
						else:
							if ino == ino2:
								load = True
								break
				else:
					# for-else: no directory matched, so the project was moved
					Logs.warn('invalid lock file in %s' % cur)
					load = False

				if load:
					Context.run_dir = env.run_dir
					Context.top_dir = env.top_dir
					Context.out_dir = env.out_dir
					break

		if not Context.run_dir:
			# remember the first directory containing a wscript as a fallback
			if Context.WSCRIPT_FILE in lst:
				Context.run_dir = cur

		next = os.path.dirname(cur)
		if next == cur:
			# reached the filesystem root
			break
		cur = next

		if no_climb:
			break

	if not Context.run_dir:
		if '-h' in sys.argv or '--help' in sys.argv:
			Logs.warn('No wscript file found: the help message may be incomplete')
			Context.run_dir = current_directory
			ctx = Context.create_context('options')
			ctx.curdir = current_directory
			ctx.parse_args()
			sys.exit(0)
		Logs.error('Waf: Run from a directory containing a file named %r' % Context.WSCRIPT_FILE)
		sys.exit(1)

	try:
		os.chdir(Context.run_dir)
	except OSError:
		Logs.error('Waf: The folder %r is unreadable' % Context.run_dir)
		sys.exit(1)

	try:
		set_main_module(os.path.join(Context.run_dir, Context.WSCRIPT_FILE))
	except Errors.WafError as e:
		Logs.pprint('RED', e.verbose_msg)
		Logs.error(str(e))
		sys.exit(1)
	except Exception as e:
		Logs.error('Waf: The wscript in %r is unreadable' % Context.run_dir, e)
		traceback.print_exc(file=sys.stdout)
		sys.exit(2)

	# commented-out profiling harness kept for development (see the #""" below)
	"""
	import cProfile, pstats
	cProfile.runctx("from waflib import Scripting; Scripting.run_commands()", {}, {}, 'profi.txt')
	p = pstats.Stats('profi.txt')
	p.sort_stats('time').print_stats(75) # or 'cumulative'
	"""
	try:
		run_commands()
	except Errors.WafError as e:
		if Logs.verbose > 1:
			Logs.pprint('RED', e.verbose_msg)
		Logs.error(e.msg)
		sys.exit(1)
	except SystemExit:
		raise
	except Exception as e:
		traceback.print_exc(file=sys.stdout)
		sys.exit(2)
	except KeyboardInterrupt:
		Logs.pprint('RED', 'Interrupted')
		sys.exit(68)
	#"""
def set_main_module(file_path):
	"""
	Read the main wscript file into :py:const:`waflib.Context.Context.g_module` and
	bind default functions such as ``init``, ``dist``, ``distclean`` if not defined.
	Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization.

	:param file_path: absolute path representing the top-level wscript file
	:type file_path: string
	"""
	Context.g_module = Context.load_module(file_path)
	Context.g_module.root_path = file_path

	# note: to register the module globally, use the following:
	# sys.modules['wscript_main'] = g_module

	def set_def(obj):
		# bind obj on the wscript module, unless the user already defined it
		name = obj.__name__
		if not name in Context.g_module.__dict__:
			setattr(Context.g_module, name, obj)
	# 'update' was previously listed twice in this tuple; once is enough
	for k in (update, dist, distclean, distcheck):
		set_def(k)
	# add dummy init and shutdown functions if they're not defined
	if not 'init' in Context.g_module.__dict__:
		Context.g_module.init = Utils.nada
	if not 'shutdown' in Context.g_module.__dict__:
		Context.g_module.shutdown = Utils.nada
	if not 'options' in Context.g_module.__dict__:
		Context.g_module.options = Utils.nada
def parse_options():
	"""
	Parse the command-line options and initialize the logging system.
	Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization.
	"""
	ctx = Context.create_context('options')
	ctx.execute()

	# export NAME=VALUE pairs given on the command line
	for pair in Options.envvars:
		key, value = pair.split('=', 1)
		os.environ[key.strip()] = value

	if not Options.commands:
		Options.commands = [default_cmd]
	Options.commands = [x for x in Options.commands if x != 'options'] # issue 1076

	# process some internal Waf options
	Logs.verbose = Options.options.verbose
	zones = Options.options.zones
	if zones:
		Logs.zones = zones.split(',')
		if not Logs.verbose:
			Logs.verbose = 1
	elif Logs.verbose > 0:
		Logs.zones = ['runner']
	if Logs.verbose > 2:
		Logs.zones = ['*']
def run_command(cmd_name):
	"""
	Execute a single command. Called by :py:func:`waflib.Scripting.run_commands`.

	:param cmd_name: command to execute, like ``build``
	:type cmd_name: string
	:return: the context that executed the command
	"""
	ctx = Context.create_context(cmd_name)
	ctx.cmd = cmd_name
	ctx.options = Options.options # provided for convenience
	ctx.log_timer = Utils.Timer()
	try:
		ctx.execute()
	finally:
		# Issue 1374: always finalize, even when execute() raises
		ctx.finalize()
	return ctx
def run_commands():
	"""
	Execute the commands that were given on the command-line, and the other options
	Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization, and executed
	after :py:func:`waflib.Scripting.parse_options`.
	"""
	parse_options()
	run_command('init')
	while Options.commands:
		name = Options.commands.pop(0)
		done_ctx = run_command(name)
		Logs.info('%r finished successfully (%s)' % (name, str(done_ctx.log_timer)))
	run_command('shutdown')
########################################################################################### | |||||
def _can_distclean(name): | |||||
# WARNING: this method may disappear anytime | |||||
for k in '.o .moc .exe'.split(): | |||||
if name.endswith(k): | |||||
return True | |||||
return False | |||||
def distclean_dir(dirname):
	"""
	Distclean function called in the particular case when::

		top == out

	Removes generated files (see :py:func:`_can_distclean`), the build cache
	files and the 'c4che' folder, ignoring removal errors where harmless.

	:param dirname: absolute path of the folder to clean
	:type dirname: string
	"""
	for root, dirs, files in os.walk(dirname):
		for name in files:
			if not _can_distclean(name):
				continue
			fname = os.path.join(root, name)
			try:
				os.remove(fname)
			except OSError:
				Logs.warn('Could not remove %r' % fname)

	for fname in (Context.DBFILE, 'config.log'):
		try:
			os.remove(fname)
		except OSError:
			pass

	try:
		shutil.rmtree('c4che')
	except OSError:
		pass
def distclean(ctx):
	'''removes the build directory'''
	lst = os.listdir('.')
	for f in lst:
		if f == Options.lockfile:
			try:
				proj = ConfigSet.ConfigSet(f)
			except IOError:
				Logs.warn('Could not read %r' % f)
				continue

			if proj['out_dir'] != proj['top_dir']:
				try:
					shutil.rmtree(proj['out_dir'])
				except IOError:
					pass
				except OSError as e:
					# a missing folder is fine, anything else is reported
					if e.errno != errno.ENOENT:
						Logs.warn('Could not remove %r' % proj['out_dir'])
			else:
				# top == out: remove only the generated files, not the sources
				distclean_dir(proj['out_dir'])

			# remove the lock file from every folder recorded in it
			for k in (proj['out_dir'], proj['top_dir'], proj['run_dir']):
				p = os.path.join(k, Options.lockfile)
				try:
					os.remove(p)
				except OSError as e:
					if e.errno != errno.ENOENT:
						Logs.warn('Could not remove %r' % p)

		# remove local waf cache folders
		if not Options.commands:
			for x in '.waf-1. waf-1. .waf3-1. waf3-1.'.split():
				if f.startswith(x):
					shutil.rmtree(f, ignore_errors=True)
class Dist(Context.Context):
	'''creates an archive containing the project source code'''
	cmd = 'dist'
	fun = 'dist'
	# archive format: 'zip' or any 'tar.*' compression supported by tarfile
	algo = 'tar.bz2'
	# optional mapping {algo -> archive file extension}
	ext_algo = {}

	def execute(self):
		"""
		See :py:func:`waflib.Context.Context.execute`
		"""
		self.recurse([os.path.dirname(Context.g_module.root_path)])
		self.archive()

	def archive(self):
		"""
		Create the archive from the files returned by :py:meth:`get_files`,
		then log its name and sha1 digest.
		"""
		import tarfile

		arch_name = self.get_arch_name()

		try:
			self.base_path
		except AttributeError:
			self.base_path = self.path

		node = self.base_path.make_node(arch_name)
		try:
			# remove a stale archive from a previous run
			node.delete()
		except OSError:
			pass

		files = self.get_files()

		if self.algo.startswith('tar.'):
			tar = tarfile.open(arch_name, 'w:' + self.algo.replace('tar.', ''))

			for x in files:
				self.add_tar_file(x, tar)
			tar.close()
		elif self.algo == 'zip':
			import zipfile
			zip = zipfile.ZipFile(arch_name, 'w', compression=zipfile.ZIP_DEFLATED)

			for x in files:
				archive_name = self.get_base_name() + '/' + x.path_from(self.base_path)
				zip.write(x.abspath(), archive_name, zipfile.ZIP_DEFLATED)
			zip.close()
		else:
			self.fatal('Valid algo types are tar.bz2, tar.gz, tar.xz or zip')

		try:
			from hashlib import sha1 as sha
		except ImportError:
			# very old interpreters only
			from sha import sha
		try:
			digest = " (sha=%r)" % sha(node.read()).hexdigest()
		except Exception:
			digest = ''

		Logs.info('New archive created: %s%s' % (self.arch_name, digest))

	def get_tar_path(self, node):
		"""
		return the path to use for a node in the tar archive, the purpose of this
		is to let subclases resolve symbolic links or to change file names
		"""
		return node.abspath()

	def add_tar_file(self, x, tar):
		"""
		Add a file to the tar archive. Transform symlinks into files if the files lie out of the project tree.
		"""
		p = self.get_tar_path(x)
		tinfo = tar.gettarinfo(name=p, arcname=self.get_tar_prefix() + '/' + x.path_from(self.base_path))
		# normalize the ownership for reproducible archives
		tinfo.uid   = 0
		tinfo.gid   = 0
		tinfo.uname = 'root'
		tinfo.gname = 'root'

		fu = None
		try:
			fu = open(p, 'rb')
			tar.addfile(tinfo, fileobj=fu)
		finally:
			if fu:
				fu.close()

	def get_tar_prefix(self):
		"""
		Return the folder prefix to use inside the tar archive; overridable
		through the attribute *tar_prefix*.
		"""
		try:
			return self.tar_prefix
		except AttributeError:
			return self.get_base_name()

	def get_arch_name(self):
		"""
		Return the name of the archive to create. Change the default value by setting *arch_name*::

			def dist(ctx):
				ctx.arch_name = 'ctx.tar.bz2'

		:rtype: string
		"""
		try:
			self.arch_name
		except AttributeError:
			self.arch_name = self.get_base_name() + '.' + self.ext_algo.get(self.algo, self.algo)
		return self.arch_name

	def get_base_name(self):
		"""
		Return the default name of the main directory in the archive, which is set to *appname-version*.
		Set the attribute *base_name* to change the default value::

			def dist(ctx):
				ctx.base_name = 'files'

		:rtype: string
		"""
		try:
			self.base_name
		except AttributeError:
			appname = getattr(Context.g_module, Context.APPNAME, 'noname')
			version = getattr(Context.g_module, Context.VERSION, '1.0')
			self.base_name = appname + '-' + version
		return self.base_name

	def get_excl(self):
		"""
		Return the patterns to exclude for finding the files in the top-level directory. Set the attribute *excl*
		to change the default value::

			def dist(ctx):
				ctx.excl = 'build **/*.o **/*.class'

		:rtype: string
		"""
		try:
			return self.excl
		except AttributeError:
			self.excl = Node.exclude_regs + ' **/waf-1.8.* **/.waf-1.8* **/waf3-1.8.* **/.waf3-1.8* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
			# also exclude the build directory if it lies within the project
			if Context.out_dir:
				nd = self.root.find_node(Context.out_dir)
				if nd:
					self.excl += ' ' + nd.path_from(self.base_path)
			return self.excl

	def get_files(self):
		"""
		The files to package are searched automatically by :py:func:`waflib.Node.Node.ant_glob`. Set
		*files* to prevent this behaviour::

			def dist(ctx):
				ctx.files = ctx.path.find_node('wscript')

		The files are searched from the directory 'base_path', to change it, set::

			def dist(ctx):
				ctx.base_path = path

		:rtype: list of :py:class:`waflib.Node.Node`
		"""
		try:
			files = self.files
		except AttributeError:
			files = self.base_path.ant_glob('**/*', excl=self.get_excl())
		return files
def dist(ctx):
	'''makes a tarball for redistributing the sources'''
	# the actual work is performed by the Dist context class bound to this command
class DistCheck(Dist):
	"""
	Create an archive of the project, and try to build the project in a temporary directory::

		$ waf distcheck
	"""
	fun = 'distcheck'
	cmd = 'distcheck'

	def execute(self):
		"""
		See :py:func:`waflib.Context.Context.execute`
		"""
		self.recurse([os.path.dirname(Context.g_module.root_path)])
		self.archive()
		self.check()

	def check(self):
		"""
		Create the archive, uncompress it and try to build the project:
		a child waf process runs configure/install/uninstall into a temporary
		destdir, which must be empty (and removed) afterwards.
		"""
		import tempfile, tarfile

		t = None
		try:
			t = tarfile.open(self.get_arch_name())
			for x in t:
				t.extract(x)
		finally:
			if t:
				t.close()

		cfg = []

		if Options.options.distcheck_args:
			cfg = shlex.split(Options.options.distcheck_args)
		else:
			# re-use the flag-like arguments given on the command line
			cfg = [x for x in sys.argv if x.startswith('-')]

		instdir = tempfile.mkdtemp('.inst', self.get_base_name())
		ret = Utils.subprocess.Popen([sys.executable, sys.argv[0], 'configure', 'install', 'uninstall', '--destdir=' + instdir] + cfg, cwd=self.get_base_name()).wait()
		if ret:
			raise Errors.WafError('distcheck failed with code %i' % ret)

		# a proper uninstall leaves nothing behind, so the destdir must be gone
		if os.path.exists(instdir):
			raise Errors.WafError('distcheck succeeded, but files were left in %s' % instdir)

		shutil.rmtree(self.get_base_name())
def distcheck(ctx):
	'''checks if the project compiles (tarball from 'dist')'''
	# the actual work is performed by the DistCheck context class bound to this command
def update(ctx):
	'''updates the plugins from the *waflib/extras* directory'''
	# str.split(',') on an empty string returns [''], not [], so the
	# "no files given -> refresh all extras" branch below was unreachable
	# and a bogus empty tool name was looked up; filter out empty names
	lst = [x for x in Options.options.files.split(',') if x]
	if not lst:
		lst = [x for x in Utils.listdir(Context.waf_dir + '/waflib/extras') if x.endswith('.py')]
	for x in lst:
		tool = x.replace('.py', '')
		try:
			Configure.download_tool(tool, force=True, ctx=ctx)
		except Errors.WafError:
			Logs.error('Could not find the tool %s in the remote repository' % x)
def autoconfigure(execute_method):
	"""
	Decorator used to set the commands that can be configured automatically:
	when ``Configure.autoconfig`` is enabled and the configuration is missing
	or stale, 'configure' followed by the current command are prepended to the
	command list instead of executing directly.
	"""
	def execute(self):
		if not Configure.autoconfig:
			return execute_method(self)

		env = ConfigSet.ConfigSet()
		do_config = False
		try:
			env.load(os.path.join(Context.top_dir, Options.lockfile))
		except Exception:
			# no usable lock file: the project was never configured
			Logs.warn('Configuring the project')
			do_config = True
		else:
			if env.run_dir != Context.run_dir:
				do_config = True
			else:
				# re-hash the configuration input files; reconfigure on change
				h = 0
				for f in env['files']:
					h = Utils.h_list((h, Utils.readf(f, 'rb')))
				do_config = h != env.hash

		if do_config:
			# run 'configure' first, then retry the current command
			Options.commands.insert(0, self.cmd)
			Options.commands.insert(0, 'configure')
			if Configure.autoconfig == 'clobber':
				# replay the options that were used during the configuration
				Options.options.__dict__ = env.options
			return

		return execute_method(self)
	return execute
Build.BuildContext.execute = autoconfigure(Build.BuildContext.execute)
@@ -0,0 +1,836 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2005-2010 (ita) | |||||
""" | |||||
Task generators | |||||
The class :py:class:`waflib.TaskGen.task_gen` encapsulates the creation of task objects (low-level code) | |||||
The instances can have various parameters, but the creation of task nodes (Task.py) | |||||
is always postponed. To achieve this, various methods are called from the method "apply" | |||||
""" | |||||
import copy, re, os | |||||
from waflib import Task, Utils, Logs, Errors, ConfigSet, Node | |||||
feats = Utils.defaultdict(set) | |||||
"""remember the methods declaring features""" | |||||
HEADER_EXTS = ['.h', '.hpp', '.hxx', '.hh'] | |||||
class task_gen(object):
	"""
	Instances of this class create :py:class:`waflib.Task.TaskBase` when
	calling the method :py:meth:`waflib.TaskGen.task_gen.post` from the main thread.
	A few notes:

	* The methods to call (*self.meths*) can be specified dynamically (removing, adding, ..)
	* The 'features' are used to add methods to self.meths and then execute them
	* The attribute 'path' is a node representing the location of the task generator
	* The tasks created are added to the attribute *tasks*
	* The attribute 'idx' is a counter of task generators in the same path
	"""

	# NOTE: instances shadow these class-level tables with per-instance
	# versions created in __init__ (self.mappings / self.prec)
	mappings = Utils.ordered_iter_dict()
	"""Mappings are global file extension mappings, they are retrieved in the order of definition"""

	prec = Utils.defaultdict(list)
	"""Dict holding the precedence rules for task generator methods"""
def __init__(self, *k, **kw): | |||||
""" | |||||
The task generator objects predefine various attributes (source, target) for possible | |||||
processing by process_rule (make-like rules) or process_source (extensions, misc methods) | |||||
The tasks are stored on the attribute 'tasks'. They are created by calling methods | |||||
listed in self.meths *or* referenced in the attribute features | |||||
A topological sort is performed to ease the method re-use. | |||||
The extra key/value elements passed in kw are set as attributes | |||||
""" | |||||
# so we will have to play with directed acyclic graphs | |||||
# detect cycles, etc | |||||
self.source = '' | |||||
self.target = '' | |||||
self.meths = [] | |||||
""" | |||||
List of method names to execute (it is usually a good idea to avoid touching this) | |||||
""" | |||||
self.prec = Utils.defaultdict(list) | |||||
""" | |||||
Precedence table for sorting the methods in self.meths | |||||
""" | |||||
self.mappings = {} | |||||
""" | |||||
List of mappings {extension -> function} for processing files by extension | |||||
This is very rarely used, so we do not use an ordered dict here | |||||
""" | |||||
self.features = [] | |||||
""" | |||||
List of feature names for bringing new methods in | |||||
""" | |||||
self.tasks = [] | |||||
""" | |||||
List of tasks created. | |||||
""" | |||||
if not 'bld' in kw: | |||||
# task generators without a build context :-/ | |||||
self.env = ConfigSet.ConfigSet() | |||||
self.idx = 0 | |||||
self.path = None | |||||
else: | |||||
self.bld = kw['bld'] | |||||
self.env = self.bld.env.derive() | |||||
self.path = self.bld.path # emulate chdir when reading scripts | |||||
# provide a unique id | |||||
try: | |||||
self.idx = self.bld.idx[id(self.path)] = self.bld.idx.get(id(self.path), 0) + 1 | |||||
except AttributeError: | |||||
self.bld.idx = {} | |||||
self.idx = self.bld.idx[id(self.path)] = 1 | |||||
for key, val in kw.items(): | |||||
setattr(self, key, val) | |||||
def __str__(self): | |||||
"""for debugging purposes""" | |||||
return "<task_gen %r declared in %s>" % (self.name, self.path.abspath()) | |||||
def __repr__(self): | |||||
"""for debugging purposes""" | |||||
lst = [] | |||||
for x in self.__dict__.keys(): | |||||
if x not in ('env', 'bld', 'compiled_tasks', 'tasks'): | |||||
lst.append("%s=%s" % (x, repr(getattr(self, x)))) | |||||
return "bld(%s) in %s" % (", ".join(lst), self.path.abspath()) | |||||
def get_name(self): | |||||
""" | |||||
If not set, the name is computed from the target name:: | |||||
def build(bld): | |||||
x = bld(name='foo') | |||||
x.get_name() # foo | |||||
y = bld(target='bar') | |||||
y.get_name() # bar | |||||
:rtype: string | |||||
:return: name of this task generator | |||||
""" | |||||
try: | |||||
return self._name | |||||
except AttributeError: | |||||
if isinstance(self.target, list): | |||||
lst = [str(x) for x in self.target] | |||||
name = self._name = ','.join(lst) | |||||
else: | |||||
name = self._name = str(self.target) | |||||
return name | |||||
def set_name(self, name): | |||||
self._name = name | |||||
name = property(get_name, set_name) | |||||
def to_list(self, val): | |||||
""" | |||||
Ensure that a parameter is a list | |||||
:type val: string or list of string | |||||
:param val: input to return as a list | |||||
:rtype: list | |||||
""" | |||||
if isinstance(val, str): return val.split() | |||||
else: return val | |||||
def post(self): | |||||
""" | |||||
Create task objects. The following operations are performed: | |||||
#. The body of this method is called only once and sets the attribute ``posted`` | |||||
#. The attribute ``features`` is used to add more methods in ``self.meths`` | |||||
#. The methods are sorted by the precedence table ``self.prec`` or `:waflib:attr:waflib.TaskGen.task_gen.prec` | |||||
#. The methods are then executed in order | |||||
#. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks` | |||||
""" | |||||
# we could add a decorator to let the task run once, but then python 2.3 will be difficult to support | |||||
if getattr(self, 'posted', None): | |||||
#error("OBJECT ALREADY POSTED" + str( self)) | |||||
return False | |||||
self.posted = True | |||||
keys = set(self.meths) | |||||
# add the methods listed in the features | |||||
self.features = Utils.to_list(self.features) | |||||
for x in self.features + ['*']: | |||||
st = feats[x] | |||||
if not st: | |||||
if not x in Task.classes: | |||||
Logs.warn('feature %r does not exist - bind at least one method to it' % x) | |||||
keys.update(list(st)) # ironpython 2.7 wants the cast to list | |||||
# copy the precedence table | |||||
prec = {} | |||||
prec_tbl = self.prec or task_gen.prec | |||||
for x in prec_tbl: | |||||
if x in keys: | |||||
prec[x] = prec_tbl[x] | |||||
# elements disconnected | |||||
tmp = [] | |||||
for a in keys: | |||||
for x in prec.values(): | |||||
if a in x: break | |||||
else: | |||||
tmp.append(a) | |||||
tmp.sort() | |||||
# topological sort | |||||
out = [] | |||||
while tmp: | |||||
e = tmp.pop() | |||||
if e in keys: out.append(e) | |||||
try: | |||||
nlst = prec[e] | |||||
except KeyError: | |||||
pass | |||||
else: | |||||
del prec[e] | |||||
for x in nlst: | |||||
for y in prec: | |||||
if x in prec[y]: | |||||
break | |||||
else: | |||||
tmp.append(x) | |||||
if prec: | |||||
raise Errors.WafError('Cycle detected in the method execution %r' % prec) | |||||
out.reverse() | |||||
self.meths = out | |||||
# then we run the methods in order | |||||
Logs.debug('task_gen: posting %s %d' % (self, id(self))) | |||||
for x in out: | |||||
try: | |||||
v = getattr(self, x) | |||||
except AttributeError: | |||||
raise Errors.WafError('%r is not a valid task generator method' % x) | |||||
Logs.debug('task_gen: -> %s (%d)' % (x, id(self))) | |||||
v() | |||||
Logs.debug('task_gen: posted %s' % self.name) | |||||
return True | |||||
def get_hook(self, node): | |||||
""" | |||||
:param node: Input file to process | |||||
:type node: :py:class:`waflib.Tools.Node.Node` | |||||
:return: A method able to process the input node by looking at the extension | |||||
:rtype: function | |||||
""" | |||||
name = node.name | |||||
if self.mappings: | |||||
for k in self.mappings: | |||||
if name.endswith(k): | |||||
return self.mappings[k] | |||||
for k in task_gen.mappings: | |||||
if name.endswith(k): | |||||
return task_gen.mappings[k] | |||||
raise Errors.WafError("File %r has no mapping in %r (have you forgotten to load a waf tool?)" % (node, task_gen.mappings.keys())) | |||||
def create_task(self, name, src=None, tgt=None, **kw): | |||||
""" | |||||
Wrapper for creating task instances. The classes are retrieved from the | |||||
context class if possible, then from the global dict Task.classes. | |||||
:param name: task class name | |||||
:type name: string | |||||
:param src: input nodes | |||||
:type src: list of :py:class:`waflib.Tools.Node.Node` | |||||
:param tgt: output nodes | |||||
:type tgt: list of :py:class:`waflib.Tools.Node.Node` | |||||
:return: A task object | |||||
:rtype: :py:class:`waflib.Task.TaskBase` | |||||
""" | |||||
task = Task.classes[name](env=self.env.derive(), generator=self) | |||||
if src: | |||||
task.set_inputs(src) | |||||
if tgt: | |||||
task.set_outputs(tgt) | |||||
task.__dict__.update(kw) | |||||
self.tasks.append(task) | |||||
return task | |||||
def clone(self, env): | |||||
""" | |||||
Make a copy of a task generator. Once the copy is made, it is necessary to ensure that the | |||||
it does not create the same output files as the original, or the same files may | |||||
be compiled several times. | |||||
:param env: A configuration set | |||||
:type env: :py:class:`waflib.ConfigSet.ConfigSet` | |||||
:return: A copy | |||||
:rtype: :py:class:`waflib.TaskGen.task_gen` | |||||
""" | |||||
newobj = self.bld() | |||||
for x in self.__dict__: | |||||
if x in ('env', 'bld'): | |||||
continue | |||||
elif x in ('path', 'features'): | |||||
setattr(newobj, x, getattr(self, x)) | |||||
else: | |||||
setattr(newobj, x, copy.copy(getattr(self, x))) | |||||
newobj.posted = False | |||||
if isinstance(env, str): | |||||
newobj.env = self.bld.all_envs[env].derive() | |||||
else: | |||||
newobj.env = env.derive() | |||||
return newobj | |||||
def declare_chain(name='', rule=None, reentrant=None, color='BLUE',
	ext_in=[], ext_out=[], before=[], after=[], decider=None, scan=None, install_path=None, shell=False):
	"""
	Create a new mapping and a task class for processing files by extension.
	See Tools/flex.py for an example.

	:param name: name for the task class
	:type name: string
	:param rule: function to execute or string to be compiled in a function
	:type rule: string or function
	:param reentrant: re-inject the output file in the process (done automatically, set to 0 to disable)
	:type reentrant: int
	:param color: color for the task output
	:type color: string
	:param ext_in: execute the task only after the files of such extensions are created
	:type ext_in: list of string
	:param ext_out: execute the task only before files of such extensions are processed
	:type ext_out: list of string
	:param before: execute instances of this task before classes of the given names
	:type before: list of string
	:param after: execute instances of this task after classes of the given names
	:type after: list of string
	:param decider: if present, use it to create the output nodes for the task
	:type decider: function
	:param scan: scanner function for the task
	:type scan: function
	:param install_path: installation path for the output nodes
	:type install_path: string
	"""
	ext_in = Utils.to_list(ext_in)
	ext_out = Utils.to_list(ext_out)
	if not name:
		name = rule
	cls = Task.task_factory(name, rule, color=color, ext_in=ext_in, ext_out=ext_out, before=before, after=after, scan=scan, shell=shell)
	def x_file(self, node):
		# output extensions come from the decider if given, else from the task class
		ext = decider and decider(self, node) or cls.ext_out
		# bugfix: _ext_in used to be bound only when ext_in was non-empty, causing a
		# NameError in the change_ext() call below; None is change_ext's default
		_ext_in = ext_in[0] if ext_in else None
		tsk = self.create_task(name, node)
		cnt = 0
		keys = set(self.mappings.keys()) | set(self.__class__.mappings.keys())
		for x in ext:
			k = node.change_ext(x, ext_in=_ext_in)
			tsk.outputs.append(k)
			if reentrant != None:
				# only the first `reentrant` outputs are re-injected as sources
				if cnt < int(reentrant):
					self.source.append(k)
			else:
				# reinject downstream files into the build
				for y in keys: # ~ nfile * nextensions :-/
					if k.name.endswith(y):
						self.source.append(k)
						break
			cnt += 1
		if install_path:
			self.bld.install_files(install_path, tsk.outputs)
		return tsk
	# register the hook for each input extension
	for x in cls.ext_in:
		task_gen.mappings[x] = x_file
	return x_file
def taskgen_method(func):
	"""
	Decorator: attach *func* to :py:class:`waflib.TaskGen.task_gen` so that it
	can be used as a task generator method::

		from waflib.TaskGen import taskgen_method
		@taskgen_method
		def mymethod(self):
			pass

	:param func: task generator method to add
	:type func: function
	:rtype: function
	"""
	method_name = func.__name__
	setattr(task_gen, method_name, func)
	return func
def feature(*k):
	"""
	Decorator: attach a method to :py:class:`waflib.TaskGen.task_gen` and record it
	under the given feature name(s), so it runs when a task generator declares
	one of those features::

		from waflib.Task import feature
		@feature('myfeature')
		def myfunction(self):
			print('that is my feature!')
		def build(bld):
			bld(features='myfeature')

	:param k: feature names
	:type k: list of string
	"""
	def deco(func):
		method_name = func.__name__
		setattr(task_gen, method_name, func)
		for feature_name in k:
			feats[feature_name].add(method_name)
		return func
	return deco
def before_method(*k):
	"""
	Decorator: attach a method to :py:class:`waflib.TaskGen.task_gen` and record
	a precedence constraint so it executes before the named method(s)::

		from waflib.TaskGen import feature, before
		@feature('myfeature')
		@before_method('fun2')
		def fun1(self):
			print('feature 1!')
		@feature('myfeature')
		def fun2(self):
			print('feature 2!')
		def build(bld):
			bld(features='myfeature')

	:param k: method names
	:type k: list of string
	"""
	def deco(func):
		method_name = func.__name__
		setattr(task_gen, method_name, func)
		for other in k:
			successors = task_gen.prec[other]
			if method_name not in successors:
				successors.append(method_name)
		return func
	return deco
before = before_method
def after_method(*k):
	"""
	Decorator: attach a method to :py:class:`waflib.TaskGen.task_gen` and record
	a precedence constraint so it executes after the named method(s)::

		from waflib.TaskGen import feature, after
		@feature('myfeature')
		@after_method('fun2')
		def fun1(self):
			print('feature 1!')
		@feature('myfeature')
		def fun2(self):
			print('feature 2!')
		def build(bld):
			bld(features='myfeature')

	:param k: method names
	:type k: list of string
	"""
	def deco(func):
		method_name = func.__name__
		setattr(task_gen, method_name, func)
		successors = task_gen.prec[method_name]
		for other in k:
			if other not in successors:
				successors.append(other)
		return func
	return deco
after = after_method
def extension(*k):
	"""
	Decorator: attach a method to :py:class:`waflib.TaskGen.task_gen` and bind it
	to the given file extension(s) for source file processing::

		from waflib import Task
		class mytask(Task):
			run_str = 'cp ${SRC} ${TGT}'
		@extension('.moo')
		def create_maa_file(self, node):
			self.create_task('mytask', node, node.change_ext('.maa'))
		def build(bld):
			bld(source='foo.moo')
	"""
	def deco(func):
		setattr(task_gen, func.__name__, func)
		for ext in k:
			task_gen.mappings[ext] = func
		return func
	return deco
# --------------------------------------------------------------- | |||||
# The following methods are task generator methods commonly used | |||||
# they are almost examples, the rest of waf core does not depend on them | |||||
@taskgen_method
def to_nodes(self, lst, path=None):
	"""
	Convert the input list into a list of nodes.
	It is used by :py:func:`waflib.TaskGen.process_source` and :py:func:`waflib.TaskGen.process_rule`.
	It is designed for source files, for folders, see :py:func:`waflib.Tools.ccroot.to_incnodes`:

	:param lst: input list
	:type lst: list of string and nodes
	:param path: path from which to search the nodes (by default, :py:attr:`waflib.TaskGen.task_gen.path`)
	:type path: :py:class:`waflib.Tools.Node.Node`
	:rtype: list of :py:class:`waflib.Tools.Node.Node`
	"""
	base = path or self.path
	# a single node is accepted as well as a list or a string
	if isinstance(lst, Node.Node):
		lst = [lst]
	nodes = []
	for item in Utils.to_list(lst):
		# strings are looked up relative to `base`, nodes pass through
		node = base.find_resource(item) if isinstance(item, str) else item
		if not node:
			raise Errors.WafError("source not found: %r in %r" % (item, self))
		nodes.append(node)
	return nodes
@feature('*')
def process_source(self):
	"""
	Process each element in the attribute ``source`` by extension.

	#. The *source* list is converted through :py:meth:`waflib.TaskGen.to_nodes` to a list of :py:class:`waflib.Node.Node` first.
	#. File extensions are mapped to methods having the signature: ``def meth(self, node)`` by :py:meth:`waflib.TaskGen.extension`
	#. The method is retrieved through :py:meth:`waflib.TaskGen.task_gen.get_hook`
	#. When called, the methods may modify self.source to append more source to process
	#. The mappings can map an extension or a filename (see the code below)
	"""
	self.source = self.to_nodes(getattr(self, 'source', []))
	for node in self.source:
		hook = self.get_hook(node)
		hook(self, node)
@feature('*')
@before_method('process_source')
def process_rule(self):
	"""
	Process the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled::

		def build(bld):
			bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt')
	"""
	if not getattr(self, 'rule', None):
		return
	# create the task class
	name = str(getattr(self, 'name', None) or self.target or getattr(self.rule, '__name__', self.rule))
	# or we can put the class in a cache for performance reasons
	try:
		cache = self.bld.cache_rule_attr
	except AttributeError:
		cache = self.bld.cache_rule_attr = {}
	cls = None
	# NOTE(review): the default is the truthy *string* 'True', so caching is enabled
	# unless cache_rule is explicitly set to a falsy value
	if getattr(self, 'cache_rule', 'True'):
		try:
			cls = cache[(name, self.rule)]
		except KeyError:
			pass
	if not cls:
		cls = Task.task_factory(name, self.rule,
			getattr(self, 'vars', []),
			shell=getattr(self, 'shell', True), color=getattr(self, 'color', 'BLUE'),
			scan = getattr(self, 'scan', None))
		if getattr(self, 'scan', None):
			cls.scan = self.scan
		elif getattr(self, 'deps', None):
			# derive a scanner from the 'deps' attribute (explicit file dependencies)
			def scan(self):
				nodes = []
				for x in self.generator.to_list(getattr(self.generator, 'deps', None)):
					node = self.generator.path.find_resource(x)
					if not node:
						self.generator.bld.fatal('Could not find %r (was it declared?)' % x)
					nodes.append(node)
				# scanner protocol: (dependency nodes, custom raw values)
				return [nodes, []]
			cls.scan = scan
		if getattr(self, 'update_outputs', None):
			Task.update_outputs(cls)
		if getattr(self, 'always', None):
			Task.always_run(cls)
		for x in ('after', 'before', 'ext_in', 'ext_out'):
			setattr(cls, x, getattr(self, x, []))
		if getattr(self, 'cache_rule', 'True'):
			cache[(name, self.rule)] = cls
	# now create one instance
	tsk = self.create_task(name)
	if getattr(self, 'target', None):
		# normalize the target attribute to a list of strings/nodes
		if isinstance(self.target, str):
			self.target = self.target.split()
		if not isinstance(self.target, list):
			self.target = [self.target]
		for x in self.target:
			if isinstance(x, str):
				tsk.outputs.append(self.path.find_or_declare(x))
			else:
				x.parent.mkdir() # if a node was given, create the required folders
				tsk.outputs.append(x)
		if getattr(self, 'install_path', None):
			self.bld.install_files(self.install_path, tsk.outputs)
	if getattr(self, 'source', None):
		tsk.inputs = self.to_nodes(self.source)
		# bypass the execution of process_source by setting the source to an empty list
		self.source = []
	if getattr(self, 'cwd', None):
		tsk.cwd = self.cwd
@feature('seq')
def sequence_order(self):
	"""
	Add a strict sequential constraint between the tasks generated by task generators.
	It works because task generators are posted in order.
	It will not post objects which belong to other folders.

	Example::

		bld(features='javac seq')
		bld(features='jar seq')

	To start a new sequence, set the attribute seq_start, for example::

		obj = bld(features='seq')
		obj.seq_start = True

	Note that the method is executed in last position. This is more an
	example than a widely-used solution.
	"""
	# re-queue this method to the end so it runs after all tasks were created
	if self.meths and self.meths[-1] != 'sequence_order':
		self.meths.append('sequence_order')
		return
	if getattr(self, 'seq_start', None):
		return
	# all the tasks previously declared must be run before these
	if getattr(self.bld, 'prev', None):
		self.bld.prev.post()
		for x in self.bld.prev.tasks:
			for y in self.tasks:
				y.set_run_after(x)
	# remember this generator for the next one in the sequence
	self.bld.prev = self
# Pattern matching @VAR@ placeholders substituted by subst_pc.run();
# raw string avoids the invalid escape sequence '\w' (deprecated, later an error)
re_m4 = re.compile(r'@(\w+)@', re.M)
class subst_pc(Task.Task):
	"""
	Create *.pc* files from *.pc.in*. The task is executed whenever an input variable used
	in the substitution changes.
	"""
	def run(self):
		"Substitutes variables in a .in file"
		if getattr(self.generator, 'is_copy', None):
			# plain byte-for-byte copy, no substitution at all
			self.outputs[0].write(self.inputs[0].read('rb'), 'wb')
			if getattr(self.generator, 'chmod', None):
				os.chmod(self.outputs[0].abspath(), self.generator.chmod)
			return None
		if getattr(self.generator, 'fun', None):
			# the generator provides a complete replacement for run()
			return self.generator.fun(self)
		code = self.inputs[0].read(encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
		if getattr(self.generator, 'subst_fun', None):
			# custom text filter; a None result means the function wrote the output itself
			code = self.generator.subst_fun(self, code)
			if code is not None:
				self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
			return
		# replace all % by %% to prevent errors by % signs
		code = code.replace('%', '%%')
		# extract the vars foo into lst and replace @foo@ by %(foo)s
		lst = []
		def repl(match):
			g = match.group
			if g(1):
				lst.append(g(1))
				return "%%(%s)s" % g(1)
			return ''
		global re_m4
		code = getattr(self.generator, 're_m4', re_m4).sub(repl, code)
		try:
			d = self.generator.dct
		except AttributeError:
			# no explicit dict given: look up each variable on the generator, then the env
			d = {}
			for x in lst:
				tmp = getattr(self.generator, x, '') or self.env[x] or self.env[x.upper()]
				try:
					tmp = ''.join(tmp)
				except TypeError:
					tmp = str(tmp)
				d[x] = tmp
		code = code % d
		self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
		# record the variables seen so sig_vars() can hash them on the next run
		self.generator.bld.raw_deps[self.uid()] = self.dep_vars = lst
		# make sure the signature is updated
		try: delattr(self, 'cache_sig')
		except AttributeError: pass
		if getattr(self.generator, 'chmod', None):
			os.chmod(self.outputs[0].abspath(), self.generator.chmod)
	def sig_vars(self):
		"""
		Compute a hash (signature) of the variables used in the substitution
		"""
		bld = self.generator.bld
		env = self.env
		upd = self.m.update
		# custom functions take part in the signature so edits re-trigger the task
		if getattr(self.generator, 'fun', None):
			upd(Utils.h_fun(self.generator.fun).encode())
		if getattr(self.generator, 'subst_fun', None):
			upd(Utils.h_fun(self.generator.subst_fun).encode())
		# raw_deps: persistent custom values returned by the scanner
		vars = self.generator.bld.raw_deps.get(self.uid(), [])
		# hash both env vars and task generator attributes
		act_sig = bld.hash_env_vars(env, vars)
		upd(act_sig)
		lst = [getattr(self.generator, x, '') for x in vars]
		upd(Utils.h_list(lst))
		return self.m.digest()
@extension('.pc.in')
def add_pcfile(self, node):
	"""
	Process *.pc.in* files into *.pc* files and install the results to
	``${LIBDIR}/pkgconfig/`` unless ``install_path`` overrides it::

		def build(bld):
			bld(source='foo.pc.in', install_path='${LIBDIR}/pkgconfig/')
	"""
	out = node.change_ext('.pc', '.pc.in')
	tsk = self.create_task('subst_pc', node, out)
	inst_to = getattr(self, 'install_path', '${LIBDIR}/pkgconfig/')
	self.bld.install_files(inst_to, tsk.outputs)
class subst(subst_pc):
	"""Task class used by the *subst* feature (see :py:func:`process_subst`); inherits the behaviour of :py:class:`subst_pc`"""
	pass
@feature('subst')
@before_method('process_source', 'process_rule')
def process_subst(self):
	"""
	Define a transformation that substitutes the contents of *source* files to *target* files::

		def build(bld):
			bld(
				features='subst',
				source='foo.c.in',
				target='foo.c',
				install_path='${LIBDIR}/pkgconfig',
				VAR = 'val'
			)

	The input files are supposed to contain macros of the form *@VAR@*, where *VAR* is an argument
	of the task generator object.

	This method overrides the processing by :py:meth:`waflib.TaskGen.process_source`.
	"""
	# normalize source/target to parallel lists (strings or nodes)
	src = Utils.to_list(getattr(self, 'source', []))
	if isinstance(src, Node.Node):
		src = [src]
	tgt = Utils.to_list(getattr(self, 'target', []))
	if isinstance(tgt, Node.Node):
		tgt = [tgt]
	if len(src) != len(tgt):
		raise Errors.WafError('invalid number of source/target for %r' % self)
	for x, y in zip(src, tgt):
		if not x or not y:
			raise Errors.WafError('null source or target for %r' % self)
		a, b = None, None
		if isinstance(x, str) and isinstance(y, str) and x == y:
			# same name in and out: read from the source dir, write to the build dir
			a = self.path.find_node(x)
			b = self.path.get_bld().make_node(y)
			if not os.path.isfile(b.abspath()):
				b.sig = None
				b.parent.mkdir()
		else:
			if isinstance(x, str):
				a = self.path.find_resource(x)
			elif isinstance(x, Node.Node):
				a = x
			if isinstance(y, str):
				b = self.path.find_or_declare(y)
			elif isinstance(y, Node.Node):
				b = y
		if not a:
			# bugfix: error message previously read 'cound not find'
			raise Errors.WafError('could not find %r for %r' % (x, self))
		has_constraints = False
		tsk = self.create_task('subst', a, b)
		for k in ('after', 'before', 'ext_in', 'ext_out'):
			val = getattr(self, k, None)
			if val:
				has_constraints = True
				setattr(tsk, k, val)
		# paranoid safety measure for the general case foo.in->foo.h with ambiguous dependencies
		if not has_constraints:
			global HEADER_EXTS
			for xt in HEADER_EXTS:
				if b.name.endswith(xt):
					tsk.before = [k for k in ('c', 'cxx') if k in Task.classes]
					break
		inst_to = getattr(self, 'install_path', None)
		if inst_to:
			self.bld.install_files(inst_to, b, chmod=getattr(self, 'chmod', Utils.O644))
	# disable process_source: the substitution tasks replace it
	self.source = []
@@ -0,0 +1,3 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2005-2010 (ita) |
@@ -0,0 +1,24 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2006-2010 (ita) | |||||
# Ralf Habacker, 2006 (rh) | |||||
""" | |||||
The **ar** program creates static libraries. This tool is almost always loaded | |||||
from others (C, C++, D, etc) for static library support. | |||||
""" | |||||
from waflib.Configure import conf | |||||
@conf
def find_ar(conf):
	"""Configuration helper used by C/C++ tools to enable the support for static libraries"""
	# simply delegates to this tool's configure() below
	conf.load('ar')
def configure(conf):
	"""Find the ar program and set the default flags in ``conf.env.ARFLAGS``"""
	conf.find_program('ar', var='AR')
	# honor ARFLAGS from the OS environment if present
	conf.add_os_flags('ARFLAGS')
	if not conf.env.ARFLAGS:
		# r=insert/replace members, c=create archive, s=write an index
		conf.env.ARFLAGS = ['rcs']
@@ -0,0 +1,75 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2008-2010 (ita) | |||||
""" | |||||
Assembly support, used by tools such as gas and nasm | |||||
To declare targets using assembly:: | |||||
def configure(conf): | |||||
conf.load('gcc gas') | |||||
def build(bld): | |||||
bld( | |||||
features='c cstlib asm', | |||||
source = 'test.S', | |||||
target = 'asmtest') | |||||
bld( | |||||
features='asm asmprogram', | |||||
source = 'test.S', | |||||
target = 'asmtest') | |||||
Support for pure asm programs and libraries should also work:: | |||||
def configure(conf): | |||||
conf.load('nasm') | |||||
conf.find_program('ld', 'ASLINK') | |||||
def build(bld): | |||||
bld( | |||||
features='asm asmprogram', | |||||
source = 'test.S', | |||||
target = 'asmtest') | |||||
""" | |||||
import os, sys | |||||
from waflib import Task, Utils | |||||
import waflib.Task | |||||
from waflib.Tools.ccroot import link_task, stlink_task | |||||
from waflib.TaskGen import extension, feature | |||||
class asm(Task.Task):
	"""
	Compile asm files by gas/nasm/yasm/...

	The command line is built from the env variables AS, ASFLAGS, ASMPATH_ST,
	INCPATHS, DEFINES_ST, DEFINES, AS_SRC_F and AS_TGT_F.
	"""
	color = 'BLUE'
	run_str = '${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}'
@extension('.s', '.S', '.asm', '.ASM', '.spp', '.SPP')
def asm_hook(self, node):
	"""
	Bind the asm extension to the asm task

	:param node: input file
	:type node: :py:class:`waflib.Node.Node`
	:return: the created task
	"""
	return self.create_compiled_task('asm', node)
class asmprogram(link_task):
	"Link object files into a c program"
	run_str = '${ASLINK} ${ASLINKFLAGS} ${ASLNK_TGT_F}${TGT} ${ASLNK_SRC_F}${SRC}'
	ext_out = ['.bin']
	# default installation directory
	inst_to = '${BINDIR}'
class asmshlib(asmprogram):
	"Link object files into a c shared library"
	# same link command as asmprogram, only the installation directory differs
	inst_to = '${LIBDIR}'
class asmstlib(stlink_task):
	"Link object files into a c static library"
	pass # do not remove
def configure(conf):
	"""Set the include-path flag template used by the asm task (${ASMPATH_ST:INCPATHS})"""
	conf.env['ASMPATH_ST'] = '-I%s'
@@ -0,0 +1,49 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# John O'Meara, 2006 | |||||
# Thomas Nagy 2009-2010 (ita) | |||||
""" | |||||
The **bison** program is a code generator which creates C or C++ files. | |||||
The generated files are compiled into object files. | |||||
""" | |||||
from waflib import Task | |||||
from waflib.TaskGen import extension | |||||
class bison(Task.Task):
	"""Compile bison files"""
	color = 'BLUE'
	run_str = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
	ext_out = ['.h'] # just to make sure
@extension('.y', '.yc', '.yy')
def big_bison(self, node):
	"""
	Create a bison task, which must be executed from the directory of the output file.
	"""
	wants_header = '-d' in self.env['BISONFLAGS']
	# .yc inputs produce c++ outputs, the rest produce c outputs
	if node.name.endswith('.yc'):
		src_ext, hdr_ext = '.tab.cc', '.tab.hh'
	else:
		src_ext, hdr_ext = '.tab.c', '.tab.h'
	outputs = [node.change_ext(src_ext)]
	if wants_header:
		outputs.append(node.change_ext(hdr_ext))
	tsk = self.create_task('bison', node, outputs)
	# the command names only the output file, so run it from the output's build dir
	tsk.cwd = node.parent.get_bld().abspath()
	# and the c/cxx file must be compiled too
	self.source.append(outputs[0])
def configure(conf):
	"""
	Detect the *bison* program
	"""
	conf.find_program('bison', var='BISON')
	# -d: also generate the header file (big_bison checks for this flag)
	conf.env.BISONFLAGS = ['-d']
@@ -0,0 +1,39 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2006-2010 (ita) | |||||
"Base for c programs/libraries" | |||||
from waflib import TaskGen, Task | |||||
from waflib.Tools import c_preproc | |||||
from waflib.Tools.ccroot import link_task, stlink_task | |||||
@TaskGen.extension('.c')
def c_hook(self, node):
	"Bind the c file extension to the creation of a :py:class:`waflib.Tools.c.c` instance"
	# fall back to the c++ compiler when only CXX was configured
	kind = 'cxx' if (not self.env.CC and self.env.CXX) else 'c'
	return self.create_compiled_task(kind, node)
class c(Task.Task):
	"Compile C files into object files"
	run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath()}'
	vars = ['CCDEPS'] # unused variable to depend on, just in case
	ext_in = ['.h'] # set the build order easily by using ext_out=['.h']
	# header dependencies come from the c preprocessor scanner
	scan = c_preproc.scan
class cprogram(link_task):
	"Link object files into a c program"
	run_str = '${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
	ext_out = ['.bin']
	vars = ['LINKDEPS'] # extra variable to depend on, just in case (see CCDEPS above)
	inst_to = '${BINDIR}' # default installation folder for programs
class cshlib(cprogram):
	"Link object files into a c shared library"
	inst_to = '${LIBDIR}' # shared libraries install to the library folder
class cstlib(stlink_task):
	"Link object files into a c static library"
	pass # do not remove
@@ -0,0 +1,128 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2005-2010 (ita) | |||||
"base for all c/c++ programs and libraries" | |||||
import os, sys, re | |||||
from waflib import Utils, Build | |||||
from waflib.Configure import conf | |||||
def get_extensions(lst):
	"""
	:param lst: files to process
	:list lst: list of string or :py:class:`waflib.Node.Node`
	:return: list of file extensions
	:rtype: list of string
	"""
	exts = []
	for item in Utils.to_list(lst):
		try:
			# nodes expose their basename through .name
			name = item if isinstance(item, str) else item.name
			# text after the last dot; the whole name when there is no dot
			exts.append(name.rsplit('.', 1)[-1])
		except Exception:
			# items without a usable name are silently skipped
			pass
	return exts
def sniff_features(**kw):
	"""
	Look at the source files and return the features for a task generator (mainly cc and cxx)::

		snif_features(source=['foo.c', 'foo.cxx'], type='shlib')
		# returns ['cxx', 'c', 'cxxshlib', 'cshlib']

	:param source: source files to process
	:type source: list of string or :py:class:`waflib.Node.Node`
	:param type: object type in *program*, *shlib* or *stlib*
	:type type: string
	:return: the list of features for a task generator processing the source files
	:rtype: list of string
	"""
	exts = get_extensions(kw['source'])
	typ = kw['_type'] # 'typ' to avoid shadowing the builtin 'type'
	feats = []
	# watch the order, cxx will have the precedence
	if 'cxx' in exts or 'cpp' in exts or 'c++' in exts or 'cc' in exts or 'C' in exts:
		feats.append('cxx')
	if 'c' in exts or 'vala' in exts:
		feats.append('c')
	if 'd' in exts:
		feats.append('d')
	if 'java' in exts:
		# java sources short-circuit the c-like processing entirely;
		# the plain string is normalized by Utils.to_list in set_features
		return 'java'
	if typ in ('program', 'shlib', 'stlib'):
		# iterate over a snapshot: appending to the list being iterated is
		# fragile, even though the appended names never match the condition
		for x in list(feats):
			if x in ('cxx', 'd', 'c'):
				feats.append(x + typ)
	return feats
def set_features(kw, _type):
	"""
	Inject the detected features into the task generator keyword arguments.

	:param kw: task generator keyword arguments (modified in place)
	:type kw: dict
	:param _type: object type in *program*, *shlib* or *stlib*
	:type _type: string
	"""
	kw['_type'] = _type
	# merge explicitly requested features with the ones sniffed from the sources
	kw['features'] = Utils.to_list(kw.get('features', [])) + Utils.to_list(sniff_features(**kw))
@conf
def program(bld, *k, **kw):
	"""
	Shortcut that infers the features from the source file extensions and
	declares a program target::

		def build(bld):
			bld.program(source='foo.c', target='app')
			# equivalent to:
			# bld(features='c cprogram', source='foo.c', target='app')
	"""
	set_features(kw, 'program')
	return bld(*k, **kw)
@conf
def shlib(bld, *k, **kw):
	"""
	Shortcut that infers the features from the source file extensions and
	declares a shared library target::

		def build(bld):
			bld.shlib(source='foo.c', target='app')
			# equivalent to:
			# bld(features='c cshlib', source='foo.c', target='app')
	"""
	set_features(kw, 'shlib')
	return bld(*k, **kw)
@conf
def stlib(bld, *k, **kw):
	"""
	Shortcut that infers the features from the source file extensions and
	declares a static library target::

		def build(bld):
			bld.stlib(source='foo.cpp', target='app')
			# equivalent to:
			# bld(features='cxx cxxstlib', source='foo.cpp', target='app')
	"""
	set_features(kw, 'stlib')
	return bld(*k, **kw)
@conf
def objects(bld, *k, **kw):
	"""
	Shortcut that infers the features from the source file extensions and
	declares an object-files-only target (no link step)::

		def build(bld):
			bld.objects(source='foo.c', target='app')
			# equivalent to:
			# bld(features='c', source='foo.c', target='app')
	"""
	set_features(kw, 'objects')
	return bld(*k, **kw)
@@ -0,0 +1,188 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy 2008-2010 | |||||
""" | |||||
MacOSX related tools | |||||
""" | |||||
import os, shutil, sys, platform | |||||
from waflib import TaskGen, Task, Build, Options, Utils, Errors | |||||
from waflib.TaskGen import taskgen_method, feature, after_method, before_method | |||||
# Info.plist skeleton; %s receives the executable name (CFBundleExecutable),
# see create_task_macplist below
app_info = '''
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd">
<plist version="0.9">
<dict>
	<key>CFBundlePackageType</key>
	<string>APPL</string>
	<key>CFBundleGetInfoString</key>
	<string>Created by Waf</string>
	<key>CFBundleSignature</key>
	<string>????</string>
	<key>NOTE</key>
	<string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
	<key>CFBundleExecutable</key>
	<string>%s</string>
</dict>
</plist>
'''
"""
plist template
"""
@feature('c', 'cxx')
def set_macosx_deployment_target(self):
	"""
	See WAF issue 285 and also http://trac.macports.org/ticket/17059

	Export MACOSX_DEPLOYMENT_TARGET to the process environment so the
	compiler and linker subprocesses inherit it.
	"""
	if self.env['MACOSX_DEPLOYMENT_TARGET']:
		# an explicit configuration value always wins
		os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env['MACOSX_DEPLOYMENT_TARGET']
	elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ:
		if Utils.unversioned_sys_platform() == 'darwin':
			# default to the 'major.minor' of the running OS release
			os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2])
@taskgen_method
def create_bundle_dirs(self, name, out):
	"""
	Create bundle folders, used by :py:func:`create_task_macplist` and :py:func:`create_task_macapp`

	:param name: bundle name (e.g. ``foo.app``)
	:type name: string
	:param out: link task output node used to locate the bundle
	:type out: :py:class:`waflib.Node.Node`
	:return: the bundle directory node
	:rtype: :py:class:`waflib.Node.Node`
	"""
	# note: the unused local 'bld = self.bld' was removed
	dir = out.parent.find_or_declare(name)
	dir.mkdir()
	# create the Contents/MacOS skeleton eagerly so tasks can write into it
	macos = dir.find_or_declare(['Contents', 'MacOS'])
	macos.mkdir()
	return dir
def bundle_name_for_output(out):
	"""
	Derive the .app bundle name from a link output node: strip the last
	file extension, if any, then append ``.app``.
	"""
	stem, dot, _ext = out.name.rpartition('.')
	if dot:
		return stem + '.app'
	return out.name + '.app'
@feature('cprogram', 'cxxprogram')
@after_method('apply_link')
def create_task_macapp(self):
	"""
	To compile an executable into a Mac application (a .app), set its *mac_app* attribute::

		def build(bld):
			bld.program(source='a.c', target='foo', mac_app=True)

	To force *all* executables to be transformed into Mac applications::

		def build(bld):
			bld.env.MACAPP = True
			bld.program(source='a.c', target='foo')
	"""
	if self.env['MACAPP'] or getattr(self, 'mac_app', False):
		out = self.link_task.outputs[0]
		name = bundle_name_for_output(out)
		dir = self.create_bundle_dirs(name, out)
		# the executable is copied into <name>.app/Contents/MacOS/
		n1 = dir.find_or_declare(['Contents', 'MacOS', out.name])
		self.apptask = self.create_task('macapp', self.link_task.outputs, n1)
		inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/MacOS/' % name
		self.bld.install_files(inst_to, n1, chmod=Utils.O755)
		if getattr(self, 'mac_resources', None):
			# copy additional files or whole folders into Contents/Resources
			res_dir = n1.parent.parent.make_node('Resources')
			inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Resources' % name
			for x in self.to_list(self.mac_resources):
				node = self.path.find_node(x)
				if not node:
					raise Errors.WafError('Missing mac_resource %r in %r' % (x, self))
				parent = node.parent
				if os.path.isdir(node.abspath()):
					nodes = node.ant_glob('**')
				else:
					nodes = [node]
				for node in nodes:
					# preserve the path relative to the resource root
					rel = node.path_from(parent)
					tsk = self.create_task('macapp', node, res_dir.make_node(rel))
					self.bld.install_as(inst_to + '/%s' % rel, node)
		if getattr(self.bld, 'is_install', None):
			# disable the normal binary installation
			self.install_task.hasrun = Task.SKIP_ME
@feature('cprogram', 'cxxprogram')
@after_method('apply_link')
def create_task_macplist(self):
	"""
	Create a :py:class:`waflib.Tools.c_osx.macplist` instance writing
	``Contents/Info.plist`` when *mac_app* (or env.MACAPP) is set.
	"""
	if self.env['MACAPP'] or getattr(self, 'mac_app', False):
		out = self.link_task.outputs[0]
		name = bundle_name_for_output(out)
		dir = self.create_bundle_dirs(name, out)
		n1 = dir.find_or_declare(['Contents', 'Info.plist'])
		self.plisttask = plisttask = self.create_task('macplist', [], n1)
		if getattr(self, 'mac_plist', False):
			# mac_plist is either a file name or the literal plist contents
			node = self.path.find_resource(self.mac_plist)
			if node:
				plisttask.inputs.append(node)
			else:
				plisttask.code = self.mac_plist
		else:
			# default template: fill in the executable name
			plisttask.code = app_info % self.link_task.outputs[0].name
		inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/' % name
		self.bld.install_files(inst_to, n1)
@feature('cshlib', 'cxxshlib')
@before_method('apply_link', 'propagate_uselib_vars')
def apply_bundle(self):
	"""
	To make a bundled shared library (a ``.bundle``), set the *mac_bundle* attribute::

		def build(bld):
			bld.shlib(source='a.c', target='foo', mac_bundle = True)

	To force *all* shared libraries to be transformed into bundles::

		def build(bld):
			bld.env.MACBUNDLE = True
			bld.shlib(source='a.c', target='foo')
	"""
	if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False):
		self.env['LINKFLAGS_cshlib'] = self.env['LINKFLAGS_cxxshlib'] = [] # disable the '-dynamiclib' flag
		# swap the shared-library naming pattern for the bundle one
		self.env['cshlib_PATTERN'] = self.env['cxxshlib_PATTERN'] = self.env['macbundle_PATTERN']
		use = self.use = self.to_list(getattr(self, 'use', []))
		if not 'MACBUNDLE' in use:
			use.append('MACBUNDLE')
# standard folder skeleton of a .app bundle; apparently unreferenced in this module — kept for external users
app_dirs = ['Contents', 'Contents/MacOS', 'Contents/Resources']
class macapp(Task.Task):
	"""
	Create mac applications
	"""
	color = 'PINK'
	def run(self):
		# ensure the destination folder exists, then copy preserving metadata
		self.outputs[0].parent.mkdir()
		shutil.copy2(self.inputs[0].srcpath(), self.outputs[0].abspath())
class macplist(Task.Task):
	"""
	Create plist files
	"""
	color = 'PINK'
	ext_in = ['.bin']
	def run(self):
		# the generator may attach the plist contents directly as 'code';
		# otherwise the contents come from the first input file
		if getattr(self, 'code', None):
			txt = self.code
		else:
			txt = self.inputs[0].read()
		self.outputs[0].write(txt)
@@ -0,0 +1,229 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2010 (ita) | |||||
""" | |||||
Various configuration tests. | |||||
""" | |||||
from waflib import Task | |||||
from waflib.Configure import conf | |||||
from waflib.TaskGen import feature, before_method, after_method | |||||
import sys | |||||
# minimal shared library exporting lib_func (dllexport on MSVC)
LIB_CODE = '''
#ifdef _MSC_VER
#define testEXPORT __declspec(dllexport)
#else
#define testEXPORT
#endif
testEXPORT int lib_func(void) { return 9; }
'''
# program linking against lib_func; exits 0 when the call returns 9
MAIN_CODE = '''
#ifdef _MSC_VER
#define testEXPORT __declspec(dllimport)
#else
#define testEXPORT
#endif
testEXPORT int lib_func(void);
int main(int argc, char **argv) {
	(void)argc; (void)argv;
	return !(lib_func() == 9);
}
'''
@feature('link_lib_test')
@before_method('process_source')
def link_lib_test_fun(self):
	"""
	The configuration test :py:func:`waflib.Configure.run_build` declares a unique task generator,
	so we need to create other task generators from here to check if the linker is able to link libraries.
	"""
	def write_test_file(task):
		# rule callback: dump the C snippet attached to the generator
		task.outputs[0].write(task.generator.code)
	rpath = []
	if getattr(self, 'add_rpath', False):
		rpath = [self.bld.path.get_bld().abspath()]
	mode = self.mode
	# e.g. 'c c' -> yields features 'c cshlib' / 'c cprogram' below
	m = '%s %s' % (mode, mode)
	# pre-2.5-style conditional expression (kept for compatibility)
	ex = self.test_exec and 'test_exec' or ''
	bld = self.bld
	bld(rule=write_test_file, target='test.' + mode, code=LIB_CODE)
	bld(rule=write_test_file, target='main.' + mode, code=MAIN_CODE)
	bld(features='%sshlib' % m, source='test.' + mode, target='test')
	bld(features='%sprogram %s' % (m, ex), source='main.' + mode, target='app', use='test', rpath=rpath)
@conf
def check_library(self, mode=None, test_exec=True):
	"""
	Check if libraries can be linked with the current linker. Uses :py:func:`waflib.Tools.c_tests.link_lib_test_fun`.

	:param mode: c or cxx or d
	:type mode: string
	:param test_exec: also execute the resulting test binary
	:type test_exec: bool
	"""
	if not mode:
		# prefer c++ when a c++ compiler was configured
		mode = 'c'
		if self.env.CXX:
			mode = 'cxx'
	self.check(
		compile_filename = [],
		features = 'link_lib_test',
		msg = 'Checking for libraries',
		mode = mode,
		test_exec = test_exec,
		)
########################################################################################
# fragment compiled once per candidate keyword, substituted for both %s
INLINE_CODE = '''
typedef int foo_t;
static %s foo_t static_foo () {return 0; }
%s foo_t foo () {
	return 0;
}
'''
# candidate spellings tried in order by check_inline
INLINE_VALUES = ['inline', '__inline__', '__inline']
@conf
def check_inline(self, **kw):
	"""
	Check for the right value for inline macro.
	Define INLINE_MACRO to 1 if the define is found.
	If the inline macro is not 'inline', add a define to the ``config.h`` (#define inline __inline__)

	:param define_name: define INLINE_MACRO by default to 1 if the macro is defined
	:type define_name: string
	:param features: by default *c* or *cxx* depending on the compiler present
	:type features: list of string
	"""
	self.start_msg('Checking for inline')
	kw.setdefault('define_name', 'INLINE_MACRO')
	kw.setdefault('features', ['cxx'] if self.env.CXX else ['c'])
	# try each spelling in order; the first one that compiles wins
	for candidate in INLINE_VALUES:
		kw['fragment'] = INLINE_CODE % (candidate, candidate)
		try:
			self.check(**kw)
		except self.errors.ConfigurationError:
			continue
		self.end_msg(candidate)
		if candidate != 'inline':
			# make 'inline' an alias for the working keyword in config.h
			self.define('inline', candidate, quote=False)
		return candidate
	self.fatal('could not use inline functions')
########################################################################################
# exits 0 only when off_t is at least 64 bits wide
LARGE_FRAGMENT = '''#include <unistd.h>
int main(int argc, char **argv) {
	(void)argc; (void)argv;
	return !(sizeof(off_t) >= 8);
}
'''
@conf
def check_large_file(self, **kw):
	"""
	Check for large file support and define the macro HAVE_LARGEFILE
	The test is skipped on win32 systems (DEST_BINFMT == pe).

	:param define_name: define to set, by default *HAVE_LARGEFILE*
	:type define_name: string
	:param execute: execute the test (yes by default)
	:type execute: bool
	"""
	if not 'define_name' in kw:
		kw['define_name'] = 'HAVE_LARGEFILE'
	if not 'execute' in kw:
		kw['execute'] = True
	if not 'features' in kw:
		if self.env.CXX:
			kw['features'] = ['cxx', 'cxxprogram']
		else:
			kw['features'] = ['c', 'cprogram']
	kw['fragment'] = LARGE_FRAGMENT
	kw['msg'] = 'Checking for large file support'
	ret = True
	try:
		# first attempt: no special flags (skipped entirely on pe/win32)
		if self.env.DEST_BINFMT != 'pe':
			ret = self.check(**kw)
	except self.errors.ConfigurationError:
		pass
	else:
		if ret:
			return True
	# second attempt: the _FILE_OFFSET_BITS=64 transitional interface
	kw['msg'] = 'Checking for -D_FILE_OFFSET_BITS=64'
	kw['defines'] = ['_FILE_OFFSET_BITS=64']
	try:
		ret = self.check(**kw)
	except self.errors.ConfigurationError:
		pass
	else:
		# record the define so config.h carries it
		self.define('_FILE_OFFSET_BITS', 64)
		return ret
	self.fatal('There is no support for large files')
########################################################################################
# the compiled object embeds these arrays; depending on the host byte order
# their bytes spell 'BIGenDianSyS' or 'LiTTleEnDian' (see grep_for_endianness)
ENDIAN_FRAGMENT = '''
short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 };
short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 };
int use_ascii (int i) {
	return ascii_mm[i] + ascii_ii[i];
}
short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 };
short int ebcdic_mm[] = { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 };
int use_ebcdic (int i) {
	return ebcdic_mm[i] + ebcdic_ii[i];
}
extern int foo;
'''
class grep_for_endianness(Task.Task):
	"""
	Scan the compiled object file for the marker strings embedded by ENDIAN_FRAGMENT
	"""
	color = 'PINK'
	def run(self):
		# read raw bytes; iso8859-1 maps every byte value to a character
		txt = self.inputs[0].read(flags='rb').decode('iso8859-1')
		if txt.find('LiTTleEnDian') > -1:
			self.generator.tmp.append('little')
		elif txt.find('BIGenDianSyS') > -1:
			self.generator.tmp.append('big')
		else:
			# non-zero return marks the task (and thus the test) as failed
			return -1
@feature('grep_for_endianness')
@after_method('process_source')
def grep_for_endianness_fun(self):
	"""
	Used by the endianness configuration test: grep the first object file produced
	"""
	self.create_task('grep_for_endianness', self.compiled_tasks[0].outputs[0])
@conf
def check_endianness(self):
	"""
	Execute a configuration test to determine the endianness

	:return: 'little' or 'big'
	:rtype: string
	"""
	tmp = []
	def check_msg(self):
		# the grep_for_endianness task appended 'little' or 'big' to tmp
		return tmp[0]
	self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness', msg="Checking for endianness", define='ENDIANNESS', tmp=tmp, okmsg=check_msg)
	return tmp[0]
@@ -0,0 +1,703 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2005-2010 (ita) | |||||
""" | |||||
Classes and methods shared by tools providing support for C-like language such | |||||
as C/C++/D/Assembly/Go (this support module is almost never used alone). | |||||
""" | |||||
import os, re | |||||
from waflib import Task, Utils, Node, Errors | |||||
from waflib.TaskGen import after_method, before_method, feature, taskgen_method, extension | |||||
from waflib.Tools import c_aliases, c_preproc, c_config, c_osx, c_tests | |||||
from waflib.Configure import conf | |||||
# conventional system library folders (not referenced in this chunk — presumably used by other waf tools)
SYSTEM_LIB_PATHS = ['/usr/lib64', '/usr/lib', '/usr/local/lib64', '/usr/local/lib']
USELIB_VARS = Utils.defaultdict(set)
"""
Mapping for features to :py:class:`waflib.ConfigSet.ConfigSet` variables. See :py:func:`waflib.Tools.ccroot.propagate_uselib_vars`.
"""
# compilation variables per language
USELIB_VARS['c'] = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CCDEPS', 'CFLAGS', 'ARCH'])
USELIB_VARS['cxx'] = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CXXDEPS', 'CXXFLAGS', 'ARCH'])
USELIB_VARS['d'] = set(['INCLUDES', 'DFLAGS'])
USELIB_VARS['includes'] = set(['INCLUDES', 'FRAMEWORKPATH', 'ARCH'])
# link variables per target kind
USELIB_VARS['cprogram'] = USELIB_VARS['cxxprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH'])
USELIB_VARS['cshlib'] = USELIB_VARS['cxxshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH'])
USELIB_VARS['cstlib'] = USELIB_VARS['cxxstlib'] = set(['ARFLAGS', 'LINKDEPS'])
USELIB_VARS['dprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
USELIB_VARS['dshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
USELIB_VARS['dstlib'] = set(['ARFLAGS', 'LINKDEPS'])
USELIB_VARS['asm'] = set(['ASFLAGS'])
# ================================================================================================= | |||||
@taskgen_method
def create_compiled_task(self, name, node):
	"""
	Create the compilation task: c, cxx, asm, etc. The output node is created automatically (object file with a typical **.o** extension).
	The task is appended to the list *compiled_tasks* which is then used by :py:func:`waflib.Tools.ccroot.apply_link`

	:param name: name of the task class
	:type name: string
	:param node: the file to compile
	:type node: :py:class:`waflib.Node.Node`
	:return: The task created
	:rtype: :py:class:`waflib.Task.Task`
	"""
	# the generator index keeps object names unique across task generators
	obj_name = '%s.%d.o' % (node.name, self.idx)
	tsk = self.create_task(name, node, node.parent.find_or_declare(obj_name))
	try:
		self.compiled_tasks.append(tsk)
	except AttributeError:
		# first compiled task for this generator
		self.compiled_tasks = [tsk]
	return tsk
@taskgen_method
def to_incnodes(self, inlst):
	"""
	Task generator method provided to convert a list of string/nodes into a list of includes folders.

	The paths are assumed to be relative to the task generator path, except if they begin by **#**
	in which case they are searched from the top-level directory (``bld.srcnode``).
	The folders are simply assumed to be existing.

	The node objects in the list are returned in the output list. The strings are converted
	into node objects if possible. The node is searched from the source directory, and if a match is found,
	the equivalent build directory is created and added to the returned list too. When a folder cannot be found, it is ignored.

	:param inlst: list of folders
	:type inlst: space-delimited string or a list of string/nodes
	:rtype: list of :py:class:`waflib.Node.Node`
	:return: list of include folders as nodes
	"""
	lst = []
	seen = set([])
	for x in self.to_list(inlst):
		# skip duplicates and empty entries
		if x in seen or not x:
			continue
		seen.add(x)
		# with a real lot of targets, it is sometimes interesting to cache the results below
		if isinstance(x, Node.Node):
			lst.append(x)
		else:
			if os.path.isabs(x):
				# absolute path: resolve against the filesystem root
				lst.append(self.bld.root.make_node(x) or x)
			else:
				if x[0] == '#':
					# '#' prefix: relative to the project top-level directory
					p = self.bld.bldnode.make_node(x[1:])
					v = self.bld.srcnode.make_node(x[1:])
				else:
					# plain relative path: resolve against this generator's folder
					p = self.path.get_bld().make_node(x)
					v = self.path.make_node(x)
				if p.is_child_of(self.bld.bldnode):
					p.mkdir()
				# add both the build and the source variants of the folder
				lst.append(p)
				lst.append(v)
	return lst
@feature('c', 'cxx', 'd', 'asm', 'fc', 'includes')
@after_method('propagate_uselib_vars', 'process_source')
def apply_incpaths(self):
	"""
	Task generator method that processes the attribute *includes*::

		tg = bld(features='includes', includes='.')

	The folders only need to be relative to the current directory, the equivalent build directory is
	added automatically (for headers created in the build directory). This enable using a build directory
	or not (``top == out``).

	This method will add a list of nodes read by :py:func:`waflib.Tools.ccroot.to_incnodes` in ``tg.env.INCPATHS``,
	and the list of include paths in ``tg.env.INCLUDES``.
	"""
	# merge the generator attribute with the configured env.INCLUDES
	nodes = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES'])
	self.includes_nodes = nodes
	self.env['INCPATHS'] = [node.abspath() for node in nodes]
class link_task(Task.Task):
	"""
	Base class for all link tasks. A task generator is supposed to have at most one link task bound in the attribute *link_task*. See :py:func:`waflib.Tools.ccroot.apply_link`.

	.. inheritance-diagram:: waflib.Tools.ccroot.stlink_task waflib.Tools.c.cprogram waflib.Tools.c.cshlib waflib.Tools.cxx.cxxstlib waflib.Tools.cxx.cxxprogram waflib.Tools.cxx.cxxshlib waflib.Tools.d.dprogram waflib.Tools.d.dshlib waflib.Tools.d.dstlib waflib.Tools.ccroot.fake_shlib waflib.Tools.ccroot.fake_stlib waflib.Tools.asm.asmprogram waflib.Tools.asm.asmshlib waflib.Tools.asm.asmstlib
	"""
	color = 'YELLOW'
	inst_to = None
	"""Default installation path for the link task outputs, or None to disable"""
	chmod = Utils.O755
	"""Default installation mode for the link task outputs"""
	def add_target(self, target):
		"""
		Process the *target* attribute to add the platform-specific prefix/suffix such as *.so* or *.exe*.
		The settings are retrieved from ``env.clsname_PATTERN``
		"""
		if isinstance(target, str):
			# e.g. env.cshlib_PATTERN for a cshlib task
			pattern = self.env[self.__class__.__name__ + '_PATTERN']
			if not pattern:
				pattern = '%s'
			folder, name = os.path.split(target)
			# version numbers (vnum) apply to shared libraries only
			if self.__class__.__name__.find('shlib') > 0 and getattr(self.generator, 'vnum', None):
				nums = self.generator.vnum.split('.')
				if self.env.DEST_BINFMT == 'pe':
					# include the version in the dll file name,
					# the import lib file name stays unversionned.
					name = name + '-' + nums[0]
				elif self.env.DEST_OS == 'openbsd':
					# openbsd naming: libfoo.so.major[.minor]
					pattern = '%s.%s' % (pattern, nums[0])
					if len(nums) >= 2:
						pattern += '.%s' % nums[1]
			tmp = folder + os.sep + pattern % name
			target = self.generator.path.find_or_declare(tmp)
		self.set_outputs(target)
class stlink_task(link_task):
	"""
	Base for static link tasks, which use *ar* most of the time.
	The target is always removed before being written.
	"""
	# the target removal is performed by the rm_tgt() wrapper below
	run_str = '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
	chmod = Utils.O644
	"""Default installation mode for the static libraries"""
def rm_tgt(cls):
	# Wrap cls.run so the target file is removed before each execution:
	# ar may append to an existing archive, so stale members must not survive.
	old = cls.run
	def wrap(self):
		try: os.remove(self.outputs[0].abspath())
		except OSError: pass  # target did not exist yet: nothing to remove
		return old(self)
	setattr(cls, 'run', wrap)
# patch stlink_task once at import time
rm_tgt(stlink_task)
@feature('c', 'cxx', 'd', 'fc', 'asm')
@after_method('process_source')
def apply_link(self):
	"""
	Collect the tasks stored in ``compiled_tasks`` (created by :py:func:`waflib.Tools.ccroot.create_compiled_task`), and
	use the outputs for a new instance of :py:class:`waflib.Tools.ccroot.link_task`. The class to use is the first link task
	matching a name from the attribute *features*, for example::

		def build(bld):
			tg = bld(features='cxx cxxprogram cprogram', source='main.c', target='app')

	will create the task ``tg.link_task`` as a new instance of :py:class:`waflib.Tools.cxx.cxxprogram`
	"""
	# find the first feature naming a registered link_task subclass
	for x in self.features:
		if x == 'cprogram' and 'cxx' in self.features: # limited compat
			x = 'cxxprogram'
		elif x == 'cshlib' and 'cxx' in self.features:
			x = 'cxxshlib'
		if x in Task.classes:
			if issubclass(Task.classes[x], link_task):
				link = x
				break
	else:
		# no link-type feature: object files only, nothing to do
		return
	objs = [t.outputs[0] for t in getattr(self, 'compiled_tasks', [])]
	self.link_task = self.create_task(link, objs)
	self.link_task.add_target(self.target)
	# remember that the install paths are given by the task generators
	try:
		inst_to = self.install_path
	except AttributeError:
		inst_to = self.link_task.__class__.inst_to
	if inst_to:
		# install a copy of the node list we have at this moment (implib not added)
		self.install_task = self.bld.install_files(inst_to, self.link_task.outputs[:], env=self.env, chmod=self.link_task.chmod, task=self.link_task)
@taskgen_method
def use_rec(self, name, **kw):
	"""
	Processes the ``use`` keyword recursively. This method is kind of private and only meant to be used from ``process_use``
	"""
	if name in self.tmp_use_not or name in self.tmp_use_seen:
		return
	try:
		y = self.bld.get_tgen_by_name(name)
	except Errors.WafError:
		# not a task generator name: record it as a plain uselib entry
		self.uselib.append(name)
		self.tmp_use_not.add(name)
		return
	self.tmp_use_seen.append(name)
	# force the other task generator to create its tasks before we inspect it
	y.post()
	# bind temporary attributes on the task generator
	y.tmp_use_objects = objects = kw.get('objects', True)
	y.tmp_use_stlib = stlib = kw.get('stlib', True)
	try:
		link_task = y.link_task
	except AttributeError:
		# objects-only generator: nothing to link against
		y.tmp_use_var = ''
	else:
		objects = False
		if not isinstance(link_task, stlink_task):
			stlib = False
			y.tmp_use_var = 'LIB'
		else:
			y.tmp_use_var = 'STLIB'
	p = self.tmp_use_prec
	for x in self.to_list(getattr(y, 'use', [])):
		# names bound through env.STLIB_x are handled by propagate_uselib_vars
		if self.env["STLIB_" + x]:
			continue
		# record the precedence edge x -> name for the topological sort
		try:
			p[x].append(name)
		except KeyError:
			p[x] = [name]
		self.use_rec(x, objects=objects, stlib=stlib)
@feature('c', 'cxx', 'd', 'use', 'fc')
@before_method('apply_incpaths', 'propagate_uselib_vars')
@after_method('apply_link', 'process_source')
def process_use(self):
	"""
	Process the ``use`` attribute which contains a list of task generator names::

		def build(bld):
			bld.shlib(source='a.c', target='lib1')
			bld.program(source='main.c', target='app', use='lib1')

	See :py:func:`waflib.Tools.ccroot.use_rec`.
	"""
	use_not = self.tmp_use_not = set([])
	self.tmp_use_seen = [] # we would like an ordered set
	use_prec = self.tmp_use_prec = {}
	self.uselib = self.to_list(getattr(self, 'uselib', []))
	self.includes = self.to_list(getattr(self, 'includes', []))
	names = self.to_list(getattr(self, 'use', []))
	# collect the dependency graph; use_prec maps name -> dependent names
	for x in names:
		self.use_rec(x)
	# names that did not resolve to task generators impose no ordering
	for x in use_not:
		if x in use_prec:
			del use_prec[x]
	# topological sort
	out = []
	tmp = []
	# seed with the names nothing depends on
	for x in self.tmp_use_seen:
		for k in use_prec.values():
			if x in k:
				break
		else:
			tmp.append(x)
	while tmp:
		e = tmp.pop()
		out.append(e)
		try:
			nlst = use_prec[e]
		except KeyError:
			pass
		else:
			del use_prec[e]
			for x in nlst:
				# release x only once no remaining edge points at it
				for y in use_prec:
					if x in use_prec[y]:
						break
				else:
					tmp.append(x)
	if use_prec:
		raise Errors.WafError('Cycle detected in the use processing %r' % use_prec)
	out.reverse()
	link_task = getattr(self, 'link_task', None)
	for x in out:
		y = self.bld.get_tgen_by_name(x)
		var = y.tmp_use_var
		if var and link_task:
			if var == 'LIB' or y.tmp_use_stlib or x in names:
				# add the library name/folder and re-link when its outputs change
				self.env.append_value(var, [y.target[y.target.rfind(os.sep) + 1:]])
				self.link_task.dep_nodes.extend(y.link_task.outputs)
				tmp_path = y.link_task.outputs[0].parent.path_from(self.bld.bldnode)
				self.env.append_unique(var + 'PATH', [tmp_path])
		else:
			if y.tmp_use_objects:
				# no library to link: absorb the other generator's object files
				self.add_objects_from_tgen(y)
		if getattr(y, 'export_includes', None):
			self.includes.extend(y.to_incnodes(y.export_includes))
		if getattr(y, 'export_defines', None):
			self.env.append_value('DEFINES', self.to_list(y.export_defines))
	# and finally, add the use variables (no recursion needed)
	for x in names:
		try:
			y = self.bld.get_tgen_by_name(x)
		except Errors.WafError:
			# not a task generator: treat the name as a uselib entry
			if not self.env['STLIB_' + x] and not x in self.uselib:
				self.uselib.append(x)
		else:
			# propagate the direct dependency's own 'use' names as uselib
			for k in self.to_list(getattr(y, 'use', [])):
				if not self.env['STLIB_' + k] and not k in self.uselib:
					self.uselib.append(k)
@taskgen_method
def accept_node_to_link(self, node):
	"""
	PRIVATE INTERNAL USE ONLY

	Decide whether *node* may be fed to a link task; debug databases (.pdb) are rejected.
	"""
	if node.name.endswith('.pdb'):
		return False
	return True
@taskgen_method
def add_objects_from_tgen(self, tg):
	"""
	Add the objects from the depending compiled tasks as link task inputs.

	Some objects are filtered: for instance, .pdb files are added
	to the compiled tasks but not to the link tasks (to avoid errors).

	PRIVATE INTERNAL USE ONLY
	"""
	# nothing to do when this generator has no link task
	link = getattr(self, 'link_task', None)
	if link is None:
		return
	for compiled in getattr(tg, 'compiled_tasks', []):
		for obj in compiled.outputs:
			if self.accept_node_to_link(obj):
				link.inputs.append(obj)
@taskgen_method
def get_uselib_vars(self):
	"""
	:return: the *uselib* variables associated to the *features* attribute (see :py:attr:`waflib.Tools.ccroot.USELIB_VARS`)
	:rtype: set of string
	"""
	result = set()
	for feat in self.features:
		# features without an entry simply contribute nothing
		result.update(USELIB_VARS.get(feat, ()))
	return result
@feature('c', 'cxx', 'd', 'fc', 'javac', 'cs', 'uselib', 'asm')
@after_method('process_use')
def propagate_uselib_vars(self):
	"""
	Process uselib variables for adding flags. For example, the following target::

		def build(bld):
			bld.env.AFLAGS_aaa = ['bar']
			from waflib.Tools.ccroot import USELIB_VARS
			USELIB_VARS['aaa'] = set('AFLAGS')
			tg = bld(features='aaa', aflags='test')

	The *aflags* attribute will be processed and this method will set::

		tg.env.AFLAGS = ['bar', 'test']
	"""
	env = self.env
	# names to look up as env suffixes: features plus the plain uselib entries
	lookup_names = self.features + self.to_list(getattr(self, 'uselib', []))
	for var in self.get_uselib_vars():
		# per-task-generator attribute, e.g. tg.cflags -> env.CFLAGS
		attr_val = getattr(self, var.lower(), [])
		if attr_val:
			env.append_value(var, self.to_list(attr_val))
		# per-feature/uselib values, e.g. env.CFLAGS_gtk
		for name in lookup_names:
			extra = env['%s_%s' % (var, name)]
			if extra:
				env.append_value(var, extra)
# ============ the code above must not know anything about import libs ========== | |||||
@feature('cshlib', 'cxxshlib', 'fcshlib')
@after_method('apply_link')
def apply_implib(self):
	"""
	Handle dlls and their import libs on Windows-like systems.

	A ``.dll.a`` file called *import library* is generated.
	It must be installed as it is required for linking the library.

	Only active for targets with the PE binary format (``env.DEST_BINFMT == 'pe'``).
	Also wires an optional ``.def`` symbol-export file into the link, and installs
	the import library next to (or separately from) the dll.
	"""
	if not self.env.DEST_BINFMT == 'pe':
		return

	dll = self.link_task.outputs[0]
	# the target may be given as a node or as a path string
	if isinstance(self.target, Node.Node):
		name = self.target.name
	else:
		name = os.path.split(self.target)[1]
	# derive the import library node from the platform pattern, e.g. 'lib%s.dll.a'
	implib = self.env['implib_PATTERN'] % name
	implib = dll.parent.find_or_declare(implib)
	self.env.append_value('LINKFLAGS', self.env['IMPLIB_ST'] % implib.bldpath())
	self.link_task.outputs.append(implib)

	# optional .def file listing the exported symbols
	if getattr(self, 'defs', None) and self.env.DEST_BINFMT == 'pe':
		node = self.path.find_resource(self.defs)
		if not node:
			raise Errors.WafError('invalid def file %r' % self.defs)
		if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
			# msvc needs an explicit /def: flag; track the file as a dependency
			self.env.append_value('LINKFLAGS', '/def:%s' % node.path_from(self.bld.bldnode))
			self.link_task.dep_nodes.append(node)
		else:
			# gcc for windows takes a *.def file as an input without any special flag
			self.link_task.inputs.append(node)

	# where to put the import library
	if getattr(self, 'install_task', None):
		try:
			# user has given a specific installation path for the import library
			inst_to = self.install_path_implib
		except AttributeError:
			try:
				# user has given an installation path for the main library, put the import library in it
				inst_to = self.install_path
			except AttributeError:
				# else, put the library in BINDIR and the import library in LIBDIR
				inst_to = '${IMPLIBDIR}'
				self.install_task.dest = '${BINDIR}'
				if not self.env.IMPLIBDIR:
					self.env.IMPLIBDIR = self.env.LIBDIR
		self.implib_install_task = self.bld.install_files(inst_to, implib, env=self.env, chmod=self.link_task.chmod, task=self.link_task)
# ============ the code above must not know anything about vnum processing on unix platforms =========

# Accepts 1 to 3 dot-separated non-negative integers without leading zeros,
# e.g. '1', '1.2' or '1.2.3' - used by apply_vnum below
re_vnum = re.compile('^([1-9]\\d*|0)([.]([1-9]\\d*|0)){0,2}?$')
@feature('cshlib', 'cxxshlib', 'dshlib', 'fcshlib', 'vnum')
@after_method('apply_link', 'propagate_uselib_vars')
def apply_vnum(self):
	"""
	Enforce version numbering on shared libraries. The valid version numbers must have
	one, two or three dot-separated components::

		def build(bld):
			bld.shlib(source='a.c', target='foo', vnum='14.15.16')

	In this example, ``libfoo.so`` is installed as ``libfoo.so.14.15.16``, and the
	following symbolic links are created:

	* ``libfoo.so → libfoo.so.14.15.16``
	* ``libfoo.so.14 → libfoo.so.14.15.16``

	Only active on posix systems for elf/mach-o binaries.
	"""
	if not getattr(self, 'vnum', '') or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'):
		return

	link = self.link_task
	if not re_vnum.match(self.vnum):
		raise Errors.WafError('Invalid vnum %r for target %r' % (self.vnum, getattr(self, 'name', self)))
	nums = self.vnum.split('.')
	node = link.outputs[0]

	# name3: fully-versioned name; name2: major-version name
	libname = node.name
	if libname.endswith('.dylib'):
		name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum)
		name2 = libname.replace('.dylib', '.%s.dylib' % nums[0])
	else:
		name3 = libname + '.' + self.vnum
		name2 = libname + '.' + nums[0]

	# add the so name for the ld linker - to disable, just unset env.SONAME_ST
	if self.env.SONAME_ST:
		v = self.env.SONAME_ST % name2
		self.env.append_value('LINKFLAGS', v.split())

	# the following task is just to enable execution from the build dir :-/
	if self.env.DEST_OS != 'openbsd':
		outs = [node.parent.find_or_declare(name3)]
		if name2 != name3:
			outs.append(node.parent.find_or_declare(name2))
		self.create_task('vnum', node, outs)

	if getattr(self, 'install_task', None):
		# replace the default install task: the real file goes in under its
		# versioned name, plus the symlinks
		self.install_task.hasrun = Task.SKIP_ME
		bld = self.bld
		path = self.install_task.dest
		if self.env.DEST_OS == 'openbsd':
			# openbsd does not use symlinked version schemes
			libname = self.link_task.outputs[0].name
			t1 = bld.install_as('%s%s%s' % (path, os.sep, libname), node, env=self.env, chmod=self.link_task.chmod)
			self.vnum_install_task = (t1,)
		else:
			t1 = bld.install_as(path + os.sep + name3, node, env=self.env, chmod=self.link_task.chmod)
			t3 = bld.symlink_as(path + os.sep + libname, name3)
			if name2 != name3:
				t2 = bld.symlink_as(path + os.sep + name2, name3)
				self.vnum_install_task = (t1, t2, t3)
			else:
				self.vnum_install_task = (t1, t3)

	if '-dynamiclib' in self.env['LINKFLAGS']:
		# this requires after(propagate_uselib_vars)
		try:
			inst_to = self.install_path
		except AttributeError:
			inst_to = self.link_task.__class__.inst_to
		if inst_to:
			# set the install name so the library can be found at run time (mac-o)
			p = Utils.subst_vars(inst_to, self.env)
			path = os.path.join(p, self.link_task.outputs[0].name)
			self.env.append_value('LINKFLAGS', ['-install_name', path])
class vnum(Task.Task):
	"""
	Create the symbolic links for a versioned shared library.
	Instances are created by :py:func:`waflib.Tools.ccroot.apply_vnum`.
	"""
	color = 'CYAN'
	# fix: the attribute was misspelled 'quient', so the flag was never honored;
	# 'quiet' suppresses the default task output display
	quiet = True
	ext_in = ['.bin']
	def keyword(self):
		"""Display keyword shown while the task runs."""
		return 'Symlinking'
	def run(self):
		"""
		Re-create each output as a symlink pointing at the real library file.

		:return: None on success, 1 if a symlink could not be created
		"""
		for x in self.outputs:
			path = x.abspath()
			try:
				# remove a stale file/link first; ignore if it does not exist
				os.remove(path)
			except OSError:
				pass
			try:
				os.symlink(self.inputs[0].name, path)
			except OSError:
				# non-zero return value marks the task as failed
				return 1
class fake_shlib(link_task):
	"""
	Task used for reading a system shared library and adding the dependency on it.
	Nothing is built; the file signature is recorded so changes trigger rebuilds.
	"""
	def runnable_status(self):
		# wait until every predecessor has completed
		if not all(t.hasrun for t in self.run_after):
			return Task.ASK_LATER
		# record the signatures of the existing files - never actually run
		for node in self.outputs:
			node.sig = Utils.h_file(node.abspath())
		return Task.SKIP_ME
class fake_stlib(stlink_task):
	"""
	Task used for reading a system static library and adding the dependency on it.
	Nothing is built; the file signature is recorded so changes trigger rebuilds.
	"""
	def runnable_status(self):
		# wait until every predecessor has completed
		if not all(t.hasrun for t in self.run_after):
			return Task.ASK_LATER
		# record the signatures of the existing files - never actually run
		for node in self.outputs:
			node.sig = Utils.h_file(node.abspath())
		return Task.SKIP_ME
@conf
def read_shlib(self, name, paths=None, export_includes=None, export_defines=None):
	"""
	Read a system shared library, enabling its use as a local library.
	Will trigger a rebuild if the file changes::

		def build(bld):
			bld.read_shlib('m')
			bld.program(source='main.c', use='m')

	:param name: library name, without prefix/suffix
	:param paths: folders to search, in addition to the default system paths
	:param export_includes: include paths propagated to users of the library
	:param export_defines: defines propagated to users of the library
	"""
	# fix: avoid mutable default arguments ([]), which are shared across calls
	if paths is None:
		paths = []
	if export_includes is None:
		export_includes = []
	if export_defines is None:
		export_defines = []
	return self(name=name, features='fake_lib', lib_paths=paths, lib_type='shlib', export_includes=export_includes, export_defines=export_defines)
@conf
def read_stlib(self, name, paths=None, export_includes=None, export_defines=None):
	"""
	Read a system static library, enabling its use as a local library.
	Will trigger a rebuild if the file changes.

	:param name: library name, without prefix/suffix
	:param paths: folders to search, in addition to the default system paths
	:param export_includes: include paths propagated to users of the library
	:param export_defines: defines propagated to users of the library
	"""
	# fix: avoid mutable default arguments ([]), which are shared across calls
	if paths is None:
		paths = []
	if export_includes is None:
		export_includes = []
	if export_defines is None:
		export_defines = []
	return self(name=name, features='fake_lib', lib_paths=paths, lib_type='stlib', export_includes=export_includes, export_defines=export_defines)
# file name patterns tried (in order) when locating foreign libraries in process_lib;
# '%s' is substituted with the library name
lib_patterns = {
	'shlib' : ['lib%s.so', '%s.so', 'lib%s.dylib', 'lib%s.dll', '%s.dll'],
	'stlib' : ['lib%s.a', '%s.a', 'lib%s.dll', '%s.dll', 'lib%s.lib', '%s.lib'],
}
@feature('fake_lib')
def process_lib(self):
	"""
	Find the location of a foreign library. Used by :py:class:`waflib.Tools.ccroot.read_shlib` and :py:class:`waflib.Tools.ccroot.read_stlib`.

	Searches the user-supplied paths, the current path, and the system library
	paths for the first file name matching :py:data:`lib_patterns`, records its
	signature, then creates a fake link task holding the node.

	:raises Errors.WafError: when the library cannot be found anywhere
	"""
	node = None

	# candidate file names, e.g. ['libm.so', 'm.so', ...]
	names = [x % self.name for x in lib_patterns[self.lib_type]]
	for x in self.lib_paths + [self.path] + SYSTEM_LIB_PATHS:
		if not isinstance(x, Node.Node):
			# string path: resolve against the filesystem root, then the current dir
			x = self.bld.root.find_node(x) or self.path.find_node(x)
			if not x:
				continue

		for y in names:
			node = x.find_node(y)
			if node:
				# record the file hash so changes to the library trigger rebuilds
				node.sig = Utils.h_file(node.abspath())
				break
		else:
			# no pattern matched in this folder - try the next one
			continue
		# inner loop found a node - stop searching folders
		break
	else:
		raise Errors.WafError('could not find library %r' % self.name)
	self.link_task = self.create_task('fake_%s' % self.lib_type, [], [node])
	self.target = self.name
class fake_o(Task.Task):
	"""Placeholder task wrapping a pre-built object file; it never executes."""
	def runnable_status(self):
		# the object file already exists - nothing to do
		return Task.SKIP_ME
@extension('.o', '.obj')
def add_those_o_files(self, node):
	"""Wrap a pre-built object file in a fake task and record it in *compiled_tasks*."""
	fake = self.create_task('fake_o', [], node)
	if hasattr(self, 'compiled_tasks'):
		self.compiled_tasks.append(fake)
	else:
		# first object file seen by this task generator
		self.compiled_tasks = [fake]
@feature('fake_obj')
@before_method('process_source')
def process_objs(self):
	"""
	Puts object files in the task generator outputs
	"""
	obj_nodes = self.to_nodes(self.source)
	for obj_node in obj_nodes:
		self.add_those_o_files(obj_node)
	# the sources were consumed; prevent process_source from seeing them
	self.source = []
@conf
def read_object(self, obj):
	"""
	Read an object file, enabling injection in libs/programs. Will trigger a rebuild if the file changes.

	:param obj: object file path, as string or Node
	"""
	node = obj if isinstance(obj, self.path.__class__) else self.path.find_resource(obj)
	return self(features='fake_obj', source=node, name=node.name)
@feature('cxxprogram', 'cprogram')
@after_method('apply_link', 'process_use')
def set_full_paths_hpux(self):
	"""
	On hp-ux, extend the libpaths and static library paths to absolute paths
	"""
	if self.env.DEST_OS != 'hp-ux':
		return
	root = self.bld.bldnode.abspath()
	for var in ('LIBPATH', 'STLIBPATH'):
		# keep absolute entries, anchor relative ones at the build directory
		self.env[var] = [
			entry if entry.startswith('/') else os.path.normpath(os.path.join(root, entry))
			for entry in self.env[var]
		]
@@ -0,0 +1,29 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Krzysztof Kosiński 2014 | |||||
""" | |||||
Detect the Clang C compiler | |||||
""" | |||||
import os, sys | |||||
from waflib.Tools import ccroot, ar, gcc | |||||
from waflib.Configure import conf | |||||
@conf
def find_clang(conf):
	"""
	Find the program clang and execute it to ensure it really is clang.

	Sets ``conf.env.CC`` and ``conf.env.CC_NAME``.
	"""
	cc = conf.find_program('clang', var='CC')
	# runs the compiler to verify it and detect its version
	conf.get_cc_version(cc, clang=True)
	conf.env.CC_NAME = 'clang'
def configure(conf):
	"""
	Configuration entry point for the clang tool: locate the compiler,
	then set up the common gcc-compatible flags and the C toolchain helpers.
	"""
	conf.find_clang()
	conf.find_ar()
	conf.gcc_common_flags()
	conf.gcc_modifier_platform()
	conf.cc_load_tools()
	conf.cc_add_flags()
	conf.link_add_flags()
@@ -0,0 +1,30 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy 2009-2010 (ita) | |||||
""" | |||||
Detect the Clang++ C++ compiler | |||||
""" | |||||
import os, sys | |||||
from waflib.Tools import ccroot, ar, gxx | |||||
from waflib.Configure import conf | |||||
@conf
def find_clangxx(conf):
	"""
	Find the program clang++, and execute it to ensure it really is clang++.

	Sets ``conf.env.CXX`` and ``conf.env.CXX_NAME``.
	"""
	cxx = conf.find_program('clang++', var='CXX')
	# runs the compiler to verify it and detect its version
	conf.get_cc_version(cxx, clang=True)
	conf.env.CXX_NAME = 'clang'
def configure(conf):
	"""
	Configuration entry point for the clang++ tool: locate the compiler,
	then set up the common g++-compatible flags and the C++ toolchain helpers.
	"""
	conf.find_clangxx()
	conf.find_ar()
	conf.gxx_common_flags()
	conf.gxx_modifier_platform()
	conf.cxx_load_tools()
	conf.cxx_add_flags()
	conf.link_add_flags()
@@ -0,0 +1,104 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Matthias Jahn jahn dôt matthias ât freenet dôt de, 2007 (pmarat) | |||||
""" | |||||
Try to detect a C compiler from the list of supported compilers (gcc, msvc, etc):: | |||||
def options(opt): | |||||
opt.load('compiler_c') | |||||
def configure(cnf): | |||||
cnf.load('compiler_c') | |||||
def build(bld): | |||||
bld.program(source='main.c', target='app') | |||||
The compilers are associated to platforms in :py:attr:`waflib.Tools.compiler_c.c_compiler`. To register | |||||
a new C compiler named *cfoo* (assuming the tool ``waflib/extras/cfoo.py`` exists), use:: | |||||
from waflib.Tools.compiler_c import c_compiler | |||||
c_compiler['win32'] = ['cfoo', 'msvc', 'gcc'] | |||||
def options(opt): | |||||
opt.load('compiler_c') | |||||
def configure(cnf): | |||||
cnf.load('compiler_c') | |||||
def build(bld): | |||||
bld.program(source='main.c', target='app') | |||||
Not all compilers need to have a specific tool. For example, the clang compilers can be detected by the gcc tools when using:: | |||||
$ CC=clang waf configure | |||||
""" | |||||
import os, sys, imp, types, re | |||||
from waflib.Tools import ccroot | |||||
from waflib import Utils, Configure | |||||
from waflib.Logs import debug | |||||
# platform -> ordered list of detection tools to try; first one that works wins
c_compiler = {
	'win32':  ['msvc', 'gcc', 'clang'],
	'cygwin': ['gcc'],
	'darwin': ['clang', 'gcc'],
	'aix':    ['xlc', 'gcc', 'clang'],
	'linux':  ['gcc', 'clang', 'icc'],
	'sunos':  ['suncc', 'gcc'],
	'irix':   ['gcc', 'irixcc'],
	'hpux':   ['gcc'],
	'osf1V':  ['gcc'],
	'gnu':    ['gcc', 'clang'],
	'java':   ['gcc', 'msvc', 'clang', 'icc'],
	'default':['gcc', 'clang'],
}
"""
Dict mapping the platform names to Waf tools finding specific C compilers::

	from waflib.Tools.compiler_c import c_compiler
	c_compiler['linux'] = ['gcc', 'icc', 'suncc']
"""
def default_compilers():
	"""Return the C compiler tools to try on this platform, space-separated."""
	platform = Utils.unversioned_sys_platform()
	return ' '.join(c_compiler.get(platform, c_compiler['default']))
def configure(conf):
	"""
	Try to find a suitable C compiler or raise a :py:class:`waflib.Errors.ConfigurationError`.

	Candidates come from ``--check-c-compiler`` or from :py:func:`default_compilers`;
	each one is loaded in turn on a stashed env so a failed attempt leaves no trace.
	"""
	try: test_for_compiler = conf.options.check_c_compiler or default_compilers()
	except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_c')")

	# candidates may be separated by spaces or commas
	for compiler in re.split('[ ,]+', test_for_compiler):
		conf.env.stash()
		conf.start_msg('Checking for %r (C compiler)' % compiler)
		try:
			conf.load(compiler)
		except conf.errors.ConfigurationError as e:
			# roll back any partial env changes made by the failed tool
			conf.env.revert()
			conf.end_msg(False)
			debug('compiler_c: %r' % e)
		else:
			if conf.env['CC']:
				conf.end_msg(conf.env.get_flat('CC'))
				conf.env['COMPILER_CC'] = compiler
				break
			conf.end_msg(False)
	else:
		# the loop ran out of candidates without a break
		conf.fatal('could not configure a C compiler!')
def options(opt):
	"""
	Restrict the compiler detection from the command-line::

		$ waf configure --check-c-compiler=gcc
	"""
	test_for_compiler = default_compilers()
	# make the c_* helper tools available (dumb preprocessor excluded on purpose)
	opt.load_special_tools('c_*.py', ban=['c_dumbpreproc.py'])
	cc_compiler_opts = opt.add_option_group('Configuration options')
	cc_compiler_opts.add_option('--check-c-compiler', default=None,
		help='list of C compilers to try [%s]' % test_for_compiler,
		dest="check_c_compiler")

	# pre-load each candidate tool so its own options are registered too
	for x in test_for_compiler.split():
		opt.load('%s' % x)
@@ -0,0 +1,105 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Matthias Jahn jahn dôt matthias ât freenet dôt de 2007 (pmarat) | |||||
""" | |||||
Try to detect a C++ compiler from the list of supported compilers (g++, msvc, etc):: | |||||
def options(opt): | |||||
opt.load('compiler_cxx') | |||||
def configure(cnf): | |||||
cnf.load('compiler_cxx') | |||||
def build(bld): | |||||
bld.program(source='main.cpp', target='app') | |||||
The compilers are associated to platforms in :py:attr:`waflib.Tools.compiler_cxx.cxx_compiler`. To register | |||||
a new C++ compiler named *cfoo* (assuming the tool ``waflib/extras/cfoo.py`` exists), use:: | |||||
from waflib.Tools.compiler_cxx import cxx_compiler | |||||
cxx_compiler['win32'] = ['cfoo', 'msvc', 'gcc'] | |||||
def options(opt): | |||||
opt.load('compiler_cxx') | |||||
def configure(cnf): | |||||
cnf.load('compiler_cxx') | |||||
def build(bld): | |||||
bld.program(source='main.c', target='app') | |||||
Not all compilers need to have a specific tool. For example, the clang compilers can be detected by the gcc tools when using:: | |||||
$ CXX=clang waf configure | |||||
""" | |||||
import os, sys, imp, types, re | |||||
from waflib.Tools import ccroot | |||||
from waflib import Utils, Configure | |||||
from waflib.Logs import debug | |||||
# platform -> ordered list of detection tools to try; first one that works wins
cxx_compiler = {
	'win32':  ['msvc', 'g++', 'clang++'],
	'cygwin': ['g++'],
	'darwin': ['clang++', 'g++'],
	'aix':    ['xlc++', 'g++', 'clang++'],
	'linux':  ['g++', 'clang++', 'icpc'],
	'sunos':  ['sunc++', 'g++'],
	'irix':   ['g++'],
	'hpux':   ['g++'],
	'osf1V':  ['g++'],
	'gnu':    ['g++', 'clang++'],
	'java':   ['g++', 'msvc', 'clang++', 'icpc'],
	'default': ['g++', 'clang++']
}
"""
Dict mapping the platform names to Waf tools finding specific C++ compilers::

	from waflib.Tools.compiler_cxx import cxx_compiler
	cxx_compiler['linux'] = ['g++', 'clang++', 'icpc']
"""
def default_compilers():
	"""Return the C++ compiler tools to try on this platform, space-separated."""
	platform = Utils.unversioned_sys_platform()
	return ' '.join(cxx_compiler.get(platform, cxx_compiler['default']))
def configure(conf):
	"""
	Try to find a suitable C++ compiler or raise a :py:class:`waflib.Errors.ConfigurationError`.

	Candidates come from ``--check-cxx-compiler`` or from :py:func:`default_compilers`;
	each one is loaded in turn on a stashed env so a failed attempt leaves no trace.
	"""
	try: test_for_compiler = conf.options.check_cxx_compiler or default_compilers()
	except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_cxx')")

	# candidates may be separated by spaces or commas
	for compiler in re.split('[ ,]+', test_for_compiler):
		conf.env.stash()
		conf.start_msg('Checking for %r (C++ compiler)' % compiler)
		try:
			conf.load(compiler)
		except conf.errors.ConfigurationError as e:
			# roll back any partial env changes made by the failed tool
			conf.env.revert()
			conf.end_msg(False)
			debug('compiler_cxx: %r' % e)
		else:
			if conf.env['CXX']:
				conf.end_msg(conf.env.get_flat('CXX'))
				conf.env['COMPILER_CXX'] = compiler
				break
			conf.end_msg(False)
	else:
		# the loop ran out of candidates without a break
		conf.fatal('could not configure a C++ compiler!')
def options(opt):
	"""
	Restrict the compiler detection from the command-line::

		$ waf configure --check-cxx-compiler=gxx
	"""
	test_for_compiler = default_compilers()
	# make the cxx_* helper tools available
	opt.load_special_tools('cxx_*.py')
	cxx_compiler_opts = opt.add_option_group('Configuration options')
	cxx_compiler_opts.add_option('--check-cxx-compiler', default=None,
		help='list of C++ compilers to try [%s]' % test_for_compiler,
		dest="check_cxx_compiler")

	# pre-load each candidate tool so its own options are registered too
	for x in test_for_compiler.split():
		opt.load('%s' % x)
@@ -0,0 +1,79 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Carlos Rafael Giani, 2007 (dv) | |||||
# Thomas Nagy, 2010 (ita) | |||||
""" | |||||
Try to detect a D compiler from the list of supported compilers:: | |||||
def options(opt): | |||||
opt.load('compiler_d') | |||||
def configure(cnf): | |||||
cnf.load('compiler_d') | |||||
def build(bld): | |||||
bld.program(source='main.d', target='app') | |||||
Only three D compilers are really present at the moment: | |||||
* gdc | |||||
* dmd, the ldc compiler having a very similar command-line interface | |||||
* ldc2 | |||||
""" | |||||
import os, sys, imp, types, re | |||||
from waflib import Utils, Configure, Options, Logs | |||||
# platform -> ordered list of detection tools to try; first one that works wins
d_compiler = {
	'default' : ['gdc', 'dmd', 'ldc2']
}
"""
Dict mapping the platform names to lists of names of D compilers to try, in order of preference::

	from waflib.Tools.compiler_d import d_compiler
	d_compiler['default'] = ['gdc', 'dmd', 'ldc2']
"""
def default_compilers():
	"""Return the D compiler tools to try on this platform, space-separated."""
	platform = Utils.unversioned_sys_platform()
	return ' '.join(d_compiler.get(platform, d_compiler['default']))
def configure(conf):
	"""
	Try to find a suitable D compiler or raise a :py:class:`waflib.Errors.ConfigurationError`.

	Candidates come from ``--check-d-compiler`` or from :py:func:`default_compilers`;
	each one is loaded in turn on a stashed env so a failed attempt leaves no trace.
	"""
	try: test_for_compiler = conf.options.check_d_compiler or default_compilers()
	except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_d')")

	# candidates may be separated by spaces or commas
	for compiler in re.split('[ ,]+', test_for_compiler):
		conf.env.stash()
		conf.start_msg('Checking for %r (D compiler)' % compiler)
		try:
			conf.load(compiler)
		except conf.errors.ConfigurationError as e:
			# roll back any partial env changes made by the failed tool
			conf.env.revert()
			conf.end_msg(False)
			Logs.debug('compiler_d: %r' % e)
		else:
			if conf.env.D:
				conf.end_msg(conf.env.get_flat('D'))
				conf.env['COMPILER_D'] = compiler
				break
			conf.end_msg(False)
	else:
		# the loop ran out of candidates without a break
		conf.fatal('could not configure a D compiler!')
def options(opt):
	"""
	Restrict the compiler detection from the command-line::

		$ waf configure --check-d-compiler=dmd
	"""
	test_for_compiler = default_compilers()
	d_compiler_opts = opt.add_option_group('Configuration options')
	d_compiler_opts.add_option('--check-d-compiler', default=None,
		help='list of D compilers to try [%s]' % test_for_compiler, dest='check_d_compiler')

	# pre-load each candidate tool so its own options are registered too
	for x in test_for_compiler.split():
		opt.load('%s' % x)
@@ -0,0 +1,67 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
import os, sys, imp, types, re | |||||
from waflib import Utils, Configure, Options, Logs, Errors | |||||
from waflib.Tools import fc | |||||
# platform -> ordered list of detection tools to try; first one that works wins
fc_compiler = {
	'win32'  : ['gfortran','ifort'],
	'darwin' : ['gfortran', 'g95', 'ifort'],
	'linux'  : ['gfortran', 'g95', 'ifort'],
	'java'   : ['gfortran', 'g95', 'ifort'],
	'default': ['gfortran'],
	'aix'    : ['gfortran']
}
"""
Dict mapping the platform names to lists of names of Fortran compilers to try, in order of preference::

	from waflib.Tools.compiler_fc import fc_compiler
	fc_compiler['linux'] = ['gfortran', 'g95', 'ifort']
"""
def default_compilers():
	"""Return the Fortran compiler tools to try on this platform, space-separated."""
	platform = Utils.unversioned_sys_platform()
	return ' '.join(fc_compiler.get(platform, fc_compiler['default']))
def configure(conf):
	"""
	Try to find a suitable Fortran compiler or raise a :py:class:`waflib.Errors.ConfigurationError`.

	Candidates come from ``--check-fortran-compiler`` or from :py:func:`default_compilers`;
	each one is loaded in turn on a stashed env so a failed attempt leaves no trace.
	"""
	try: test_for_compiler = conf.options.check_fortran_compiler or default_compilers()
	except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_fc')")

	# candidates may be separated by spaces or commas
	for compiler in re.split('[ ,]+', test_for_compiler):
		conf.env.stash()
		conf.start_msg('Checking for %r (Fortran compiler)' % compiler)
		try:
			conf.load(compiler)
		except conf.errors.ConfigurationError as e:
			# roll back any partial env changes made by the failed tool
			conf.env.revert()
			conf.end_msg(False)
			Logs.debug('compiler_fortran: %r' % e)
		else:
			if conf.env['FC']:
				conf.end_msg(conf.env.get_flat('FC'))
				conf.env.COMPILER_FORTRAN = compiler
				break
			conf.end_msg(False)
	else:
		# the loop ran out of candidates without a break
		conf.fatal('could not configure a Fortran compiler!')
def options(opt):
	"""
	Restrict the compiler detection from the command-line::

		$ waf configure --check-fortran-compiler=ifort
	"""
	test_for_compiler = default_compilers()
	# make the fc_* helper tools available
	opt.load_special_tools('fc_*.py')
	fortran_compiler_opts = opt.add_option_group('Configuration options')
	fortran_compiler_opts.add_option('--check-fortran-compiler', default=None,
		help='list of Fortran compiler to try [%s]' % test_for_compiler,
		dest="check_fortran_compiler")

	# pre-load each candidate tool so its own options are registered too
	for x in test_for_compiler.split():
		opt.load('%s' % x)
@@ -0,0 +1,222 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2006-2010 (ita) | |||||
""" | |||||
C# support. A simple example:: | |||||
def configure(conf): | |||||
conf.load('cs') | |||||
def build(bld): | |||||
bld(features='cs', source='main.cs', gen='foo') | |||||
Note that the configuration may compile C# snippets:: | |||||
FRAG = ''' | |||||
namespace Moo { | |||||
public class Test { public static int Main(string[] args) { return 0; } } | |||||
}''' | |||||
def configure(conf): | |||||
conf.check(features='cs', fragment=FRAG, compile_filename='test.cs', gen='test.exe', | |||||
bintype='exe', csflags=['-pkg:gtk-sharp-2.0'], msg='Checking for Gtksharp support') | |||||
""" | |||||
from waflib import Utils, Task, Options, Logs, Errors | |||||
from waflib.TaskGen import before_method, after_method, feature | |||||
from waflib.Tools import ccroot | |||||
from waflib.Configure import conf | |||||
import os, tempfile | |||||
# uselib variables propagated to C# tasks by propagate_uselib_vars
ccroot.USELIB_VARS['cs'] = set(['CSFLAGS', 'ASSEMBLIES', 'RESOURCES'])
# .net assemblies are referenced by their exact file name (no lib prefix/suffix)
ccroot.lib_patterns['csshlib'] = ['%s']
@feature('cs')
@before_method('process_source')
def apply_cs(self):
	"""
	Create a C# task bound to the attribute *cs_task*. There can be only one C# task by task generator.

	Consumes the ``.cs`` sources (leaving other files for later processing),
	sets the target/output flags on the task env, and creates the install task
	unless ``install_path`` is falsy.
	"""
	cs_nodes = []
	no_nodes = []
	# split the sources: .cs files feed the mcs task, anything else stays in self.source
	for x in self.to_nodes(self.source):
		if x.name.endswith('.cs'):
			cs_nodes.append(x)
		else:
			no_nodes.append(x)
	self.source = no_nodes

	# infer the binary type from the output name unless given explicitly
	bintype = getattr(self, 'bintype', self.gen.endswith('.dll') and 'library' or 'exe')
	self.cs_task = tsk = self.create_task('mcs', cs_nodes, self.path.find_or_declare(self.gen))
	tsk.env.CSTYPE = '/target:%s' % bintype
	tsk.env.OUT = '/out:%s' % tsk.outputs[0].abspath()
	self.env.append_value('CSFLAGS', '/platform:%s' % getattr(self, 'platform', 'anycpu'))

	inst_to = getattr(self, 'install_path', bintype=='exe' and '${BINDIR}' or '${LIBDIR}')
	if inst_to:
		# note: we are making a copy, so the files added to cs_task.outputs won't be installed automatically
		mod = getattr(self, 'chmod', bintype=='exe' and Utils.O755 or Utils.O644)
		self.install_task = self.bld.install_files(inst_to, self.cs_task.outputs[:], env=self.env, chmod=mod)
@feature('cs')
@after_method('apply_cs')
def use_cs(self):
	"""
	C# applications honor the **use** keyword::

		def build(bld):
			bld(features='cs', source='My.cs', bintype='library', gen='my.dll', name='mylib')
			bld(features='cs', source='Hi.cs', includes='.', bintype='exe', gen='hi.exe', use='mylib', name='hi')

	Names that do not match a task generator are passed straight to the
	compiler as ``/reference:`` flags (external assemblies).
	"""
	names = self.to_list(getattr(self, 'use', []))
	get = self.bld.get_tgen_by_name
	for x in names:
		try:
			y = get(x)
		except Errors.WafError:
			# not a local target: assume it is an external assembly name
			self.env.append_value('CSFLAGS', '/reference:%s' % x)
			continue
		y.post()

		tsk = getattr(y, 'cs_task', None) or getattr(y, 'link_task', None)
		if not tsk:
			self.bld.fatal('cs task has no link task for use %r' % self)
		self.cs_task.dep_nodes.extend(tsk.outputs) # dependency
		self.cs_task.set_run_after(tsk) # order (redundant, the order is inferred from the nodes inputs/outputs)
		self.env.append_value('CSFLAGS', '/reference:%s' % tsk.outputs[0].abspath())
@feature('cs')
@after_method('apply_cs', 'use_cs')
def debug_cs(self):
	"""
	The C# targets may create .mdb or .pdb files::

		def build(bld):
			bld(features='cs', source='My.cs', bintype='library', gen='my.dll', csdebug='full')
			# csdebug is a value in (True, 'full', 'pdbonly')
	"""
	csdebug = getattr(self, 'csdebug', self.env.CSDEBUG)
	if not csdebug:
		return

	node = self.cs_task.outputs[0]
	# mono produces foo.dll.mdb, csc produces foo.pdb
	if self.env.CS_NAME == 'mono':
		side_file = node.parent.find_or_declare(node.name + '.mdb')
	else:
		side_file = node.change_ext('.pdb')
	self.cs_task.outputs.append(side_file)

	try:
		self.install_task.source.append(side_file)
	except AttributeError:
		# no install task for this target
		pass

	# map the csdebug value to compiler flags; any other truthy value disables debug info
	flag_map = {
		'pdbonly': ['/debug+', '/debug:pdbonly'],
		'full': ['/debug+', '/debug:full'],
	}
	self.env.append_value('CSFLAGS', flag_map.get(csdebug, ['/debug-']))
class mcs(Task.Task):
	"""
	Compile C# files.

	Long command lines (>= 8K characters) are rewritten to use a temporary
	response file (``@file``) so Windows command-length limits are not hit.
	"""
	color   = 'YELLOW'
	run_str = '${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}'

	def exec_command(self, cmd, **kw):
		"""Execute *cmd*, switching to a response file when the line is too long."""
		bld = self.generator.bld

		try:
			if not kw.get('cwd', None):
				kw['cwd'] = bld.cwd
		except AttributeError:
			# bld.cwd not set yet - initialize it from the variant dir
			bld.cwd = kw['cwd'] = bld.variant_dir

		try:
			tmp = None
			if isinstance(cmd, list) and len(' '.join(cmd)) >= 8192:
				program = cmd[0] #unquoted program name, otherwise exec_command will fail
				cmd = [self.quote_response_command(x) for x in cmd]
				(fd, tmp) = tempfile.mkstemp()
				os.write(fd, '\r\n'.join(i.replace('\\', '\\\\') for i in cmd[1:]).encode())
				os.close(fd)
				cmd = [program, '@' + tmp]
			# no return here, that's on purpose
			ret = self.generator.bld.exec_command(cmd, **kw)
		finally:
			if tmp:
				try:
					os.remove(tmp)
				except OSError:
					pass # anti-virus and indexers can keep the files open -_-
		return ret

	def quote_response_command(self, flag):
		"""Quote one argument for inclusion in a response file."""
		# /noconfig is not allowed when using response files
		if flag.lower() == '/noconfig':
			return ''

		if flag.find(' ') > -1:
			# quote only the value part of known key:value flags
			for x in ('/r:', '/reference:', '/resource:', '/lib:', '/out:'):
				if flag.startswith(x):
					flag = '%s"%s"' % (x, '","'.join(flag[len(x):].split(',')))
					break
			else:
				flag = '"%s"' % flag
		return flag
def configure(conf):
	"""
	Find a C# compiler, set the variable MCS for the compiler and CS_NAME (mono or csc)
	"""
	user_csc = getattr(Options.options, 'cscbinary', None)
	if user_csc:
		# a compiler given on the command line takes precedence
		conf.env.MCS = user_csc
	conf.find_program(['csc', 'mcs', 'gmcs'], var='MCS')
	env = conf.env
	env.ASS_ST = '/r:%s'
	env.RES_ST = '/resource:%s'
	env.CS_NAME = 'mono' if str(env.MCS).lower().find('mcs') > -1 else 'csc'
def options(opt):
	"""
	Add the ``--with-csc-binary`` command-line option for selecting the C# compiler::

		$ waf configure --with-csc-binary=/foo/bar/mcs
	"""
	opt.add_option('--with-csc-binary', type='string', dest='cscbinary')
class fake_csshlib(Task.Task):
	"""
	Dummy task for reading a foreign .net assembly and adding a dependency on it
	"""
	color = 'YELLOW'
	inst_to = None

	def runnable_status(self):
		# record the signature of each assembly so dependent tasks rebuild on change
		for node in self.outputs:
			node.sig = Utils.h_file(node.abspath())
		return Task.SKIP_ME
@conf
def read_csshlib(self, name, paths=[]):
	"""
	Read a foreign .net assembly for the *use* system::

		def build(bld):
			bld.read_csshlib('ManagedLibrary.dll', paths=[bld.env.mylibrarypath])
			bld(features='cs', source='Hi.cs', bintype='exe', gen='hi.exe', use='ManagedLibrary.dll')

	:param name: Name of the library
	:type name: string
	:param paths: Folders in which the library may be found
	:type paths: list of string
	:return: A task generator having the feature *fake_lib* which will call :py:func:`waflib.Tools.ccroot.process_lib`
	:rtype: :py:class:`waflib.TaskGen.task_gen`
	"""
	tg = self(name=name, features='fake_lib', lib_paths=paths, lib_type='csshlib')
	return tg
@@ -0,0 +1,40 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2005-2010 (ita) | |||||
"Base for c++ programs and libraries" | |||||
from waflib import TaskGen, Task | |||||
from waflib.Tools import c_preproc | |||||
from waflib.Tools.ccroot import link_task, stlink_task | |||||
@TaskGen.extension('.cpp','.cc','.cxx','.C','.c++')
def cxx_hook(self, node):
	"Map the usual c++ file extensions to a :py:class:`waflib.Tools.cxx.cxx` compilation task"
	tsk = self.create_compiled_task('cxx', node)
	return tsk
# compile .c files as c++ when no dedicated c tool was loaded first
TaskGen.task_gen.mappings.setdefault('.c', TaskGen.task_gen.mappings['.cpp'])
class cxx(Task.Task):
	"Compile C++ files into object files"
	run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()}'
	vars = ['CXXDEPS'] # unused variable to depend on, just in case
	ext_in = ['.h'] # set the build order easily by using ext_out=['.h']
	scan = c_preproc.scan # header dependency scanner (preprocessor emulation)
class cxxprogram(link_task):
	"Link object files into a c++ program"
	run_str = '${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
	vars = ['LINKDEPS'] # extra variable to depend on for re-linking
	ext_out = ['.bin'] # build-order hint for other tasks
	inst_to = '${BINDIR}' # default installation directory
class cxxshlib(cxxprogram):
	"Link object files into a c++ shared library"
	# same link command as cxxprogram, only the installation directory differs
	inst_to = '${LIBDIR}'
class cxxstlib(stlink_task):
	"Link object files into a c++ static library"
	# behavior comes entirely from stlink_task (archive creation)
	pass # do not remove
@@ -0,0 +1,97 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Carlos Rafael Giani, 2007 (dv) | |||||
# Thomas Nagy, 2007-2010 (ita) | |||||
from waflib import Utils, Task, Errors | |||||
from waflib.TaskGen import taskgen_method, feature, extension | |||||
from waflib.Tools import d_scan, d_config | |||||
from waflib.Tools.ccroot import link_task, stlink_task | |||||
class d(Task.Task):
	"Compile a d file into an object file"
	color = 'GREEN'
	run_str = '${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_SRC_F:SRC} ${D_TGT_F:TGT}'
	scan = d_scan.scan # dependency scanner for d import statements
class d_with_header(d):
	"Compile a d file and generate a header"
	# outputs[0] is the object file, outputs[1] the generated header
	run_str = '${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_HDR_F:tgt.outputs[1].bldpath()} ${D_SRC_F:SRC} ${D_TGT_F:tgt.outputs[0].bldpath()}'
class d_header(Task.Task):
	"Compile d headers"
	color = 'BLUE'
	run_str = '${D} ${D_HEADER} ${SRC}'
class dprogram(link_task):
	"Link object files into a d program"
	run_str = '${D_LINKER} ${LINKFLAGS} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F:TGT} ${RPATH_ST:RPATH} ${DSTLIB_MARKER} ${DSTLIBPATH_ST:STLIBPATH} ${DSTLIB_ST:STLIB} ${DSHLIB_MARKER} ${DLIBPATH_ST:LIBPATH} ${DSHLIB_ST:LIB}'
	inst_to = '${BINDIR}' # default installation directory
class dshlib(dprogram):
	"Link object files into a d shared library"
	# same link command as dprogram, only the installation directory differs
	inst_to = '${LIBDIR}'
class dstlib(stlink_task):
	"Link object files into a d static library"
	# behavior comes entirely from stlink_task (archive creation)
	pass # do not remove
@extension('.d', '.di', '.D')
def d_hook(self, node):
	"""
	Compile *D* files. To get .di files as well as .o files, set the following::

		def build(bld):
			bld.program(source='foo.d', target='app', generate_headers=True)
	"""
	obj_ext = 'obj' if Utils.destos_to_binfmt(self.env.DEST_OS) == 'pe' else 'o'
	out = '%s.%d.%s' % (node.name, self.idx, obj_ext)

	def _compiled_task(tgen, kind, src):
		# create the compilation task and register it on the task generator
		tsk = tgen.create_task(kind, src, src.parent.find_or_declare(out))
		try:
			tgen.compiled_tasks.append(tsk)
		except AttributeError:
			tgen.compiled_tasks = [tsk]
		return tsk

	if getattr(self, 'generate_headers', None):
		tsk = _compiled_task(self, 'd_with_header', node)
		# the generated interface file is a second output
		tsk.outputs.append(node.change_ext(self.env['DHEADER_ext']))
	else:
		tsk = _compiled_task(self, 'd', node)
	return tsk
@taskgen_method
def generate_header(self, filename):
	"""
	Queue a d header for creation (see feature request #104)::

		def build(bld):
			tg = bld.program(source='foo.d', target='app')
			tg.generate_header('blah.d')
			# is equivalent to:
			#tg = bld.program(source='foo.d', target='app', header_lst='blah.d')

	:param filename: header to create
	:type filename: string
	"""
	item = [filename, self.install_path]
	try:
		self.header_lst.append(item)
	except AttributeError:
		# first header registered on this task generator
		self.header_lst = [item]
@feature('d')
def process_header(self):
	"""
	Create the d header compilation tasks declared in the attribute *header_lst*::

		def build(bld):
			bld.program(source='foo.d', target='app', header_lst='blah.d')
	"""
	for entry in getattr(self, 'header_lst', []):
		filename = entry[0]
		node = self.path.find_resource(filename)
		if not node:
			raise Errors.WafError('file %r not found on d obj' % filename)
		self.create_task('d_header', node, node.change_ext('.di'))
@@ -0,0 +1,63 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2010 (ita) | |||||
from waflib import Utils | |||||
from waflib.Configure import conf | |||||
@conf
def d_platform_flags(self):
	"""
	Set the file name patterns (program/shared/static) for d binaries,
	according to the destination binary format.
	"""
	env = self.env
	if not env.DEST_OS:
		env.DEST_OS = Utils.unversioned_sys_platform()
	binfmt = Utils.destos_to_binfmt(self.env.DEST_OS)
	# (program, shared library, static library) patterns per binary format
	if binfmt == 'pe':
		patterns = ('%s.exe', 'lib%s.dll', 'lib%s.a')
	elif binfmt == 'mac-o':
		patterns = ('%s', 'lib%s.dylib', 'lib%s.a')
	else:
		patterns = ('%s', 'lib%s.so', 'lib%s.a')
	env['dprogram_PATTERN'], env['dshlib_PATTERN'], env['dstlib_PATTERN'] = patterns
# Small D program used at configuration time to detect which standard library
# the compiler ships: it prints "phobos2", "tango" or "phobos1" depending on
# the compile-time version identifiers.
DLIB = '''
version(D_Version2) {
	import std.stdio;
	int main() {
		writefln("phobos2");
		return 0;
	}
} else {
	version(Tango) {
		import tango.stdc.stdio;
		int main() {
			printf("tango");
			return 0;
		}
	} else {
		import std.stdio;
		int main() {
			writefln("phobos1");
			return 0;
		}
	}
}
'''
"""Detection string for the D standard library"""
@conf
def check_dlibrary(self, execute=True):
	"""
	Detect the kind of standard library shipped with the compiler; when *execute*
	is True, set ``conf.env.DLIBRARY`` to ``tango``, ``phobos1`` or ``phobos2``.
	"""
	out = self.check_cc(fragment=DLIB, features='d dprogram', compile_filename='test.d', execute=execute, define_ret=True)
	if execute:
		self.env.DLIBRARY = out.strip()
@@ -0,0 +1,209 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2010 (ita) | |||||
""" | |||||
Provide a scanner for finding dependencies on d files | |||||
""" | |||||
import re | |||||
from waflib import Utils, Logs | |||||
def filter_comments(filename):
	"""
	Read a d source file and return its text split into fragments, with string
	literals preserved and comments replaced by a single space.

	:param filename: d file name
	:type filename: string
	:rtype: list
	:return: a list of characters
	"""
	txt = Utils.readf(filename)
	i = 0
	buf = []         # accumulated code fragments (comments removed)
	max = len(txt)   # NOTE: shadows the builtin max(), kept as-is
	begin = 0        # start index of the current untouched fragment
	while i < max:
		c = txt[i]
		if c == '"' or c == "'": # skip a string or character literal
			buf.append(txt[begin:i])
			delim = c
			i += 1
			while i < max:
				c = txt[i]
				if c == delim: break
				elif c == '\\': # skip the character following backslash
					i += 1
				i += 1
			i += 1
			begin = i
		elif c == '/': # try to replace a comment with whitespace
			buf.append(txt[begin:i])
			i += 1
			if i == max: break
			c = txt[i]
			if c == '+': # eat nesting /+ +/ comment
				i += 1
				nesting = 1
				c = None
				while i < max:
					prev = c
					c = txt[i]
					if prev == '/' and c == '+':
						nesting += 1
						c = None # avoid counting '/+/' twice
					elif prev == '+' and c == '/':
						nesting -= 1
						if nesting == 0: break
						c = None # avoid counting '+/+' twice
					i += 1
			elif c == '*': # eat /* */ comment
				i += 1
				c = None
				while i < max:
					prev = c
					c = txt[i]
					if prev == '*' and c == '/': break
					i += 1
			elif c == '/': # eat // comment
				i += 1
				while i < max and txt[i] != '\n':
					i += 1
			else: # no comment
				begin = i - 1
				continue
			# end of a comment: resume the fragment and insert one space
			i += 1
			begin = i
			buf.append(' ')
		else:
			i += 1
	buf.append(txt[begin:]) # trailing fragment
	return buf
class d_parser(object):
	"""
	Parser extracting the modules imported by d files
	"""
	def __init__(self, env, incpaths):
		self.allnames = []
		# regexps matching 'module x;' and 'import x;' declarations
		self.re_module = re.compile("module\s+([^;]+)")
		self.re_import = re.compile("import\s+([^;]+)")
		self.re_import_bindings = re.compile("([^:]+):(.*)")
		self.re_import_alias = re.compile("[^=]+=(.+)")

		self.env = env
		self.nodes = []
		self.names = []
		self.incpaths = incpaths

	def tryfind(self, filename):
		"""
		Try to resolve a module name to a file below one of the include paths

		:param filename: module name to resolve
		:type filename: string
		"""
		target = filename.replace('.', '/') + '.d'
		node = None
		for dirnode in self.incpaths:
			node = dirnode.find_resource(target)
			if node:
				self.nodes.append(node)
				self.waiting.append(node)
				break
		if not node and filename not in self.names:
			# unresolved: remember the raw name
			self.names.append(filename)

	def get_strings(self, code):
		"""
		Extract the names of the modules imported by some d code

		:param code: d code to parse
		:type code: string
		:return: the modules that the code uses
		:rtype: list of string
		"""
		self.module = ''
		lst = []

		# module declaration, if present
		mod = self.re_module.search(code)
		if mod:
			self.module = re.sub('\s+', '', mod.group(1)) # strip all whitespaces

		# every 'import ...;' occurrence
		for imp in self.re_import.finditer(code):
			text = re.sub('\s+', '', imp.group(1)) # strip all whitespaces
			# import bindings ('import mod : sym') terminate the import list;
			# only the part before the ':' names modules
			bound = self.re_import_bindings.match(text)
			if bound:
				text = bound.group(1)
			for item in text.split(','):
				# aliased imports ('alias = module') name the module after '='
				alias = self.re_import_alias.match(item)
				if alias:
					item = alias.group(1)
				lst.append(item)
		return lst

	def start(self, node):
		"""
		Parse the given file and, transitively, the files it imports

		:param node: input file
		:type node: :py:class:`waflib.Node.Node`
		"""
		self.waiting = [node]
		# process the queue until no dependency remains
		while self.waiting:
			self.iter(self.waiting.pop(0))

	def iter(self, node):
		"""
		Process one file: collect its imports and queue the files found

		:param node: input file
		:type node: :py:class:`waflib.Node.Node`
		"""
		code = "".join(filter_comments(node.abspath()))
		for name in self.get_strings(code):
			if name in self.allnames: # already processed
				continue
			self.allnames.append(name)
			self.tryfind(name)
def scan(self):
	"Look for the .d/.di files used by a d file"
	parser = d_parser(self.env, self.generator.includes_nodes)
	src = self.inputs[0]
	parser.start(src)
	if Logs.verbose:
		Logs.debug('deps: deps for %s: %r; unresolved %r' % (str(src), parser.nodes, parser.names))
	return (parser.nodes, parser.names)
@@ -0,0 +1,70 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Ali Sabil, 2007 | |||||
""" | |||||
Compile dbus files with **dbus-binding-tool** | |||||
Typical usage:: | |||||
def options(opt): | |||||
opt.load('compiler_c dbus') | |||||
def configure(conf): | |||||
conf.load('compiler_c dbus') | |||||
def build(bld): | |||||
tg = bld.program( | |||||
includes = '.', | |||||
source = bld.path.ant_glob('*.c'), | |||||
target = 'gnome-hello') | |||||
tg.add_dbus_file('test.xml', 'test_prefix', 'glib-server') | |||||
""" | |||||
from waflib import Task, Errors | |||||
from waflib.TaskGen import taskgen_method, before_method | |||||
@taskgen_method
def add_dbus_file(self, filename, prefix, mode):
	"""
	Register a dbus file for processing; the entries accumulate in *dbus_lst*.

	:param filename: xml file to compile
	:type filename: string
	:param prefix: dbus binding tool prefix (--prefix=prefix)
	:type prefix: string
	:param mode: dbus binding tool mode (--mode=mode)
	:type mode: string
	"""
	try:
		self.dbus_lst.append([filename, prefix, mode])
	except AttributeError:
		self.dbus_lst = [[filename, prefix, mode]]
	# make sure the processing method runs for this task generator
	if 'process_dbus' not in self.meths:
		self.meths.append('process_dbus')
@before_method('apply_core')
def process_dbus(self):
	"""
	Create one :py:class:`waflib.Tools.dbus.dbus_binding_tool` task per entry
	stored in the attribute *dbus_lst*.
	"""
	for xml_file, prefix, mode in getattr(self, 'dbus_lst', []):
		node = self.path.find_resource(xml_file)
		if node is None:
			raise Errors.WafError('file not found ' + xml_file)
		tsk = self.create_task('dbus_binding_tool', node, node.change_ext('.h'))
		# per-task parameters picked up by the run_str substitution
		tsk.env.DBUS_BINDING_TOOL_PREFIX = prefix
		tsk.env.DBUS_BINDING_TOOL_MODE = mode
class dbus_binding_tool(Task.Task):
	"""
	Compile a dbus file
	"""
	color = 'BLUE'
	ext_out = ['.h'] # build-order hint: produces headers
	run_str = '${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}'
	shell = True # temporary workaround for #795
def configure(conf):
	"""
	Detect the program dbus-binding-tool and set *conf.env.DBUS_BINDING_TOOL*.

	:raises: a configuration error if the program cannot be found
	"""
	# find_program stores the result in the env var itself; the previous
	# assignment to an unused local has been removed
	conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL')
@@ -0,0 +1,88 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Carlos Rafael Giani, 2007 (dv) | |||||
# Thomas Nagy, 2008-2010 (ita) | |||||
import sys | |||||
from waflib.Tools import ar, d | |||||
from waflib.Configure import conf | |||||
@conf
def find_dmd(conf):
	"""
	Find the program *dmd*, *dmd2*, or *ldc* and set the variable *D*
	"""
	conf.find_program(['dmd', 'dmd2', 'ldc'], var='D')
	# sanity check: dmd1/dmd2 identify themselves on --help, ldc on -version
	help_out = conf.cmd_and_log(conf.env.D + ['--help'])
	if "D Compiler v" not in help_out:
		version_out = conf.cmd_and_log(conf.env.D + ['-version'])
		if "based on DMD v1." not in version_out:
			conf.fatal("detected compiler is not dmd/ldc")
@conf
def common_flags_ldc(conf):
	"""
	Set the D flags required by *ldc*
	"""
	env = conf.env
	env['DFLAGS'] = ['-d-version=Posix']
	env['DFLAGS_dshlib'] = ['-relocation-model=pic']
	# ldc does not want the dmd-specific link flags
	env['LINKFLAGS'] = []
@conf
def common_flags_dmd(conf):
	"""
	Set the flags required by *dmd* or *dmd2*
	"""
	env = conf.env
	# DFLAGS is left unset on purpose so user wscripts can append to it
	# compilation flags
	env['D_SRC_F'] = ['-c']
	env['D_TGT_F'] = '-of%s'
	# dmd is also used as the linker driver
	env['D_LINKER'] = env['D']
	env['DLNK_SRC_F'] = ''
	env['DLNK_TGT_F'] = '-of%s'
	env['DINC_ST'] = '-I%s'
	# library flags are forwarded to the backend linker through -L
	env['DSHLIB_MARKER'] = env['DSTLIB_MARKER'] = ''
	env['DSTLIB_ST'] = env['DSHLIB_ST'] = '-L-l%s'
	env['DSTLIBPATH_ST'] = env['DLIBPATH_ST'] = '-L-L%s'
	env['LINKFLAGS_dprogram'] = ['-quiet']
	env['DFLAGS_dshlib'] = ['-fPIC']
	env['LINKFLAGS_dshlib'] = ['-L-shared']
	# interface file generation
	env['DHEADER_ext'] = '.di'
	env.DFLAGS_d_with_header = ['-H', '-Hf']
	env['D_HDR_F'] = '%s'
def configure(conf):
	"""
	Configuration for *dmd*, *dmd2*, and *ldc*
	"""
	conf.find_dmd()
	if sys.platform == 'win32':
		# dmd2 on Windows produces objects the other tools cannot handle
		out = conf.cmd_and_log(conf.env.D + ['--help'])
		if "D Compiler v2." in out:
			conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead')
	conf.load('ar')
	conf.load('d')
	conf.common_flags_dmd()
	conf.d_platform_flags()
	if 'ldc' in str(conf.env.D):
		conf.common_flags_ldc()
@@ -0,0 +1,220 @@ | |||||
#! /usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2011 (ita) | |||||
""" | |||||
errcheck: highlight common mistakes | |||||
There is a performance hit, so this tool is only loaded when running "waf -v" | |||||
""" | |||||
# Commonly mistyped task generator attribute names mapped to the correct ones.
# The keys and values are looked up at runtime; do not rename them.
typos = {
	'feature':'features',
	'sources':'source',
	'targets':'target',
	'include':'includes',
	'export_include':'export_includes',
	'define':'defines',
	'importpath':'includes',
	'installpath':'install_path',
	'iscopy':'is_copy',
}
# BuildContext methods wrapped by replace() to detect the typos above
meths_typos = ['__call__', 'program', 'shlib', 'stlib', 'objects']
from waflib import Logs, Build, Node, Task, TaskGen, ConfigSet, Errors, Utils | |||||
import waflib.Tools.ccroot | |||||
def check_same_targets(self):
	"""
	Report build nodes created by more than one task, and distinct tasks sharing
	the same identifier (uid). Errors are written through the Logs module.
	"""
	mp = Utils.defaultdict(list) # output node -> tasks creating it
	uids = {}                    # task uid -> tasks having it

	def check_task(tsk):
		# register the outputs and the uid of a single task
		if not isinstance(tsk, Task.Task):
			return
		for node in tsk.outputs:
			mp[node].append(tsk)
		try:
			uids[tsk.uid()].append(tsk)
		except KeyError:
			uids[tsk.uid()] = [tsk]

	for g in self.groups:
		for tg in g:
			try:
				for tsk in tg.tasks:
					check_task(tsk)
			except AttributeError:
				# raised if not a task generator, which should be uncommon
				check_task(tg)

	dupe = False
	for (k, v) in mp.items():
		if len(v) > 1:
			dupe = True
			msg = '* Node %r is created more than once%s. The task generators are:' % (k, Logs.verbose == 1 and " (full message on 'waf -v -v')" or "")
			Logs.error(msg)
			# use enumerate instead of v.index(x): index() is O(n) per item and
			# returns the first occurrence, which is wrong for duplicate objects
			for idx, x in enumerate(v):
				if Logs.verbose > 1:
					Logs.error(' %d. %r' % (1 + idx, x.generator))
				else:
					Logs.error(' %d. %r in %r' % (1 + idx, x.generator.name, getattr(x.generator, 'path', None)))

	if not dupe:
		# only meaningful when no duplicate outputs were found
		for (k, v) in uids.items():
			if len(v) > 1:
				Logs.error('* Several tasks use the same identifier. Please check the information on\n http://docs.waf.googlecode.com/git/apidocs_16/Task.html#waflib.Task.Task.uid')
				for tsk in v:
					Logs.error(' - object %r (%r) defined in %r' % (tsk.__class__.__name__, tsk, tsk.generator))
def check_invalid_constraints(self):
	"""
	Detect methods annotated both with @extension and with
	@feature/@before_method/@after_method, and order constraints that
	reference unknown task classes.
	"""
	feat = set()
	for x in list(TaskGen.feats.values()):
		# bug fix: set.union() returns a new set and does not modify in place,
		# so the previous feat.union(...) calls silently dropped all entries
		feat |= set(x)
	for (x, y) in TaskGen.task_gen.prec.items():
		feat.add(x)
		feat |= set(y)
	ext = set()
	for x in TaskGen.task_gen.mappings.values():
		ext.add(x.__name__)
	invalid = ext & feat
	if invalid:
		Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method' % list(invalid))

	# the build scripts have been read, so we can check for invalid after/before attributes on task classes
	for cls in list(Task.classes.values()):
		for x in ('before', 'after'):
			for y in Utils.to_list(getattr(cls, x, [])):
				if not Task.classes.get(y, None):
					Logs.error('Erroneous order constraint %r=%r on task class %r' % (x, y, cls.__name__))
		if getattr(cls, 'rule', None):
			Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")' % cls.__name__)
def replace(m):
	"""
	Wrap the BuildContext method *m* so that typos in keyword arguments
	(see the *typos* table) are reported.

	We could add properties, but they would not work in some cases:
	bld.program(...) requires 'source' in the attributes

	:param m: name of the Build.BuildContext method to wrap
	:type m: string
	"""
	oldcall = getattr(Build.BuildContext, m)
	def call(self, *k, **kw):
		ret = oldcall(self, *k, **kw)
		for x in typos:
			if x in kw:
				# 'iscopy' is legitimate on subst task generators
				if x == 'iscopy' and 'subst' in getattr(self, 'features', ''):
					continue
				# removed the dead local 'err = True' (assigned, never read)
				Logs.error('Fix the typo %r -> %r on %r' % (x, typos[x], ret))
		return ret
	setattr(Build.BuildContext, m, call)
def enhance_lib():
	"""
	Modify existing classes and methods of waflib to add the error checks.
	Each section below monkey-patches one entry point.
	"""
	for m in meths_typos:
		replace(m)

	# catch '..' in ant_glob patterns
	def ant_glob(self, *k, **kw):
		if k:
			lst = Utils.to_list(k[0])
			for pat in lst:
				if '..' in pat.split('/'):
					Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'" % k[0])
		if kw.get('remove', True):
			try:
				if self.is_child_of(self.ctx.bldnode) and not kw.get('quiet', False):
					Logs.error('Using ant_glob on the build folder (%r) is dangerous (quiet=True to disable this warning)' % self)
			except AttributeError:
				pass
		return self.old_ant_glob(*k, **kw)
	Node.Node.old_ant_glob = Node.Node.ant_glob
	Node.Node.ant_glob = ant_glob

	# catch conflicting ext_in/ext_out/before/after declarations
	old = Task.is_before
	def is_before(t1, t2):
		ret = old(t1, t2)
		if ret and old(t2, t1):
			Logs.error('Contradictory order constraints in classes %r %r' % (t1, t2))
		return ret
	Task.is_before = is_before

	# check for bld(feature='cshlib') where no 'c' is given - this can be either a mistake or on purpose
	# so we only issue a warning
	def check_err_features(self):
		lst = self.to_list(self.features)
		if 'shlib' in lst:
			Logs.error('feature shlib -> cshlib, dshlib or cxxshlib')
		for x in ('c', 'cxx', 'd', 'fc'):
			if not x in lst and lst and lst[0] in [x+y for y in ('program', 'shlib', 'stlib')]:
				Logs.error('%r features is probably missing %r' % (self, x))
	TaskGen.feature('*')(check_err_features)

	# check for erroneous order constraints
	def check_err_order(self):
		if not hasattr(self, 'rule') and not 'subst' in Utils.to_list(self.features):
			for x in ('before', 'after', 'ext_in', 'ext_out'):
				if hasattr(self, x):
					Logs.warn('Erroneous order constraint %r on non-rule based task generator %r' % (x, self))
		else:
			for x in ('before', 'after'):
				for y in self.to_list(getattr(self, x, [])):
					if not Task.classes.get(y, None):
						Logs.error('Erroneous order constraint %s=%r on %r (no such class)' % (x, y, self))
	TaskGen.feature('*')(check_err_order)

	# check for @extension used with @feature/@before_method/@after_method
	def check_compile(self):
		check_invalid_constraints(self)
		try:
			ret = self.orig_compile()
		finally:
			# always run, so duplicate targets are reported even on failure
			check_same_targets(self)
		return ret
	Build.BuildContext.orig_compile = Build.BuildContext.compile
	Build.BuildContext.compile = check_compile

	# check for invalid build groups #914
	def use_rec(self, name, **kw):
		try:
			y = self.bld.get_tgen_by_name(name)
		except Errors.WafError:
			pass
		else:
			idx = self.bld.get_group_idx(self)
			odx = self.bld.get_group_idx(y)
			if odx > idx:
				msg = "Invalid 'use' across build groups:"
				if Logs.verbose > 1:
					msg += '\n target %r\n uses:\n %r' % (self, y)
				else:
					msg += " %r uses %r (try 'waf -v -v' for the full error)" % (self.name, name)
				raise Errors.WafError(msg)
		self.orig_use_rec(name, **kw)
	TaskGen.task_gen.orig_use_rec = TaskGen.task_gen.use_rec
	TaskGen.task_gen.use_rec = use_rec

	# check for env.append
	def getattri(self, name, default=None):
		if name == 'append' or name == 'add':
			raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique')
		elif name == 'prepend':
			raise Errors.WafError('env.prepend does not exist: use env.prepend_value')
		if name in self.__slots__:
			# bug fix: object.__getattr__ does not exist (it raised an
			# AttributeError about 'object' itself); __getattribute__ raises
			# the intended plain AttributeError when the slot is unset.
			# 'default' is kept in the signature for compatibility but unused.
			return object.__getattribute__(self, name)
		else:
			return self[name]
	ConfigSet.ConfigSet.__getattr__ = getattri
def options(opt):
	"""
	Install the error-check wrappers at option-parsing time
	"""
	enhance_lib()
def configure(conf):
	"""
	This tool needs no configuration step; all the work happens in options()
	"""
	pass
@@ -0,0 +1,198 @@ | |||||
#! /usr/bin/env python | |||||
# encoding: utf-8 | |||||
# DC 2008 | |||||
# Thomas Nagy 2010 (ita) | |||||
""" | |||||
fortran support | |||||
""" | |||||
from waflib import Utils, Task, Logs | |||||
from waflib.Tools import ccroot, fc_config, fc_scan | |||||
from waflib.TaskGen import feature, extension | |||||
from waflib.Configure import conf | |||||
# Declare which configuration variables each fortran feature propagates
# through the 'use' system (consumed by ccroot.propagate_uselib_vars)
ccroot.USELIB_VARS['fc'] = set(['FCFLAGS', 'DEFINES', 'INCLUDES'])
ccroot.USELIB_VARS['fcprogram_test'] = ccroot.USELIB_VARS['fcprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
ccroot.USELIB_VARS['fcshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
ccroot.USELIB_VARS['fcstlib'] = set(['ARFLAGS', 'LINKDEPS'])
@feature('fcprogram', 'fcshlib', 'fcstlib', 'fcprogram_test')
def dummy(self):
	"""
	Empty placeholder kept for the feature declarations (TODO: remove in waf 1.9)
	"""
	pass
@extension('.f', '.f90', '.F', '.F90', '.for', '.FOR')
def fc_hook(self, node):
	"Map the typical Fortran file extensions to a :py:class:`waflib.Tools.fc.fc` compilation task"
	tsk = self.create_compiled_task('fc', node)
	return tsk
@conf
def modfile(conf, name):
	"""
	Turn a module name into the right module file name.
	Defaults to all lower case.

	:param name: Fortran module name
	:type name: string
	:return: the module file name according to conf.env.FC_MOD_CAPITALIZATION
	:rtype: string
	"""
	# each key encodes the capitalization of the name and of the extension
	# bug fix: 'lower.MOD' means lower-case name + upper-case extension;
	# it previously returned name.upper(), duplicating the 'UPPER' entry
	return {'lower'     :name.lower() + '.mod',
		'lower.MOD' :name.lower() + '.MOD',
		'UPPER.mod' :name.upper() + '.mod',
		'UPPER'     :name.upper() + '.MOD'}[conf.env.FC_MOD_CAPITALIZATION or 'lower']
def get_fortran_tasks(tsk):
	"""
	Return the fortran tasks of the same build group as *tsk* that still need
	module processing; tasks carrying 'nomod' or 'mod_fortran_done' are excluded.
	"""
	bld = tsk.generator.bld
	group = bld.get_tasks_group(bld.get_group_idx(tsk.generator))
	def _wanted(t):
		return isinstance(t, fc) and not getattr(t, 'nomod', None) and not getattr(t, 'mod_fortran_done', None)
	return [t for t in group if _wanted(t)]
class fc(Task.Task):
	"""
	The fortran tasks can only run when all fortran tasks in the current group are ready to be executed
	This may cause a deadlock if another fortran task is waiting for something that cannot happen (circular dependency)
	in this case, set the 'nomod=True' on those tasks instances to break the loop
	"""
	color = 'GREEN'
	run_str = '${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()}'
	vars = ["FORTRANMODPATHFLAG"]
	def scan(self):
		"""scanner for fortran dependencies

		:return: tuple (dependency nodes, unresolved names — includes the 'USE@'/'MOD@' markers produced by fc_scan)
		"""
		tmp = fc_scan.fortran_parser(self.generator.includes_nodes)
		tmp.task = self
		tmp.start(self.inputs[0])
		if Logs.verbose:
			Logs.debug('deps: deps for %r: %r; unresolved %r' % (self.inputs, tmp.nodes, tmp.names))
		return (tmp.nodes, tmp.names)
	def runnable_status(self):
		"""
		Set the mod file outputs and the dependencies on the mod files over all the fortran tasks
		executed by the main thread so there are no concurrency issues
		"""
		# the first fortran task of the group does the module bookkeeping below and
		# marks every task with 'mod_fortran_done' so the others take this fast path
		if getattr(self, 'mod_fortran_done', None):
			return super(fc, self).runnable_status()
		# now, if we reach this part it is because this fortran task is the first in the list
		bld = self.generator.bld
		# obtain the fortran tasks
		lst = get_fortran_tasks(self)
		# disable this method for other tasks
		for tsk in lst:
			tsk.mod_fortran_done = True
		# wait for all the .f tasks to be ready for execution
		# and ensure that the scanners are called at least once
		for tsk in lst:
			ret = tsk.runnable_status()
			if ret == Task.ASK_LATER:
				# we have to wait for one of the other fortran tasks to be ready
				# this may deadlock if there are dependencies between the fortran tasks
				# but this should not happen (we are setting them here!)
				for x in lst:
					x.mod_fortran_done = None
				# TODO sort the list of tasks in bld.producer.outstanding to put all fortran tasks at the end
				return Task.ASK_LATER
		# map module-file node ids to the tasks consuming (ins) / producing (outs) them
		ins = Utils.defaultdict(set)
		outs = Utils.defaultdict(set)
		# the .mod files to create
		for tsk in lst:
			key = tsk.uid()
			for x in bld.raw_deps[key]:
				if x.startswith('MOD@'):
					name = bld.modfile(x.replace('MOD@', ''))
					node = bld.srcnode.find_or_declare(name)
					if not hasattr(node, 'sig'):
						node.sig = Utils.SIG_NIL
					tsk.set_outputs(node)
					outs[id(node)].add(tsk)
		# the .mod files to use
		for tsk in lst:
			key = tsk.uid()
			for x in bld.raw_deps[key]:
				if x.startswith('USE@'):
					name = bld.modfile(x.replace('USE@', ''))
					node = bld.srcnode.find_resource(name)
					if node and node not in tsk.outputs:
						if not node in bld.node_deps[key]:
							bld.node_deps[key].append(node)
						ins[id(node)].add(tsk)
		# if the intersection matches, set the order: consumers run after producers
		for k in ins.keys():
			for a in ins[k]:
				a.run_after.update(outs[k])
				# the scanner cannot output nodes, so we have to set them
				# ourselves as task.dep_nodes (additional input nodes)
				tmp = []
				for t in outs[k]:
					tmp.extend(t.outputs)
				a.dep_nodes.extend(tmp)
				# sort for a deterministic task signature
				a.dep_nodes.sort(key=lambda x: x.abspath())
		# the task objects have changed: clear the signature cache
		for tsk in lst:
			try:
				delattr(tsk, 'cache_sig')
			except AttributeError:
				pass
		return super(fc, self).runnable_status()
class fcprogram(ccroot.link_task):
	"""Link fortran programs"""
	color = 'YELLOW'
	# link command template; the FCSTLIB/FCSHLIB markers toggle static/dynamic
	# linking of the listed libraries (see fc_flags in fc_config)
	run_str = '${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB} ${LDFLAGS}'
	inst_to = '${BINDIR}'
class fcshlib(fcprogram):
	"""Link fortran libraries"""
	# same link rule as fcprogram; only the installation directory differs
	inst_to = '${LIBDIR}'
class fcprogram_test(fcprogram):
	"""Custom link task to obtain the compiler outputs for fortran configuration tests"""
	def runnable_status(self):
		"""This task is always executed"""
		ret = super(fcprogram_test, self).runnable_status()
		# never skip: configuration tests must run every time
		return Task.RUN_ME if ret == Task.SKIP_ME else ret
	def exec_command(self, cmd, **kw):
		"""Store the compiler std our/err onto the build context, to bld.out + bld.err"""
		bld = self.generator.bld
		# capture both streams, run in the build directory, suppress default output handling
		kw['shell'] = isinstance(cmd, str)
		kw['stdout'] = kw['stderr'] = Utils.subprocess.PIPE
		kw['cwd'] = bld.variant_dir
		kw['output'] = 0
		bld.out = bld.err = ''
		bld.to_log('command: %s\n' % cmd)
		try:
			(bld.out, bld.err) = bld.cmd_and_log(cmd, **kw)
		except Exception:
			return -1
		if bld.out:
			bld.to_log("out: %s\n" % bld.out)
		if bld.err:
			bld.to_log("err: %s\n" % bld.err)
class fcstlib(ccroot.stlink_task):
	"""Link fortran static libraries (uses ar by default)"""
	# all behavior is inherited from ccroot.stlink_task
	pass # do not remove the pass statement
@@ -0,0 +1,470 @@ | |||||
#! /usr/bin/env python | |||||
# encoding: utf-8 | |||||
# DC 2008 | |||||
# Thomas Nagy 2010 (ita) | |||||
""" | |||||
Fortran configuration helpers | |||||
""" | |||||
import re, os, sys, shlex | |||||
from waflib.Configure import conf | |||||
from waflib.TaskGen import feature, before_method | |||||
from waflib import Utils | |||||
# minimal free-form program used by most compilation checks below
FC_FRAGMENT = ' program main\n end program main\n'
# fixed-form variant used by the verbose-flag and C-library checks
# NOTE(review): the two fragments look interchangeable in practice — confirm before merging them
FC_FRAGMENT2 = ' PROGRAM MAIN\n END\n'
@conf
def fc_flags(conf):
	"""
	Define common fortran configuration flags and file extensions
	"""
	v = conf.env
	# compilation: flag templates for sources/targets, include paths and defines
	v['FC_SRC_F']    = []
	v['FC_TGT_F']    = ['-c', '-o']
	v['FCINCPATH_ST']  = '-I%s'
	v['FCDEFINES_ST']  = '-D%s'
	# linker: default to the compiler driver when no dedicated linker is set
	if not v['LINK_FC']: v['LINK_FC'] = v['FC']
	v['FCLNK_SRC_F'] = []
	v['FCLNK_TGT_F'] = ['-o']
	# shared/static library flags and file-name patterns
	v['FCFLAGS_fcshlib']   = ['-fpic']
	v['LINKFLAGS_fcshlib'] = ['-shared']
	v['fcshlib_PATTERN']   = 'lib%s.so'
	v['fcstlib_PATTERN']   = 'lib%s.a'
	# library linking templates; the markers switch the linker between static and dynamic mode
	v['FCLIB_ST']       = '-l%s'
	v['FCLIBPATH_ST']   = '-L%s'
	v['FCSTLIB_ST']     = '-l%s'
	v['FCSTLIBPATH_ST'] = '-L%s'
	v['FCSTLIB_MARKER'] = '-Wl,-Bstatic'
	v['FCSHLIB_MARKER'] = '-Wl,-Bdynamic'
	v['SONAME_ST']      = '-Wl,-h,%s'
@conf
def fc_add_flags(conf):
	"""
	Add FCFLAGS / LDFLAGS / LINKFLAGS from os.environ to conf.env
	"""
	# lets users influence the configuration through environment variables
	conf.add_os_flags('FCFLAGS')
	conf.add_os_flags('LINKFLAGS')
	conf.add_os_flags('LDFLAGS')
@conf
def check_fortran(self, *k, **kw):
	"""See if the fortran compiler works by compiling a simple fortran program

	Raises a configuration error on failure.
	NOTE(review): the extra positional/keyword arguments are accepted but ignored.
	"""
	self.check_cc(
		fragment         = FC_FRAGMENT,
		compile_filename = 'test.f',
		features         = 'fc fcprogram',
		msg              = 'Compiling a simple fortran app')
@conf
def check_fc(self, *k, **kw):
	"""
	Same as :py:func:`waflib.Tools.c_config.check` but default to the *Fortran* programming language
	(Overriding the C defaults in :py:func:`waflib.Tools.c_config.validate_c` here)
	"""
	kw['compiler'] = 'fc'
	# fill in the fortran defaults without clobbering caller-provided values
	kw.setdefault('compile_mode', 'fc')
	kw.setdefault('type', 'fcprogram')
	kw.setdefault('compile_filename', 'test.f90')
	kw.setdefault('code', FC_FRAGMENT)
	return self.check(*k, **kw)
# ------------------------------------------------------------------------ | |||||
# --- These are the default platform modifiers, refactored here for | |||||
# convenience. gfortran and g95 have much overlap. | |||||
# ------------------------------------------------------------------------ | |||||
@conf
def fortran_modifier_darwin(conf):
	"""
	Define fortran flags and extensions for the OSX systems
	"""
	v = conf.env
	v['FCFLAGS_fcshlib']   = ['-fPIC']
	v['LINKFLAGS_fcshlib'] = ['-dynamiclib', '-Wl,-compatibility_version,1', '-Wl,-current_version,1']
	v['fcshlib_PATTERN']   = 'lib%s.dylib'
	v['FRAMEWORKPATH_ST']  = '-F%s'
	v['FRAMEWORK_ST']      = '-framework %s'
	v['LINKFLAGS_fcstlib'] = []
	# the darwin linker has no -Bstatic/-Bdynamic switches and no -h soname flag
	v['FCSHLIB_MARKER']    = ''
	v['FCSTLIB_MARKER']    = ''
	v['SONAME_ST']         = ''
@conf
def fortran_modifier_win32(conf):
	"""Define fortran flags for the windows platforms"""
	v = conf.env
	v['fcprogram_PATTERN'] = v['fcprogram_test_PATTERN'] = '%s.exe'
	v['fcshlib_PATTERN']   = '%s.dll'
	v['implib_PATTERN']    = 'lib%s.dll.a'
	v['IMPLIB_ST']         = '-Wl,--out-implib,%s'
	# -fpic is meaningless on windows; replace it with a DLL-export define
	v['FCFLAGS_fcshlib']   = []
	v.append_value('FCFLAGS_fcshlib', ['-DDLL_EXPORT']) # TODO adding nonstandard defines like this DLL_EXPORT is not a good idea
	# Auto-import is enabled by default even without this option,
	# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
	# that the linker emits otherwise.
	v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import'])
@conf
def fortran_modifier_cygwin(conf):
	"""Define fortran flags for use on cygwin"""
	# start from the win32 settings, then apply the cygwin-specific overrides
	fortran_modifier_win32(conf)
	v = conf.env
	v['fcshlib_PATTERN'] = 'cyg%s.dll'
	v.append_value('LINKFLAGS_fcshlib', ['-Wl,--enable-auto-image-base'])
	v['FCFLAGS_fcshlib'] = []
# ------------------------------------------------------------------------ | |||||
@conf
def check_fortran_dummy_main(self, *k, **kw):
	"""
	Guess if a main function is needed by compiling a code snippet with
	the C compiler and link with the Fortran compiler

	Sets env.FC_MAIN to the required C entry-point name, or to -1 when
	no dummy main is needed. Raises a fatal error when no scheme works.

	TODO: (DC)
	- handling dialects (F77, F90, etc... -> needs core support first)
	- fix dummy main check (AC_FC_DUMMY_MAIN vs AC_FC_MAIN)
	TODO: what does the above mean? (ita)
	"""
	if not self.env.CC:
		self.fatal('A c compiler is required for check_fortran_dummy_main')
	# candidate entry-point names the fortran runtime may expect, plus
	# their lower-case variants and finally '' (no dummy main required)
	lst = ['MAIN__', '__MAIN', '_MAIN', 'MAIN_', 'MAIN']
	lst.extend([m.lower() for m in lst])
	lst.append('')
	self.start_msg('Detecting whether we need a dummy main')
	for main in lst:
		kw['fortran_main'] = main
		try:
			self.check_cc(
				fragment = 'int %s() { return 0; }\n' % (main or 'test'),
				features = 'c fcprogram',
				mandatory = True
			)
			if not main:
				# linked without any of the known names: no dummy main needed
				self.env.FC_MAIN = -1
				self.end_msg('no')
			else:
				self.env.FC_MAIN = main
				self.end_msg('yes %s' % main)
			break
		except self.errors.ConfigurationError:
			pass
	else:
		# no candidate linked successfully
		self.end_msg('not found')
		self.fatal('could not detect whether fortran requires a dummy main, see the config.log')
# ------------------------------------------------------------------------ | |||||
# regexes used to sift the verbose linker output:
# - GCC_DRIVER_LINE: lines echoing the compiler driver invocation (to be skipped)
# - POSIX_STATIC_EXT: a path to a static archive (something.a)
# - POSIX_LIB_FLAGS: a -l library flag
# raw strings avoid the invalid escape sequences (\S, \.) that plain
# string literals produce a DeprecationWarning/SyntaxWarning for on modern Python
GCC_DRIVER_LINE = re.compile(r'^Driving:')
POSIX_STATIC_EXT = re.compile(r'\S+\.a')
POSIX_LIB_FLAGS = re.compile(r'-l\S+')
@conf
def is_link_verbose(self, txt):
	"""Return True if 'useful' link options can be found in txt"""
	assert isinstance(txt, str)
	for candidate in txt.splitlines():
		# skip lines that merely echo the compiler driver invocation
		if GCC_DRIVER_LINE.search(candidate):
			continue
		if POSIX_STATIC_EXT.search(candidate) or POSIX_LIB_FLAGS.search(candidate):
			return True
	return False
@conf
def check_fortran_verbose_flag(self, *k, **kw):
	"""
	Check what kind of verbose (-v) flag works, then set it to env.FC_VERBOSE_FLAG

	:return: the working flag
	:raises: a fatal configuration error when no candidate flag produces useful link output
	"""
	self.start_msg('fortran link verbose flag')
	for x in ('-v', '--verbose', '-verbose', '-V'):
		try:
			self.check_cc(
				features = 'fc fcprogram_test',
				fragment = FC_FRAGMENT2,
				compile_filename = 'test.f',
				linkflags = [x],
				mandatory=True
				)
		except self.errors.ConfigurationError:
			pass
		else:
			# output is on stderr or stdout (for xlf)
			if self.is_link_verbose(self.test_bld.err) or self.is_link_verbose(self.test_bld.out):
				self.end_msg(x)
				break
	else:
		self.end_msg('failure')
		self.fatal('Could not obtain the fortran link verbose flag (see config.log)')
	# 'x' is the flag bound when the loop above broke out
	self.env.FC_VERBOSE_FLAG = x
	return x
# ------------------------------------------------------------------------ | |||||
# linkflags which match those are ignored
# NOTE(review): these are compiled as *regexes* below, so a trailing '*' means
# "repeat the previous character" (e.g. r'-lang*' also matches '-lan'), not a
# shell wildcard — the patterns look glob-intended; confirm before tightening
LINKFLAGS_IGNORED = [r'-lang*', r'-lcrt[a-zA-Z0-9\.]*\.o', r'-lc$', r'-lSystem', r'-libmil', r'-LIST:*', r'-LNO:*']
if os.name == 'nt':
	# windows runtime and system libraries added implicitly by the toolchain
	LINKFLAGS_IGNORED.extend([r'-lfrt*', r'-luser32', r'-lkernel32', r'-ladvapi32', r'-lmsvcrt', r'-lshell32', r'-lmingw', r'-lmoldname'])
else:
	LINKFLAGS_IGNORED.append(r'-lgcc*')
RLINKFLAGS_IGNORED = [re.compile(f) for f in LINKFLAGS_IGNORED]
def _match_ignore(line):
	"""Returns True if the line should be ignored (fortran test for verbosity)."""
	return any(pattern.match(line) for pattern in RLINKFLAGS_IGNORED)
def parse_fortran_link(lines):
	"""Given the output of verbose link of Fortran compiler, this returns a
	list of flags necessary for linking using the standard linker."""
	# TODO: On windows ?
	final_flags = []
	# lines echoing the driver invocation carry no link information
	relevant = (entry for entry in lines if not GCC_DRIVER_LINE.match(entry))
	for entry in relevant:
		_parse_flink_line(entry, final_flags)
	return final_flags
# SPACE_OPTS: flags whose argument is the *next* token (e.g. '-L /path');
# NOSPACE_OPTS: flags with the argument glued on (e.g. '-L/path', '-R/path')
SPACE_OPTS = re.compile('^-[LRuYz]$')
NOSPACE_OPTS = re.compile('^-[RL]')
def _parse_flink_token(lexer, token, tmp_flags):
	"""Classify one verbose-link token, appending the flags worth keeping to tmp_flags.

	:param lexer: shlex lexer positioned after *token*
	:param token: the token to classify
	:param tmp_flags: output list of retained flags (mutated in place)
	:return: the next token from the lexer (falsy at end of line)
	"""
	# Here we go (convention for wildcard is shell, not regex !)
	#   1 TODO: we first get some root .a libraries
	#   2 TODO: take everything starting by -bI:*
	#   3 Ignore the following flags: -lang* | -lcrt*.o | -lc |
	#     -lgcc* | -lSystem | -libmil | -LANG:=* | -LIST:* | -LNO:*)
	#   4 take into account -lkernel32
	#   5 For options of the kind -[[LRuYz]], as they take one argument
	#     after, the actual option is the next token
	#   6 For -YP,*: take and replace by -Larg where arg is the old
	#     argument
	#   7 For -[lLR]*: take
	# step 3
	if _match_ignore(token):
		pass
	# step 4
	elif token.startswith('-lkernel32') and sys.platform == 'cygwin':
		tmp_flags.append(token)
	# step 5
	elif SPACE_OPTS.match(token):
		# the argument is the next token; '-YP,dir1:dir2' expands to -L flags (step 6)
		t = lexer.get_token()
		if t.startswith('P,'):
			t = t[2:]
		for opt in t.split(os.pathsep):
			tmp_flags.append('-L%s' % opt)
	# step 6
	elif NOSPACE_OPTS.match(token):
		tmp_flags.append(token)
	# step 7
	elif POSIX_LIB_FLAGS.match(token):
		tmp_flags.append(token)
	else:
		# ignore anything not explicitely taken into account
		pass
	t = lexer.get_token()
	return t
def _parse_flink_line(line, final_flags):
	"""Tokenize one verbose-link line and append the retained link flags (private)."""
	lexer = shlex.shlex(line, posix=True)
	lexer.whitespace_split = True
	collected = []
	token = lexer.get_token()
	# _parse_flink_token consumes the current token and hands back the next one
	while token:
		token = _parse_flink_token(lexer, token, collected)
	final_flags.extend(collected)
	return final_flags
@conf
def check_fortran_clib(self, autoadd=True, *k, **kw):
	"""
	Obtain the flags for linking with the C library
	if this check works, add uselib='CLIB' to your task generators

	:param autoadd: unused, kept for backward compatibility
	:return: the flags, also stored in ``env.LINKFLAGS_CLIB`` (an empty list when the check fails and mandatory=False)
	"""
	if not self.env.FC_VERBOSE_FLAG:
		self.fatal('env.FC_VERBOSE_FLAG is not set: execute check_fortran_verbose_flag?')
	self.start_msg('Getting fortran runtime link flags')
	try:
		self.check_cc(
			fragment = FC_FRAGMENT2,
			compile_filename = 'test.f',
			features = 'fc fcprogram_test',
			linkflags = [self.env.FC_VERBOSE_FLAG]
		)
	except Exception:
		self.end_msg(False)
		if kw.get('mandatory', True):
			# bugfix: this used to call 'conf.fatal', but 'conf' is not defined in
			# this scope — the configuration context here is 'self'
			self.fatal('Could not find the c library flags')
	else:
		# the flags are extracted from the verbose linker output on stderr
		out = self.test_bld.err
		flags = parse_fortran_link(out.splitlines())
		self.end_msg('ok (%s)' % ' '.join(flags))
		self.env.LINKFLAGS_CLIB = flags
		return flags
	return []
def getoutput(conf, cmd, stdin=False):
	"""
	Run a command and return its decoded (stdout, stderr); used to probe compiler versions.

	:param conf: configuration context (provides env and fatal)
	:param cmd: command to execute, as a list
	:param stdin: when True, feed a single newline to the process on stdin
	:return: tuple (stdout, stderr) as str
	TODO a bit redundant, can be removed anytime
	"""
	if stdin:
		stdin = Utils.subprocess.PIPE
	else:
		stdin = None
	env = conf.env.env or None
	try:
		p = Utils.subprocess.Popen(cmd, stdin=stdin, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=env)
		# feed the input through communicate() rather than writing to p.stdin
		# first: a separate write can deadlock if the child fills its output
		# pipes before reading stdin
		if stdin:
			(out, err) = p.communicate('\n'.encode())
		else:
			(out, err) = p.communicate()
	except Exception:
		conf.fatal('could not determine the compiler version %r' % cmd)
	if not isinstance(out, str):
		out = out.decode(sys.stdout.encoding or 'iso8859-1')
	if not isinstance(err, str):
		err = err.decode(sys.stdout.encoding or 'iso8859-1')
	return (out, err)
# ------------------------------------------------------------------------ | |||||
# fortran routines used by check_fortran_mangling: one plain name and one with
# an embedded underscore, since compilers mangle those two cases differently
ROUTINES_CODE = """\
      subroutine foobar()
      return
      end
      subroutine foo_bar()
      return
      end
"""
# C driver template; the %()s placeholders are filled with the mangled names
# and the entry-point name detected by check_fortran_dummy_main
MAIN_CODE = """
void %(dummy_func_nounder)s(void);
void %(dummy_func_under)s(void);
int %(main_func_name)s() {
  %(dummy_func_nounder)s();
  %(dummy_func_under)s();
  return 0;
}
"""
@feature('link_main_routines_func')
@before_method('process_source')
def link_main_routines_tg_method(self):
	"""
	The configuration test declares a unique task generator,
	so we create other task generators from there for fortran link tests
	"""
	def write_test_file(task):
		# rule writing the generated source (the 'code' attribute) to the target node
		task.outputs[0].write(task.generator.code)
	bld = self.bld
	# generate the two sources, build the fortran routines into a static
	# library, then link the C driver against it
	bld(rule=write_test_file, target='main.c', code=MAIN_CODE % self.__dict__)
	bld(rule=write_test_file, target='test.f', code=ROUTINES_CODE)
	bld(features='fc fcstlib', source='test.f', target='test')
	bld(features='c fcprogram', source='main.c', target='app', use='test')
def mangling_schemes():
	"""
	Generate triplets for use with mangle_name
	(used in check_fortran_mangling)
	the order is tuned for gfortran
	"""
	for under in ('_', ''):
		for double_under in ('', '_'):
			for case in ("lower", "upper"):
				yield (under, double_under, case)
def mangle_name(u, du, c, name):
	"""Mangle a name from a triplet (used in check_fortran_mangling)"""
	base = getattr(name, c)()
	# the extra underscore (du) only applies to names containing an underscore
	suffix = du if '_' in name else ''
	return base + u + suffix
@conf
def check_fortran_mangling(self, *k, **kw):
	"""
	Detect the mangling scheme, sets FORTRAN_MANGLING to the triplet found
	This test will compile a fortran static library, then link a c app against it

	:return: the (underscore, double-underscore, case) triplet
	:raises: a fatal configuration error when no scheme links successfully
	"""
	if not self.env.CC:
		self.fatal('A c compiler is required for link_main_routines')
	if not self.env.FC:
		self.fatal('A fortran compiler is required for link_main_routines')
	if not self.env.FC_MAIN:
		self.fatal('Checking for mangling requires self.env.FC_MAIN (execute "check_fortran_dummy_main" first?)')
	self.start_msg('Getting fortran mangling scheme')
	for (u, du, c) in mangling_schemes():
		try:
			self.check_cc(
				compile_filename = [],
				features         = 'link_main_routines_func',
				msg              = 'nomsg',
				errmsg           = 'nomsg',
				mandatory=True,
				dummy_func_nounder = mangle_name(u, du, c, "foobar"),
				dummy_func_under   = mangle_name(u, du, c, "foo_bar"),
				main_func_name     = self.env.FC_MAIN
			)
		except self.errors.ConfigurationError:
			pass
		else:
			self.end_msg("ok ('%s', '%s', '%s-case')" % (u, du, c))
			self.env.FORTRAN_MANGLING = (u, du, c)
			break
	else:
		self.end_msg(False)
		self.fatal('mangler not found')
	# (u, du, c) are bound from the loop iteration that broke out above
	return (u, du, c)
@feature('pyext')
@before_method('propagate_uselib_vars', 'apply_link')
def set_lib_pat(self):
	"""Set the fortran flags for linking with the python library"""
	# python extensions must follow the platform's pyext file-name pattern
	self.env['fcshlib_PATTERN'] = self.env['pyext_PATTERN']
@conf
def detect_openmp(self):
	"""Try the OpenMP flags of the various compilers; store the working one under uselib 'OPENMP'"""
	for flag in ('-fopenmp', '-openmp', '-mp', '-xopenmp', '-omp', '-qsmp=omp'):
		try:
			self.check_fc(
				msg='Checking for OpenMP flag %s' % flag,
				fragment='program main\n call omp_get_num_threads()\nend program main',
				fcflags=flag,
				linkflags=flag,
				uselib_store='OPENMP'
			)
		except self.errors.ConfigurationError:
			continue
		# the first flag that compiles and links wins
		return
	self.fatal('Could not find OpenMP')
@@ -0,0 +1,121 @@ | |||||
#! /usr/bin/env python | |||||
# encoding: utf-8 | |||||
# DC 2008 | |||||
# Thomas Nagy 2010 (ita) | |||||
import re | |||||
from waflib import Utils, Task, TaskGen, Logs | |||||
from waflib.TaskGen import feature, before_method, after_method, extension | |||||
from waflib.Configure import conf | |||||
# regexes for the three fortran statements the scanner cares about, matched
# case-insensitively line by line:
# - INCLUDE 'file' (quoted or angle-bracketed, optional preprocessor '#')
# - USE module (optionally ', INTRINSIC ::' / ', NON_INTRINSIC ::')
# - MODULE name (excluding 'MODULE PROCEDURE' which defines no module)
# raw strings avoid the invalid escape sequences (\s, \w) that plain literals
# produce a DeprecationWarning/SyntaxWarning for on modern Python
INC_REGEX = r"""(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
USE_REGEX = r"""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
MOD_REGEX = r"""(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
re_inc = re.compile(INC_REGEX, re.I)
re_use = re.compile(USE_REGEX, re.I)
re_mod = re.compile(MOD_REGEX, re.I)
class fortran_parser(object):
	"""
	This parser will return:

	* the nodes corresponding to the module names that will be produced
	* the nodes corresponding to the include files used
	* the module names used by the fortran file
	"""
	def __init__(self, incpaths):
		self.seen = []
		"""Files already parsed"""

		self.nodes = []
		"""List of :py:class:`waflib.Node.Node` representing the dependencies to return"""

		self.names = []
		"""List of module names to return"""

		self.incpaths = incpaths
		"""List of :py:class:`waflib.Node.Node` representing the include paths"""

	def find_deps(self, node):
		"""
		Parse a fortran file to read the dependencies used and provided

		:param node: fortran file to read
		:type node: :py:class:`waflib.Node.Node`
		:return: lists representing the includes, the modules used, and the modules created by a fortran file
		:rtype: tuple of list of strings
		"""
		txt = node.read()
		incs = []
		uses = []
		mods = []
		for line in txt.splitlines():
			# line by line regexp search? optimize?
			m = re_inc.search(line)
			if m:
				incs.append(m.group(1))
			m = re_use.search(line)
			if m:
				uses.append(m.group(1))
			m = re_mod.search(line)
			if m:
				mods.append(m.group(1))
		return (incs, uses, mods)

	def start(self, node):
		"""
		Start the parsing. Use the stack self.waiting to hold the nodes to iterate on

		:param node: fortran file
		:type node: :py:class:`waflib.Node.Node`
		"""
		self.waiting = [node]
		# breadth-first traversal: tryfind_header() pushes resolved includes back onto the queue
		while self.waiting:
			nd = self.waiting.pop(0)
			self.iter(nd)

	def iter(self, node):
		"""
		Process a single file in the search for dependencies, extract the files used
		the modules used, and the modules provided.

		Used modules are recorded as 'USE@name' and provided ones as 'MOD@name';
		the build context resolves these markers to .mod files later (see fc.runnable_status).
		"""
		# NOTE(review): 'path' is computed but never used
		path = node.abspath()
		incs, uses, mods = self.find_deps(node)
		for x in incs:
			if x in self.seen:
				continue
			self.seen.append(x)
			self.tryfind_header(x)

		for x in uses:
			name = "USE@%s" % x
			if not name in self.names:
				self.names.append(name)

		for x in mods:
			name = "MOD@%s" % x
			if not name in self.names:
				self.names.append(name)

	def tryfind_header(self, filename):
		"""
		Try to find an include and add it the nodes to process

		:param filename: file name
		:type filename: string
		"""
		found = None
		for n in self.incpaths:
			found = n.find_resource(filename)
			if found:
				self.nodes.append(found)
				self.waiting.append(found)
				break
		if not found:
			# unresolved include: return it as a plain name for the build context
			if not filename in self.names:
				self.names.append(filename)
@@ -0,0 +1,53 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# John O'Meara, 2006 | |||||
# Thomas Nagy, 2006-2010 (ita) | |||||
""" | |||||
The **flex** program is a code generator which creates C or C++ files. | |||||
The generated files are compiled into object files. | |||||
""" | |||||
import waflib.TaskGen, os, re | |||||
def decide_ext(self, node):
	"""Pick the generated file extension: .lex.cc for C++ task generators, .lex.c otherwise"""
	return ['.lex.cc'] if 'cxx' in self.features else ['.lex.c']
def flexfun(tsk):
	"""Run flex and write its stdout into the target file (rule for the 'flex' chain).

	Implemented as a python function rather than a command string so the
	output can be captured and its line endings normalized (issue #1207).
	"""
	env = tsk.env
	bld = tsk.generator.bld
	wd = bld.variant_dir
	def to_list(xx):
		if isinstance(xx, str): return [xx]
		return xx
	# keep the command line for the log (last_cmd) while building it
	tsk.last_cmd = lst = []
	lst.extend(to_list(env['FLEX']))
	lst.extend(to_list(env['FLEXFLAGS']))
	inputs = [a.path_from(bld.bldnode) for a in tsk.inputs]
	if env.FLEX_MSYS:
		# MSYS flex expects forward slashes in input paths
		inputs = [x.replace(os.sep, '/') for x in inputs]
	lst.extend(inputs)
	lst = [x for x in lst if x]
	txt = bld.cmd_and_log(lst, cwd=wd, env=env.env or None, quiet=0)
	tsk.outputs[0].write(txt.replace('\r\n', '\n').replace('\r', '\n')) # issue #1207
# register the .l -> .lex.c/.lex.cc transformation; 'decider' selects the
# output extension from the task generator features
waflib.TaskGen.declare_chain(
	name = 'flex',
	rule = flexfun, # issue #854
	ext_in = '.l',
	decider = decide_ext,
)
def configure(conf):
	"""
	Detect the *flex* program
	"""
	conf.find_program('flex', var='FLEX')
	# -t writes the scanner to stdout, which flexfun captures
	conf.env.FLEXFLAGS = ['-t']
	# detect the MSYS-shipped flex from its windows-style installation path;
	# it requires forward-slash input paths (handled in flexfun)
	if re.search (r"\\msys\\[0-9.]+\\bin\\flex.exe$", conf.env.FLEX[0]):
		# this is the flex shipped with MSYS
		conf.env.FLEX_MSYS = True
@@ -0,0 +1,66 @@ | |||||
#! /usr/bin/env python | |||||
# encoding: utf-8 | |||||
# KWS 2010 | |||||
# Thomas Nagy 2010 (ita) | |||||
import re | |||||
from waflib import Utils | |||||
from waflib.Tools import fc, fc_config, fc_scan, ar | |||||
from waflib.Configure import conf | |||||
@conf
def find_g95(conf):
	"""Find the g95 compiler; sets env.FC, env.FC_VERSION (via get_g95_version) and env.FC_NAME"""
	fc = conf.find_program('g95', var='FC')
	conf.get_g95_version(fc)
	conf.env.FC_NAME = 'G95'
@conf
def g95_flags(conf):
	"""Define the g95-specific configuration flags"""
	v = conf.env
	v['FCFLAGS_fcshlib']   = ['-fPIC']
	v['FORTRANMODFLAG']  = ['-fmod=', ''] # template for module path
	v['FCFLAGS_DEBUG'] = ['-Werror'] # why not
@conf
def g95_modifier_win32(conf):
	"""Apply the generic fortran windows settings to g95"""
	fc_config.fortran_modifier_win32(conf)
@conf
def g95_modifier_cygwin(conf):
	"""Apply the generic fortran cygwin settings to g95"""
	fc_config.fortran_modifier_cygwin(conf)
@conf
def g95_modifier_darwin(conf):
	"""Apply the generic fortran OSX settings to g95"""
	fc_config.fortran_modifier_darwin(conf)
@conf
def g95_modifier_platform(conf):
	"""Apply the g95 settings matching the destination platform, when such a method exists"""
	dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
	modifier = getattr(conf, 'g95_modifier_' + dest_os, None)
	if modifier:
		modifier()
@conf
def get_g95_version(conf, fc):
	"""get the compiler version

	:param fc: the g95 command, as a list
	Sets env.FC_VERSION to a (major, minor) tuple of strings, or aborts.
	"""
	version_re = re.compile(r"g95\s*(?P<major>\d*)\.(?P<minor>\d*)").search
	cmd = fc + ['--version']
	out, err = fc_config.getoutput(conf, cmd, stdin=False)
	# the version banner may be printed on stdout or stderr
	if out:
		match = version_re(out)
	else:
		match = version_re(err)
	if not match:
		conf.fatal('cannot determine g95 version')
	k = match.groupdict()
	conf.env['FC_VERSION'] = (k['major'], k['minor'])
def configure(conf):
	"""Configure the g95 fortran compiler: detection, common flags, platform tweaks"""
	conf.find_g95()
	conf.find_ar()
	conf.fc_flags()
	conf.fc_add_flags()
	conf.g95_flags()
	conf.g95_modifier_platform()
@@ -0,0 +1,18 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2008-2010 (ita) | |||||
"Detect as/gas/gcc for compiling assembly files" | |||||
import waflib.Tools.asm # - leave this | |||||
from waflib.Tools import ar | |||||
def configure(conf):
	"""
	Find the programs gas/as/gcc and set the variable *AS*
	"""
	# NOTE(review): only 'gas' and 'gcc' are probed here despite the docstring;
	# gcc in driver mode assembles .s files just as well
	conf.find_program(['gas', 'gcc'], var='AS')
	conf.env.AS_TGT_F = ['-c', '-o']
	conf.env.ASLNK_TGT_F = ['-o']
	conf.find_ar()
	conf.load('asm')
@@ -0,0 +1,161 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2006-2010 (ita) | |||||
# Ralf Habacker, 2006 (rh) | |||||
# Yinon Ehrlich, 2009 | |||||
""" | |||||
gcc/llvm detection. | |||||
""" | |||||
import os, sys | |||||
from waflib import Configure, Options, Utils | |||||
from waflib.Tools import ccroot, ar | |||||
from waflib.Configure import conf | |||||
@conf
def find_gcc(conf):
	"""
	Find the program gcc, and if present, try to detect its version number
	"""
	cc = conf.find_program(['gcc', 'cc'], var='CC')
	# gcc=True enables the gcc-specific version probing in get_cc_version
	conf.get_cc_version(cc, gcc=True)
	conf.env.CC_NAME = 'gcc'
@conf
def gcc_common_flags(conf):
	"""
	Common flags for gcc on nearly all platforms
	"""
	v = conf.env

	v['CC_SRC_F']            = []
	v['CC_TGT_F']            = ['-c', '-o']

	# linker
	if not v['LINK_CC']: v['LINK_CC'] = v['CC']
	v['CCLNK_SRC_F']         = []
	v['CCLNK_TGT_F']         = ['-o']
	v['CPPPATH_ST']          = '-I%s'
	v['DEFINES_ST']          = '-D%s'

	v['LIB_ST']              = '-l%s' # template for adding libs
	v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
	v['STLIB_ST']            = '-l%s'
	v['STLIBPATH_ST']        = '-L%s'
	v['RPATH_ST']            = '-Wl,-rpath,%s'

	v['SONAME_ST']           = '-Wl,-h,%s'
	v['SHLIB_MARKER']        = '-Wl,-Bdynamic'
	v['STLIB_MARKER']        = '-Wl,-Bstatic'

	# program
	v['cprogram_PATTERN']    = '%s'

	# shared library
	v['CFLAGS_cshlib']       = ['-fPIC']
	v['LINKFLAGS_cshlib']    = ['-shared']
	v['cshlib_PATTERN']      = 'lib%s.so'

	# static lib
	v['LINKFLAGS_cstlib']    = ['-Wl,-Bstatic']
	v['cstlib_PATTERN']      = 'lib%s.a'

	# osx stuff
	v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
	v['CFLAGS_MACBUNDLE']    = ['-fPIC']
	v['macbundle_PATTERN']   = '%s.bundle'
@conf
def gcc_modifier_win32(conf):
	"""Configuration flags for executing gcc on Windows"""
	v = conf.env
	v['cprogram_PATTERN']    = '%s.exe'

	v['cshlib_PATTERN']      = '%s.dll'
	v['implib_PATTERN']      = 'lib%s.dll.a'
	v['IMPLIB_ST']           = '-Wl,--out-implib,%s'

	# -fPIC is meaningless for windows DLLs
	v['CFLAGS_cshlib']       = []

	# Auto-import is enabled by default even without this option,
	# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
	# that the linker emits otherwise.
	v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import'])
@conf
def gcc_modifier_cygwin(conf):
	"""Configuration flags for executing gcc on Cygwin"""
	# start from the win32 settings, then apply the cygwin-specific overrides
	gcc_modifier_win32(conf)
	v = conf.env
	v['cshlib_PATTERN'] = 'cyg%s.dll'
	v.append_value('LINKFLAGS_cshlib', ['-Wl,--enable-auto-image-base'])
	v['CFLAGS_cshlib'] = []
@conf
def gcc_modifier_darwin(conf):
	"""Configuration flags for executing gcc on MacOS"""
	v = conf.env
	v['CFLAGS_cshlib']       = ['-fPIC']
	v['LINKFLAGS_cshlib']    = ['-dynamiclib', '-Wl,-compatibility_version,1', '-Wl,-current_version,1']
	v['cshlib_PATTERN']      = 'lib%s.dylib'
	v['FRAMEWORKPATH_ST']    = '-F%s'
	v['FRAMEWORK_ST']        = ['-framework']
	v['ARCH_ST']             = ['-arch']

	v['LINKFLAGS_cstlib']    = []

	# the darwin linker has no -Bstatic/-Bdynamic switches and no -h soname flag
	v['SHLIB_MARKER']        = []
	v['STLIB_MARKER']        = []
	v['SONAME_ST']           = []
@conf
def gcc_modifier_aix(conf):
	"""Configuration flags for executing gcc on AIX"""
	v = conf.env
	# -brtl enables runtime linking on the AIX linker
	v['LINKFLAGS_cprogram']  = ['-Wl,-brtl']
	v['LINKFLAGS_cshlib']    = ['-shared','-Wl,-brtl,-bexpfull']
	v['SHLIB_MARKER']        = []
@conf
def gcc_modifier_hpux(conf):
    """Adjust the gcc environment for HP-UX (.sl shared libraries, PIC flags)."""
    env = conf.env
    env.SHLIB_MARKER = []
    env.STLIB_MARKER = []
    env.CFLAGS_cshlib = ['-fPIC', '-DPIC']
    env.cshlib_PATTERN = 'lib%s.sl'
@conf
def gcc_modifier_openbsd(conf):
    """Adjust the gcc environment for OpenBSD (no soname option)."""
    conf.env['SONAME_ST'] = []
@conf
def gcc_modifier_osf1V(conf):
    """Adjust the gcc environment for OSF1."""
    env = conf.env
    env.SHLIB_MARKER = []
    env.STLIB_MARKER = []
    env.SONAME_ST = []
@conf
def gcc_modifier_platform(conf):
    """Run the platform-specific hook *gcc_modifier_<DEST_OS>* when one is defined."""
    # The destination platform is detected automatically from the macros the
    # compiler predefines (falling back to sys.platform); dispatch by name.
    modifier = getattr(conf, 'gcc_modifier_' + conf.env.DEST_OS, None)
    if modifier:
        modifier()
def configure(conf):
    """
    Configuration for gcc
    """
    conf.find_gcc()               # locate the compiler and detect its version
    conf.find_ar()                # locate the archiver for static libraries
    conf.gcc_common_flags()       # baseline flags and file-name patterns
    conf.gcc_modifier_platform()  # then apply the platform-specific overrides
    conf.cc_load_tools()
    conf.cc_add_flags()
    conf.link_add_flags()
@@ -0,0 +1,60 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Carlos Rafael Giani, 2007 (dv) | |||||
import sys | |||||
from waflib.Tools import ar, d | |||||
from waflib.Configure import conf | |||||
@conf
def find_gdc(conf):
    """
    Locate the *gdc* compiler, store it in ``conf.env.D`` and verify that
    it really is gdc by inspecting its ``--version`` banner.
    """
    conf.find_program('gdc', var='D')
    banner = conf.cmd_and_log(conf.env.D + ['--version'])
    if 'gdc' not in banner:
        conf.fatal("detected compiler is not gdc")
@conf
def common_flags_gdc(conf):
    """
    Set the flags required by *gdc*
    (for more info about the meaning of these variables see dmd.py).
    """
    env = conf.env
    env.DFLAGS = []
    # compilation
    env.D_SRC_F = ['-c']
    env.D_TGT_F = '-o%s'
    # linking is performed through the compiler driver itself
    env.D_LINKER = env.D
    env.DLNK_SRC_F = ''
    env.DLNK_TGT_F = '-o%s'
    env.DINC_ST = '-I%s'
    env.DSHLIB_MARKER = env.DSTLIB_MARKER = ''
    env.DSTLIB_ST = env.DSHLIB_ST = '-l%s'
    env.DSTLIBPATH_ST = env.DLIBPATH_ST = '-L%s'
    env.LINKFLAGS_dshlib = ['-shared']
    # D interface files
    env.DHEADER_ext = '.di'
    env.DFLAGS_d_with_header = '-fintfc'
    env.D_HDR_F = '-fintfc-file=%s'
def configure(conf):
    """
    Configuration for gdc
    """
    conf.find_gdc()            # locate gdc and check the banner
    conf.load('ar')            # archiver for static libraries
    conf.load('d')             # generic D-language support
    conf.common_flags_gdc()    # gdc-specific flag templates
    conf.d_platform_flags()
@@ -0,0 +1,90 @@ | |||||
#! /usr/bin/env python | |||||
# encoding: utf-8 | |||||
# DC 2008 | |||||
# Thomas Nagy 2010 (ita) | |||||
import re | |||||
from waflib import Utils | |||||
from waflib.Tools import fc, fc_config, fc_scan, ar | |||||
from waflib.Configure import conf | |||||
@conf
def find_gfortran(conf):
    """
    Detect the gfortran program and store it in the environment variable 'FC'
    (falls back to g77 for systems where no gfortran is available).
    """
    compiler = conf.find_program(['gfortran', 'g77'], var='FC')
    conf.get_gfortran_version(compiler)
    conf.env.FC_NAME = 'GFORTRAN'
@conf
def gfortran_flags(conf):
    """Set the default gfortran flag templates."""
    env = conf.env
    env.FCFLAGS_fcshlib = ['-fPIC']
    env.FORTRANMODFLAG = ['-J', '']  # template for module path
    env.FCFLAGS_DEBUG = ['-Werror']  # why not
@conf
def gfortran_modifier_win32(conf):
    """Apply the generic Fortran win32 settings to this configuration."""
    fc_config.fortran_modifier_win32(conf)
@conf
def gfortran_modifier_cygwin(conf):
    """Apply the generic Fortran cygwin settings to this configuration."""
    fc_config.fortran_modifier_cygwin(conf)
@conf
def gfortran_modifier_darwin(conf):
    """Apply the generic Fortran darwin settings to this configuration."""
    fc_config.fortran_modifier_darwin(conf)
@conf
def gfortran_modifier_platform(conf):
    """Run the platform-specific hook *gfortran_modifier_<platform>* when one is defined."""
    platform = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
    modifier = getattr(conf, 'gfortran_modifier_' + platform, None)
    if modifier:
        modifier()
@conf
def get_gfortran_version(conf, fc):
    """
    Verify that the detected compiler is GNU Fortran and store its version in
    ``conf.env.FC_VERSION`` as a tuple of strings (major, minor, patchlevel).

    Fixes over the previous revision: the ``import shlex`` statement is hoisted
    to the top of the function, and the dead local helpers ``isD``/``isT``
    (never called here) are removed.

    :param fc: command line of the Fortran compiler
    :type fc: list of string
    """
    import shlex
    # ensure this is actually gfortran, not an imposter
    version_re = re.compile(r"GNU\s*Fortran", re.I).search
    cmd = fc + ['--version']
    out, err = fc_config.getoutput(conf, cmd, stdin=False)
    if out:
        match = version_re(out)
    else:
        match = version_re(err)
    if not match:
        conf.fatal('Could not determine the compiler type')
    # --- now get more detailed info -- see c_config.get_cc_version:
    # dump the predefined macros and parse "#define KEY VALUE" lines
    cmd = fc + ['-dM', '-E', '-']
    out, err = fc_config.getoutput(conf, cmd, stdin=True)
    if out.find('__GNUC__') < 0:
        conf.fatal('Could not determine the compiler type')
    defines = {}
    for line in out.splitlines():
        lst = shlex.split(line)
        if len(lst) > 2:
            defines[lst[1]] = lst[2]
    conf.env['FC_VERSION'] = (defines['__GNUC__'], defines['__GNUC_MINOR__'], defines['__GNUC_PATCHLEVEL__'])
def configure(conf):
    """
    Configuration for gfortran: find the compiler and the archiver, then set the flags.
    """
    conf.find_gfortran()               # locate gfortran/g77 and verify the version
    conf.find_ar()                     # archiver for static libraries
    conf.fc_flags()
    conf.fc_add_flags()
    conf.gfortran_flags()              # gfortran-specific templates
    conf.gfortran_modifier_platform()  # platform-specific overrides last
@@ -0,0 +1,488 @@ | |||||
#! /usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2006-2010 (ita) | |||||
""" | |||||
Support for GLib2 tools: | |||||
* marshal | |||||
* enums | |||||
* gsettings | |||||
* gresource | |||||
""" | |||||
import os | |||||
from waflib import Context, Task, Utils, Options, Errors, Logs | |||||
from waflib.TaskGen import taskgen_method, before_method, after_method, feature, extension | |||||
from waflib.Configure import conf | |||||
################## marshal files | |||||
@taskgen_method
def add_marshal_file(self, filename, prefix):
    """
    Queue an xml marshal file for processing; entries accumulate in the
    attribute *marshal_list* and the *process_marshal* method is scheduled
    on the first call.

    :param filename: xml file to compile
    :type filename: string
    :param prefix: marshal prefix (--prefix=prefix)
    :type prefix: string
    """
    try:
        self.marshal_list.append((filename, prefix))
    except AttributeError:
        # first marshal file: create the list and schedule the processing method
        self.marshal_list = [(filename, prefix)]
        self.meths.append('process_marshal')
@before_method('process_source')
def process_marshal(self):
    """
    Process the marshal files stored in the attribute *marshal_list* to create
    :py:class:`waflib.Tools.glib2.glib_genmarshal` instances.
    Add the c file created to the list of source to process.
    """
    for f, prefix in getattr(self, 'marshal_list', []):
        node = self.path.find_resource(f)
        if not node:
            raise Errors.WafError('file not found %r' % f)
        h_node = node.change_ext('.h')
        c_node = node.change_ext('.c')
        task = self.create_task('glib_genmarshal', node, [h_node, c_node])
        task.env.GLIB_GENMARSHAL_PREFIX = prefix
    # NOTE(review): only the c file of the *last* marshal entry is appended to
    # the sources here — fine for the common single-file case, but confirm the
    # intent if several marshal files are ever registered at once
    self.source = self.to_nodes(getattr(self, 'source', []))
    self.source.append(c_node)
class glib_genmarshal(Task.Task):
    # Run glib-genmarshal twice over the same xml input: first to write the
    # header (outputs[0]), then to append the body to the .c file (outputs[1]).
    def run(self):
        bld = self.inputs[0].__class__.ctx
        get = self.env.get_flat
        cmd1 = "%s %s --prefix=%s --header > %s" % (
            get('GLIB_GENMARSHAL'),
            self.inputs[0].srcpath(),
            get('GLIB_GENMARSHAL_PREFIX'),
            self.outputs[0].abspath()
        )
        ret = bld.exec_command(cmd1)
        if ret: return ret
        # make the generated .c file include its own header before the body
        # is appended to it by the second command
        c = '''#include "%s"\n''' % self.outputs[0].name
        self.outputs[1].write(c)
        cmd2 = "%s %s --prefix=%s --body >> %s" % (
            get('GLIB_GENMARSHAL'),
            self.inputs[0].srcpath(),
            get('GLIB_GENMARSHAL_PREFIX'),
            self.outputs[1].abspath()
        )
        return bld.exec_command(cmd2)

    # rebuild when the program or the prefix changes
    vars = ['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL']
    color = 'BLUE'
    # other tasks may depend on the generated header
    ext_out = ['.h']
########################## glib-mkenums | |||||
@taskgen_method
def add_enums_from_template(self, source='', target='', template='', comments=''):
    """
    Queue an enum file for processing through a glib-mkenums template.
    Entries accumulate in the attribute *enums_list*.

    :param source: enum file to process
    :type source: string
    :param target: target file
    :type target: string
    :param template: template file
    :type template: string
    :param comments: comments
    :type comments: string
    """
    if not hasattr(self, 'enums_list'):
        self.enums_list = []
        self.meths.append('process_enums')
    # the per-section options are unused in template mode, hence empty
    entry = dict.fromkeys(
        ('file-head', 'file-prod', 'file-tail', 'enum-prod',
         'value-head', 'value-prod', 'value-tail'), '')
    entry.update(source=source, target=target, template=template, comments=comments)
    self.enums_list.append(entry)
@taskgen_method
def add_enums(self, source='', target='',
        file_head='', file_prod='', file_tail='', enum_prod='',
        value_head='', value_prod='', value_tail='', comments=''):
    """
    Queue an enum file for processing with explicit per-section options.
    Entries accumulate in the attribute *enums_list*.

    :param source: enum file to process
    :type source: string
    :param target: target file
    :type target: string
    :param file_head: unused
    :param file_prod: unused
    :param file_tail: unused
    :param enum_prod: unused
    :param value_head: unused
    :param value_prod: unused
    :param value_tail: unused
    :param comments: comments
    :type comments: string
    """
    if not hasattr(self, 'enums_list'):
        self.enums_list = []
        self.meths.append('process_enums')
    keys = ('file-head', 'file-prod', 'file-tail', 'enum-prod',
            'value-head', 'value-prod', 'value-tail')
    vals = (file_head, file_prod, file_tail, enum_prod,
            value_head, value_prod, value_tail)
    entry = dict(zip(keys, vals))
    entry.update(source=source, template='', target=target, comments=comments)
    self.enums_list.append(entry)
@before_method('process_source')
def process_enums(self):
    """
    Process the enum files stored in the attribute *enums_list* to create
    :py:class:`waflib.Tools.glib2.glib_mkenums` instances.

    :raises: :py:class:`waflib.Errors.WafError` when the source, target or
        template of an entry is missing (the template check is new: previously
        a missing template file crashed with an obscure AttributeError)
    """
    for enum in getattr(self, 'enums_list', []):
        task = self.create_task('glib_mkenums')
        env = task.env
        inputs = []

        # process the source
        source_list = self.to_list(enum['source'])
        if not source_list:
            raise Errors.WafError('missing source ' + str(enum))
        source_list = [self.path.find_resource(k) for k in source_list]
        inputs += source_list
        env['GLIB_MKENUMS_SOURCE'] = [k.abspath() for k in source_list]

        # find the target
        if not enum['target']:
            raise Errors.WafError('missing target ' + str(enum))
        tgt_node = self.path.find_or_declare(enum['target'])
        if tgt_node.name.endswith('.c'):
            # generated C code must be compiled as part of this task generator
            self.source.append(tgt_node)
        env['GLIB_MKENUMS_TARGET'] = tgt_node.abspath()

        options = []
        if enum['template']: # template, if provided
            template_node = self.path.find_resource(enum['template'])
            if not template_node:
                # fail with a clear error instead of an AttributeError below
                raise Errors.WafError('template not found %r' % enum['template'])
            options.append('--template %s' % (template_node.abspath()))
            inputs.append(template_node)
        params = {'file-head' : '--fhead',
                  'file-prod' : '--fprod',
                  'file-tail' : '--ftail',
                  'enum-prod' : '--eprod',
                  'value-head' : '--vhead',
                  'value-prod' : '--vprod',
                  'value-tail' : '--vtail',
                  'comments': '--comments'}
        for param, option in params.items():
            if enum[param]:
                options.append('%s %r' % (option, enum[param]))
        env['GLIB_MKENUMS_OPTIONS'] = ' '.join(options)

        # update the task instance
        task.set_inputs(inputs)
        task.set_outputs(tgt_node)
class glib_mkenums(Task.Task):
    """
    Process enum files with glib-mkenums; the command line is assembled from
    the per-task environment variables set by :py:func:`process_enums`.
    """
    run_str = '${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}'
    color = 'PINK'
    # other tasks may depend on the generated header
    ext_out = ['.h']
######################################### gsettings | |||||
@taskgen_method
def add_settings_schemas(self, filename_list):
    """
    Record gsettings schema files to validate and install; they accumulate
    in the attribute *settings_schema_files*.

    :param filename_list: files (a single string is accepted as well)
    :type filename_list: list of string
    """
    schemas = getattr(self, 'settings_schema_files', None)
    if schemas is None:
        schemas = self.settings_schema_files = []
    if isinstance(filename_list, list):
        schemas.extend(filename_list)
    else:
        schemas.append(filename_list)
@taskgen_method
def add_settings_enums(self, namespace, filename_list):
    """
    Set the enums namespace and the enum files to process. This function may
    be called only once per task generator.

    :param namespace: namespace
    :type namespace: string
    :param filename_list: enum files to process (a single string is accepted as well)
    :type filename_list: file list
    :raises: :py:class:`waflib.Errors.WafError` when called more than once
    """
    if hasattr(self, 'settings_enum_namespace'):
        raise Errors.WafError("Tried to add gsettings enums to '%s' more than once" % self.name)
    self.settings_enum_namespace = namespace
    # bug fix: the previous test was ``type(filename_list) != 'list'``, which
    # compares a type object against a string and is therefore always true —
    # genuine lists were wrapped into a one-element list of lists
    if not isinstance(filename_list, list):
        filename_list = [filename_list]
    self.settings_enum_files = filename_list
@feature('glib2')
def process_settings(self):
    """
    Process the schema files in *settings_schema_files*: validate them through
    :py:class:`waflib.Tools.glib2.glib_validate_schema` tasks, generate the
    optional enums xml with a :py:class:`waflib.Tools.glib2.glib_mkenums` task,
    install everything to GSETTINGSSCHEMADIR, and register a single post-build
    callback that refreshes the schema cache after installation.
    """
    enums_tgt_node = []
    install_files = []
    settings_schema_files = getattr(self, 'settings_schema_files', [])
    if settings_schema_files and not self.env['GLIB_COMPILE_SCHEMAS']:
        raise Errors.WafError ("Unable to process GSettings schemas - glib-compile-schemas was not found during configure")
    # 1. process gsettings_enum_files (generate .enums.xml)
    #
    if hasattr(self, 'settings_enum_files'):
        enums_task = self.create_task('glib_mkenums')
        source_list = self.settings_enum_files
        source_list = [self.path.find_resource(k) for k in source_list]
        enums_task.set_inputs(source_list)
        enums_task.env['GLIB_MKENUMS_SOURCE'] = [k.abspath() for k in source_list]
        # the output file is named after the namespace set by add_settings_enums
        target = self.settings_enum_namespace + '.enums.xml'
        tgt_node = self.path.find_or_declare(target)
        enums_task.set_outputs(tgt_node)
        enums_task.env['GLIB_MKENUMS_TARGET'] = tgt_node.abspath()
        enums_tgt_node = [tgt_node]
        install_files.append (tgt_node)
        # glib-mkenums options producing the xml schema-enum format
        options = '--comments "<!-- @comment@ -->" --fhead "<schemalist>" --vhead " <@type@ id=\\"%s.@EnumName@\\">" --vprod " <value nick=\\"@valuenick@\\" value=\\"@valuenum@\\"/>" --vtail " </@type@>" --ftail "</schemalist>" ' % (self.settings_enum_namespace)
        enums_task.env['GLIB_MKENUMS_OPTIONS'] = options
    # 2. process gsettings_schema_files (validate .gschema.xml files)
    #
    for schema in settings_schema_files:
        schema_task = self.create_task ('glib_validate_schema')
        schema_node = self.path.find_resource(schema)
        if not schema_node:
            raise Errors.WafError("Cannot find the schema file '%s'" % schema)
        install_files.append(schema_node)
        # the generated enums xml (if any) takes part in the validation
        source_list = enums_tgt_node + [schema_node]
        schema_task.set_inputs (source_list)
        schema_task.env['GLIB_COMPILE_SCHEMAS_OPTIONS'] = [("--schema-file=" + k.abspath()) for k in source_list]
        target_node = schema_node.change_ext('.xml.valid')
        schema_task.set_outputs (target_node)
        schema_task.env['GLIB_VALIDATE_SCHEMA_OUTPUT'] = target_node.abspath()
    # 3. schemas install task
    def compile_schemas_callback(bld):
        # post-build hook: refresh the on-disk schema cache after installation
        if not bld.is_install: return
        Logs.pprint ('YELLOW','Updating GSettings schema cache')
        command = Utils.subst_vars("${GLIB_COMPILE_SCHEMAS} ${GSETTINGSSCHEMADIR}", bld.env)
        ret = self.bld.exec_command(command)
    if self.bld.is_install:
        if not self.env['GSETTINGSSCHEMADIR']:
            raise Errors.WafError ('GSETTINGSSCHEMADIR not defined (should have been set up automatically during configure)')
        if install_files:
            self.bld.install_files (self.env['GSETTINGSSCHEMADIR'], install_files)
            # register the cache-refresh hook only once per build context
            if not hasattr(self.bld, '_compile_schemas_registered'):
                self.bld.add_post_fun (compile_schemas_callback)
                self.bld._compile_schemas_registered = True
class glib_validate_schema(Task.Task):
    """
    Validate schema files with a glib-compile-schemas dry run; a stamp file
    (*.xml.valid*) is touched on success so the task is not re-run needlessly.
    """
    run_str = 'rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}'
    color = 'PINK'
################## gresource | |||||
@extension('.gresource.xml')
def process_gresource_source(self, node):
    """
    Hook to process .gresource.xml files: generates a C header and source
    pair and appends the .c file to the sources of the task generator.
    """
    if not self.env['GLIB_COMPILE_RESOURCES']:
        raise Errors.WafError ("Unable to process GResource file - glib-compile-resources was not found during configure")
    if 'gresource' in self.features:
        # binary bundles are handled by process_gresource_bundle instead
        return
    header = node.change_ext('_xml.h')
    source = node.change_ext('_xml.c')
    self.create_task('glib_gresource_source', node, [header, source])
    self.source.append(source)
@feature('gresource')
def process_gresource_bundle(self):
    """
    Generate a binary .gresource file for each .gresource.xml source::

        def build(bld):
            bld(
                features='gresource',
                source=['resources1.gresource.xml', 'resources2.gresource.xml'],
                install_path='${LIBDIR}/${PACKAGE}'
            )

    :param source: XML files to process
    :type source: list of string
    :param install_path: installation path
    :type install_path: string
    """
    # the install destination is the same for every bundle, so resolve it once
    inst_to = getattr(self, 'install_path', None)
    for src in self.to_list(self.source):
        node = self.path.find_resource(src)
        tsk = self.create_task('glib_gresource_bundle', node, node.change_ext(''))
        if inst_to:
            self.bld.install_files(inst_to, tsk.outputs)
class glib_gresource_base(Task.Task):
    """
    Base class for gresource based tasks, it implements the implicit dependencies scan.
    """
    color = 'BLUE'
    # common command prefix shared by the header/source/bundle subclasses
    base_cmd = '${GLIB_COMPILE_RESOURCES} --sourcedir=${SRC[0].parent.srcpath()} --sourcedir=${SRC[0].bld_dir()}'

    def scan(self):
        """
        Scan gresource dependencies through ``glib-compile-resources --generate-dependencies command``
        """
        bld = self.generator.bld
        kw = {}
        try:
            if not kw.get('cwd', None):
                kw['cwd'] = bld.cwd
        except AttributeError:
            # the build context has no cwd yet: default to the variant dir
            bld.cwd = kw['cwd'] = bld.variant_dir
        kw['quiet'] = Context.BOTH
        cmd = Utils.subst_vars('${GLIB_COMPILE_RESOURCES} --sourcedir=%s --sourcedir=%s --generate-dependencies %s' % (
            self.inputs[0].parent.srcpath(),
            self.inputs[0].bld_dir(),
            self.inputs[0].bldpath()
        ), self.env)
        output = bld.cmd_and_log(cmd, **kw)
        nodes = []
        names = []
        # one dependency path per line; paths resolvable in the build tree
        # become nodes, the rest are reported as raw names
        for dep in output.splitlines():
            if dep:
                node = bld.bldnode.find_node(dep)
                if node:
                    nodes.append(node)
                else:
                    names.append(dep)
        return (nodes, names)
class glib_gresource_source(glib_gresource_base):
    """
    Task to generate C source code (.h and .c files) from a gresource.xml file
    """
    vars = ['GLIB_COMPILE_RESOURCES']
    # two pre-compiled command lines: one emits the header (outputs[0]),
    # the other emits the source file (outputs[1])
    fun_h = Task.compile_fun_shell(glib_gresource_base.base_cmd + ' --target=${TGT[0].abspath()} --generate-header ${SRC}')
    fun_c = Task.compile_fun_shell(glib_gresource_base.base_cmd + ' --target=${TGT[1].abspath()} --generate-source ${SRC}')
    ext_out = ['.h']

    def run(self):
        # run the header generator first; a non-zero return stops early
        return self.fun_h[0](self) or self.fun_c[0](self)
class glib_gresource_bundle(glib_gresource_base):
    """
    Task to generate a .gresource binary file from a gresource.xml file
    """
    run_str = glib_gresource_base.base_cmd + ' --target=${TGT} ${SRC}'
    shell = True # temporary workaround for #795
@conf
def find_glib_genmarshal(conf):
    """Find *glib-genmarshal* and set ``GLIB_GENMARSHAL``."""
    conf.find_program('glib-genmarshal', var='GLIB_GENMARSHAL')
@conf
def find_glib_mkenums(conf):
    """Find *glib-mkenums* (a perl script) and set ``GLIB_MKENUMS``."""
    # the script needs a perl interpreter; detect one first if necessary
    if not conf.env.PERL:
        conf.find_program('perl', var='PERL')
    conf.find_program('glib-mkenums', interpreter='PERL', var='GLIB_MKENUMS')
@conf
def find_glib_compile_schemas(conf):
    """
    Find *glib-compile-schemas* and compute ``GSETTINGSSCHEMADIR`` from the
    command line, the environment, or the DATADIR/PREFIX defaults.
    """
    # when cross-compiling, gsettings.m4 locates the program with the following:
    #   pkg-config --variable glib_compile_schemas gio-2.0
    conf.find_program('glib-compile-schemas', var='GLIB_COMPILE_SCHEMAS')

    def lookup(varname):
        # command-line option first, then any value already in the environment
        return getattr(Options.options, varname, getattr(conf.env, varname, ''))

    # TODO make this dependent on the gnu_dirs tool?
    schemadir = lookup('GSETTINGSSCHEMADIR')
    if not schemadir:
        datadir = lookup('DATADIR')
        if not datadir:
            datadir = os.path.join(conf.env['PREFIX'], 'share')
        schemadir = os.path.join(datadir, 'glib-2.0', 'schemas')
    conf.env['GSETTINGSSCHEMADIR'] = schemadir
@conf
def find_glib_compile_resources(conf):
    """Find *glib-compile-resources* and set ``GLIB_COMPILE_RESOURCES``."""
    conf.find_program('glib-compile-resources', var='GLIB_COMPILE_RESOURCES')
def configure(conf):
    """
    Find the following programs:

    * *glib-genmarshal* and set *GLIB_GENMARSHAL*
    * *glib-mkenums* and set *GLIB_MKENUMS*
    * *glib-compile-schemas* and set *GLIB_COMPILE_SCHEMAS* (not mandatory)
    * *glib-compile-resources* and set *GLIB_COMPILE_RESOURCES* (not mandatory)

    And set the variable *GSETTINGSSCHEMADIR*
    """
    conf.find_glib_genmarshal()
    conf.find_glib_mkenums()
    # the schema/resource tools are optional: their absence only disables
    # the corresponding features, reported later by the task generators
    conf.find_glib_compile_schemas(mandatory=False)
    conf.find_glib_compile_resources(mandatory=False)
def options(opt):
    """
    Add the ``--gsettingsschemadir`` command-line option
    """
    group = opt.add_option_group('Installation directories')
    group.add_option('--gsettingsschemadir', help='GSettings schema location [DATADIR/glib-2.0/schemas]', default='', dest='GSETTINGSSCHEMADIR')
@@ -0,0 +1,131 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Ali Sabil, 2007 | |||||
""" | |||||
Sets various standard variables such as INCLUDEDIR. SBINDIR and others. To use this module just call:: | |||||
opt.load('gnu_dirs') | |||||
and:: | |||||
conf.load('gnu_dirs') | |||||
Add options for the standard GNU directories, this tool will add the options | |||||
found in autotools, and will update the environment with the following | |||||
installation variables: | |||||
============== ========================================= ======================= | |||||
Variable Description Default Value | |||||
============== ========================================= ======================= | |||||
PREFIX installation prefix /usr/local | |||||
EXEC_PREFIX installation prefix for binaries PREFIX | |||||
BINDIR user commands EXEC_PREFIX/bin | |||||
SBINDIR system binaries EXEC_PREFIX/sbin | |||||
LIBEXECDIR program-specific binaries EXEC_PREFIX/libexec | |||||
SYSCONFDIR host-specific configuration PREFIX/etc | |||||
SHAREDSTATEDIR architecture-independent variable data PREFIX/com | |||||
LOCALSTATEDIR variable data PREFIX/var | |||||
LIBDIR object code libraries EXEC_PREFIX/lib | |||||
INCLUDEDIR header files PREFIX/include | |||||
OLDINCLUDEDIR header files for non-GCC compilers /usr/include | |||||
DATAROOTDIR architecture-independent data root PREFIX/share | |||||
DATADIR architecture-independent data DATAROOTDIR | |||||
INFODIR GNU "info" documentation DATAROOTDIR/info | |||||
LOCALEDIR locale-dependent data DATAROOTDIR/locale | |||||
MANDIR manual pages DATAROOTDIR/man | |||||
DOCDIR documentation root DATAROOTDIR/doc/APPNAME | |||||
HTMLDIR HTML documentation DOCDIR | |||||
DVIDIR DVI documentation DOCDIR | |||||
PDFDIR PDF documentation DOCDIR | |||||
PSDIR PostScript documentation DOCDIR | |||||
============== ========================================= ======================= | |||||
""" | |||||
import os, re | |||||
from waflib import Utils, Options, Context | |||||
# Comma-separated table of (option name, description, default value) that
# drives both the command-line options and the configure-time substitution.
# The %s interpolation appends the platform's 64-bit library suffix to libdir
# when Utils.lib64() reports one.
gnuopts = '''
bindir, user commands, ${EXEC_PREFIX}/bin
sbindir, system binaries, ${EXEC_PREFIX}/sbin
libexecdir, program-specific binaries, ${EXEC_PREFIX}/libexec
sysconfdir, host-specific configuration, ${PREFIX}/etc
sharedstatedir, architecture-independent variable data, ${PREFIX}/com
localstatedir, variable data, ${PREFIX}/var
libdir, object code libraries, ${EXEC_PREFIX}/lib%s
includedir, header files, ${PREFIX}/include
oldincludedir, header files for non-GCC compilers, /usr/include
datarootdir, architecture-independent data root, ${PREFIX}/share
datadir, architecture-independent data, ${DATAROOTDIR}
infodir, GNU "info" documentation, ${DATAROOTDIR}/info
localedir, locale-dependent data, ${DATAROOTDIR}/locale
mandir, manual pages, ${DATAROOTDIR}/man
docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
htmldir, HTML documentation, ${DOCDIR}
dvidir, DVI documentation, ${DOCDIR}
pdfdir, PDF documentation, ${DOCDIR}
psdir, PostScript documentation, ${DOCDIR}
''' % Utils.lib64()

# parsed form of the table above: a list of [name, description, default] triples
_options = [x.split(', ') for x in gnuopts.splitlines() if x]
def configure(conf):
    """
    Read the command-line options to set lots of variables in *conf.env*. The variables
    BINDIR and LIBDIR will be overwritten.

    Substitution may need several passes because the defaults reference each
    other (e.g. ``${DATAROOTDIR}/man`` requires DATAROOTDIR to be resolved
    first), hence the bounded retry loop.

    Fix over the previous revision: the loop counter no longer shadows the
    builtin ``iter`` and the unused loop variable no longer shadows ``help``.

    :raises: :py:class:`waflib.Errors.WafError` when some variables cannot be resolved
    """
    def get_param(varname, default):
        # a command-line value takes precedence over the computed default
        return getattr(Options.options, varname, '') or default

    env = conf.env
    env.LIBDIR = env.BINDIR = []
    env.EXEC_PREFIX = get_param('EXEC_PREFIX', env.PREFIX)
    env.PACKAGE = getattr(Context.g_module, 'APPNAME', None) or env.PACKAGE

    complete = False
    attempt = 0
    while not complete and attempt < len(_options) + 1:
        attempt += 1
        complete = True
        for name, _desc, default in _options:
            name = name.upper()
            if not env[name]:
                try:
                    env[name] = Utils.subst_vars(get_param(name, default).replace('/', os.sep), env)
                except TypeError:
                    # a referenced variable is not resolved yet: retry next pass
                    complete = False
    if not complete:
        lst = [x for x, _, _ in _options if not env[x.upper()]]
        raise conf.errors.WafError('Variable substitution failure %r' % lst)
def options(opt):
    """
    Add lots of command-line options, for example::

        --exec-prefix: EXEC_PREFIX
    """
    inst_dir = opt.add_option_group('Installation prefix',
        'By default, "waf install" will put the files in'
        ' "/usr/local/bin", "/usr/local/lib" etc. An installation prefix other'
        ' than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')

    # move --prefix and --destdir into the "Installation prefix" group
    for flag in ('--prefix', '--destdir'):
        option = opt.parser.get_option(flag)
        if option:
            opt.parser.remove_option(flag)
            inst_dir.add_option(option)

    inst_dir.add_option('--exec-prefix',
        help = 'installation prefix for binaries [PREFIX]',
        default = '',
        dest = 'EXEC_PREFIX')

    # one option per entry of the gnuopts table, e.g. --bindir -> BINDIR
    dirs_options = opt.add_option_group('Installation directories')
    for name, desc, default in _options:
        # strip the ${...} markers from the default shown in --help
        shown_default = re.sub(r'\$\{([^}]+)\}', r'\1', default)
        dirs_options.add_option('--' + name,
            help='%s [%s]' % (desc, shown_default),
            default='',
            dest=name.upper())
@@ -0,0 +1,161 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2006-2010 (ita) | |||||
# Ralf Habacker, 2006 (rh) | |||||
# Yinon Ehrlich, 2009 | |||||
""" | |||||
g++/llvm detection. | |||||
""" | |||||
import os, sys | |||||
from waflib import Configure, Options, Utils | |||||
from waflib.Tools import ccroot, ar | |||||
from waflib.Configure import conf | |||||
@conf
def find_gxx(conf):
    """
    Locate the g++ (or c++) program, store it in ``conf.env.CXX`` and
    detect its version number.
    """
    compiler = conf.find_program(['g++', 'c++'], var='CXX')
    conf.get_cc_version(compiler, gcc=True)
    conf.env.CXX_NAME = 'gcc'
@conf
def gxx_common_flags(conf):
    """
    Common flags for g++ on nearly all platforms
    """
    env = conf.env
    env.CXX_SRC_F = []
    env.CXX_TGT_F = ['-c', '-o']
    # linker: default to the compiler driver itself
    if not env.LINK_CXX:
        env.LINK_CXX = env.CXX
    env.CXXLNK_SRC_F = []
    env.CXXLNK_TGT_F = ['-o']
    env.CPPPATH_ST = '-I%s'
    env.DEFINES_ST = '-D%s'
    env.LIB_ST = '-l%s'        # template for adding libs
    env.LIBPATH_ST = '-L%s'    # template for adding libpaths
    env.STLIB_ST = '-l%s'
    env.STLIBPATH_ST = '-L%s'
    env.RPATH_ST = '-Wl,-rpath,%s'
    env.SONAME_ST = '-Wl,-h,%s'
    env.SHLIB_MARKER = '-Wl,-Bdynamic'
    env.STLIB_MARKER = '-Wl,-Bstatic'
    # program
    env.cxxprogram_PATTERN = '%s'
    # shared library
    env.CXXFLAGS_cxxshlib = ['-fPIC']
    env.LINKFLAGS_cxxshlib = ['-shared']
    env.cxxshlib_PATTERN = 'lib%s.so'
    # static lib
    env.LINKFLAGS_cxxstlib = ['-Wl,-Bstatic']
    env.cxxstlib_PATTERN = 'lib%s.a'
    # osx stuff
    env.LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup']
    env.CXXFLAGS_MACBUNDLE = ['-fPIC']
    env.macbundle_PATTERN = '%s.bundle'
@conf
def gxx_modifier_win32(conf):
    """Adjust the g++ environment for Windows targets (PE executables and DLLs)."""
    env = conf.env
    env.cxxprogram_PATTERN = '%s.exe'
    env.cxxshlib_PATTERN = '%s.dll'
    env.implib_PATTERN = 'lib%s.dll.a'
    env.IMPLIB_ST = '-Wl,--out-implib,%s'
    env.CXXFLAGS_cxxshlib = []
    # Auto-import is enabled by default even without this option, but passing
    # it explicitly suppresses the rather boring, debug-level messages the
    # linker would emit otherwise.
    env.append_value('LINKFLAGS', ['-Wl,--enable-auto-import'])
@conf
def gxx_modifier_cygwin(conf):
    """Adjust the g++ environment for Cygwin (builds on top of the win32 settings)."""
    gxx_modifier_win32(conf)
    env = conf.env
    env.cxxshlib_PATTERN = 'cyg%s.dll'
    env.append_value('LINKFLAGS_cxxshlib', ['-Wl,--enable-auto-image-base'])
    env.CXXFLAGS_cxxshlib = []
@conf
def gxx_modifier_darwin(conf):
    """Adjust the g++ environment for MacOS (dylib patterns, frameworks, arch flags)."""
    env = conf.env
    env.CXXFLAGS_cxxshlib = ['-fPIC']
    env.LINKFLAGS_cxxshlib = ['-dynamiclib', '-Wl,-compatibility_version,1', '-Wl,-current_version,1']
    env.cxxshlib_PATTERN = 'lib%s.dylib'
    env.FRAMEWORKPATH_ST = '-F%s'
    env.FRAMEWORK_ST = ['-framework']
    env.ARCH_ST = ['-arch']
    # cleared here — presumably not applicable with the darwin linker
    env.LINKFLAGS_cxxstlib = []
    env.SHLIB_MARKER = []
    env.STLIB_MARKER = []
    env.SONAME_ST = []
@conf
def gxx_modifier_aix(conf):
	"""Adjust the g++ link flags for AIX"""
	env = conf.env
	env['LINKFLAGS_cxxprogram'] = ['-Wl,-brtl']
	env['LINKFLAGS_cxxshlib'] = ['-shared', '-Wl,-brtl,-bexpfull']
	env['SHLIB_MARKER'] = []
@conf
def gxx_modifier_hpux(conf):
	"""Configuration flags for executing g++ on HP-UX"""
	v = conf.env
	v['SHLIB_MARKER'] = []
	v['STLIB_MARKER'] = []
	# bugfix: this is the C++ tool, so the PIC flags must go into
	# CXXFLAGS_cxxshlib; the original 'CFLAGS_cxxshlib' variable is never
	# read when compiling C++ shared objects (compare gxx_modifier_darwin)
	v['CXXFLAGS_cxxshlib'] = ['-fPIC','-DPIC']
	v['cxxshlib_PATTERN'] = 'lib%s.sl'
@conf
def gxx_modifier_openbsd(conf):
	"""OpenBSD shared libraries do not take a soname argument"""
	conf.env['SONAME_ST'] = []
@conf
def gxx_modifier_osf1V(conf):
	"""Configuration flags for executing g++ on OSF1"""
	# bugfix: renamed from gcc_modifier_osf1V (a copy-paste from the gcc tool).
	# gxx_modifier_platform() dispatches on 'gxx_modifier_' + DEST_OS, so the
	# old name could never be reached from this module.
	v = conf.env
	v['SHLIB_MARKER'] = []
	v['STLIB_MARKER'] = []
	v['SONAME_ST'] = []
@conf
def gxx_modifier_platform(conf):
	"""Execute platform-specific functions based on *gxx_modifier_+NAME*"""
	# DEST_OS is detected from the macros the compiler predefines; detection
	# elsewhere falls back to sys.platform when it is not recognised.
	modifier = getattr(conf, 'gxx_modifier_' + conf.env.DEST_OS, None)
	if modifier:
		modifier()
def configure(conf):
	"""
	Configuration for g++: locate the compiler and the archiver, then set up
	the common flags followed by the platform-specific overrides.
	"""
	conf.find_gxx()
	conf.find_ar()
	conf.gxx_common_flags()
	conf.gxx_modifier_platform()
	conf.cxx_load_tools()
	conf.cxx_add_flags()
	conf.link_add_flags()
@@ -0,0 +1,33 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Stian Selnes 2008 | |||||
# Thomas Nagy 2009-2010 (ita) | |||||
""" | |||||
Detect the Intel C compiler | |||||
""" | |||||
import os, sys | |||||
from waflib.Tools import ccroot, ar, gcc | |||||
from waflib.Configure import conf | |||||
@conf
def find_icc(conf):
	"""Locate icc (or ICL) and check that it really is the Intel compiler."""
	# icc is not supported on Cygwin, bail out early
	if sys.platform == 'cygwin':
		conf.fatal('The Intel compiler does not work on Cygwin')
	compiler = conf.find_program(['icc', 'ICL'], var='CC')
	conf.get_cc_version(compiler, icc=True)
	conf.env.CC_NAME = 'icc'
def configure(conf):
	"""Configuration for icc: reuses the gcc flag setup after detecting icc."""
	conf.find_icc()
	conf.find_ar()
	conf.gcc_common_flags()
	conf.gcc_modifier_platform()
	conf.cc_load_tools()
	conf.cc_add_flags()
	conf.link_add_flags()
@@ -0,0 +1,33 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy 2009-2010 (ita) | |||||
""" | |||||
Detect the Intel C++ compiler | |||||
""" | |||||
import os, sys | |||||
from waflib.Tools import ccroot, ar, gxx | |||||
from waflib.Configure import conf | |||||
@conf
def find_icpc(conf):
	"""Locate icpc and check that it really is the Intel C++ compiler."""
	# icpc is not supported on Cygwin, bail out early
	if sys.platform == 'cygwin':
		conf.fatal('The Intel compiler does not work on Cygwin')
	compiler = conf.find_program('icpc', var='CXX')
	conf.get_cc_version(compiler, icc=True)
	conf.env.CXX_NAME = 'icc'
def configure(conf):
	"""Configuration for icpc: reuses the g++ flag setup after detecting icpc."""
	conf.find_icpc()
	conf.find_ar()
	conf.gxx_common_flags()
	conf.gxx_modifier_platform()
	conf.cxx_load_tools()
	conf.cxx_add_flags()
	conf.link_add_flags()
@@ -0,0 +1,60 @@ | |||||
#! /usr/bin/env python | |||||
# encoding: utf-8 | |||||
# DC 2008 | |||||
# Thomas Nagy 2010 (ita) | |||||
import re | |||||
from waflib import Utils | |||||
from waflib.Tools import fc, fc_config, fc_scan, ar | |||||
from waflib.Configure import conf | |||||
@conf
def find_ifort(conf):
	"""Locate the ifort executable, record its version and set FC_NAME."""
	compiler = conf.find_program('ifort', var='FC')
	conf.get_ifort_version(compiler)
	conf.env.FC_NAME = 'IFORT'
@conf
def ifort_modifier_cygwin(conf):
	"""Placeholder hook: ifort on Cygwin is not supported yet."""
	raise NotImplementedError("Ifort on cygwin not yet implemented")
@conf
def ifort_modifier_win32(conf):
	"""Delegate to the generic Fortran win32 settings."""
	fc_config.fortran_modifier_win32(conf)
@conf
def ifort_modifier_darwin(conf):
	"""Delegate to the generic Fortran darwin settings."""
	fc_config.fortran_modifier_darwin(conf)
@conf
def ifort_modifier_platform(conf):
	"""Dispatch to an ifort_modifier_<os> hook when one is defined."""
	# prefer the detected destination OS, fall back to the host platform
	dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
	hook = getattr(conf, 'ifort_modifier_' + dest_os, None)
	if hook:
		hook()
@conf
def get_ifort_version(conf, fc):
	"""
	get the compiler version

	:param fc: command line for the Fortran compiler (list of strings)
	:raises: a fatal configuration error when no version can be parsed
	"""
	# matches banners such as "Intel(R) ... Version 12.1" (case-insensitive)
	version_re = re.compile(r"Intel[\sa-zA-Z()0-9,-]*Version\s*(?P<major>\d*)\.(?P<minor>\d*)",re.I).search
	if Utils.is_win32:
		cmd = fc
	else:
		# the -logo flag is only added on non-Windows platforms;
		# presumably it makes ifort print its banner — confirm against ifort docs
		cmd = fc + ['-logo']
	out, err = fc_config.getoutput(conf, cmd, stdin=False)
	# the banner may land on stdout or stderr, check both
	match = version_re(out) or version_re(err)
	if not match:
		conf.fatal('cannot determine ifort version.')
	k = match.groupdict()
	# stored as a tuple of strings, e.g. ('12', '1')
	conf.env['FC_VERSION'] = (k['major'], k['minor'])
def configure(conf):
	"""Configuration for ifort: detect the compiler and the xiar archiver."""
	conf.find_ifort()
	# the Intel toolchain ships its own archiver
	conf.find_program('xiar', var='AR')
	conf.env.ARFLAGS = 'rcs'
	conf.fc_flags()
	conf.fc_add_flags()
	conf.ifort_modifier_platform()
@@ -0,0 +1,220 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2006-2010 (ita) | |||||
""" | |||||
Support for translation tools such as msgfmt and intltool | |||||
Usage:: | |||||
def configure(conf): | |||||
conf.load('gnu_dirs intltool') | |||||
def build(bld): | |||||
# process the .po files into .gmo files, and install them in LOCALEDIR | |||||
bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}") | |||||
# process an input file, substituting the translations from the po dir | |||||
bld( | |||||
features = "intltool_in", | |||||
podir = "../po", | |||||
style = "desktop", | |||||
flags = ["-u"], | |||||
source = 'kupfer.desktop.in', | |||||
install_path = "${DATADIR}/applications", | |||||
) | |||||
Usage of the :py:mod:`waflib.Tools.gnu_dirs` is recommended, but not obligatory. | |||||
""" | |||||
import os, re | |||||
from waflib import Configure, Context, TaskGen, Task, Utils, Runner, Options, Build, Logs | |||||
import waflib.Tools.ccroot | |||||
from waflib.TaskGen import feature, before_method, taskgen_method | |||||
from waflib.Logs import error | |||||
from waflib.Configure import conf | |||||
_style_flags = { | |||||
'ba': '-b', | |||||
'desktop': '-d', | |||||
'keys': '-k', | |||||
'quoted': '--quoted-style', | |||||
'quotedxml': '--quotedxml-style', | |||||
'rfc822deb': '-r', | |||||
'schemas': '-s', | |||||
'xml': '-x', | |||||
} | |||||
@taskgen_method
def ensure_localedir(self):
	"""
	Expand LOCALEDIR from DATAROOTDIR/locale if possible, or fallback to PREFIX/share/locale
	"""
	# the gnu_dirs tool provides options to define these directories
	if self.env.LOCALEDIR:
		return
	dataroot = self.env.DATAROOTDIR
	if dataroot:
		self.env.LOCALEDIR = os.path.join(dataroot, 'locale')
	else:
		self.env.LOCALEDIR = os.path.join(self.env.PREFIX, 'share', 'locale')
@before_method('process_source')
@feature('intltool_in')
def apply_intltool_in_f(self):
	"""
	Create tasks to translate files by intltool-merge::
		def build(bld):
			bld(
				features = "intltool_in",
				podir = "../po",
				style = "desktop",
				flags = ["-u"],
				source = 'kupfer.desktop.in',
				install_path = "${DATADIR}/applications",
			)
	:param podir: location of the .po files
	:type podir: string
	:param source: source files to process
	:type source: list of string
	:param style: the intltool-merge mode of operation, can be one of the following values:
	  ``ba``, ``desktop``, ``keys``, ``quoted``, ``quotedxml``, ``rfc822deb``, ``schemas`` and ``xml``.
	  See the ``intltool-merge`` man page for more information about supported modes of operation.
	:type style: string
	:param flags: compilation flags ("-quc" by default)
	:type flags: list of string
	:param install_path: installation path
	:type install_path: string
	"""
	# the intltool tasks replace the default source processing entirely
	try: self.meths.remove('process_source')
	except ValueError: pass
	self.ensure_localedir()
	podir = getattr(self, 'podir', '.')
	podirnode = self.path.find_dir(podir)
	if not podirnode:
		error("could not find the podir %r" % podir)
		return
	# the cache file lives in the build directory next to the po dir
	cache = getattr(self, 'intlcache', '.intlcache')
	self.env.INTLCACHE = [os.path.join(str(self.path.get_bld()), podir, cache)]
	self.env.INTLPODIR = podirnode.bldpath()
	self.env.append_value('INTLFLAGS', getattr(self, 'flags', self.env.INTLFLAGS_DEFAULT))
	# -c is supplied through INTLCACHE_ST, a second one would conflict
	if '-c' in self.env.INTLFLAGS:
		self.bld.fatal('Redundant -c flag in intltool task %r' % self)
	style = getattr(self, 'style', None)
	if style:
		try:
			style_flag = _style_flags[style]
		except KeyError:
			self.bld.fatal('intltool_in style "%s" is not valid' % style)
		self.env.append_unique('INTLFLAGS', [style_flag])
	# one intltool task per source file; the target drops the .in extension
	for i in self.to_list(self.source):
		node = self.path.find_resource(i)
		task = self.create_task('intltool', node, node.change_ext(''))
		inst = getattr(self, 'install_path', None)
		if inst:
			self.bld.install_files(inst, task.outputs)
@feature('intltool_po')
def apply_intltool_po(self):
	"""
	Create tasks to process po files::
		def build(bld):
			bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}")
	The relevant task generator arguments are:
	:param podir: directory of the .po files
	:type podir: string
	:param appname: name of the application
	:type appname: string
	:param install_path: installation directory
	:type install_path: string
	The file LINGUAS must be present in the directory pointed by *podir* and list the translation files to process.
	"""
	try: self.meths.remove('process_source')
	except ValueError: pass
	self.ensure_localedir()
	appname = getattr(self, 'appname', getattr(Context.g_module, Context.APPNAME, 'set_your_app_name'))
	podir = getattr(self, 'podir', '.')
	inst = getattr(self, 'install_path', '${LOCALEDIR}')
	linguas = self.path.find_node(os.path.join(podir, 'LINGUAS'))
	if linguas:
		# scan LINGUAS file for locales to process
		# (with-statement closes the file even on error; the original also
		# shadowed the builtin 'file' and leaked the handle on exceptions)
		langs = []
		with open(linguas.abspath()) as fh:
			for line in fh.readlines():
				# ignore lines containing comments
				if not line.startswith('#'):
					langs += line.split()
		re_linguas = re.compile('[-a-zA-Z_@.]+')
		for lang in langs:
			# Make sure that we only process lines which contain locales
			match = re_linguas.match(lang)
			if match:
				node = self.path.find_resource(os.path.join(podir, match.group() + '.po'))
				task = self.create_task('po', node, node.change_ext('.mo'))
				if inst:
					# install as <inst>/<lang>/LC_MESSAGES/<appname>.mo
					filename = task.outputs[0].name
					(langname, ext) = os.path.splitext(filename)
					inst_file = inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + appname + '.mo'
					self.bld.install_as(inst_file, task.outputs[0], chmod=getattr(self, 'chmod', Utils.O644), env=task.env)
	else:
		Logs.pprint('RED', "Error no LINGUAS file found in po directory")
class po(Task.Task):
	"""Run ``msgfmt`` to compile a .po catalogue into its binary form"""
	color = 'BLUE'
	run_str = '${MSGFMT} -o ${TGT} ${SRC}'
class intltool(Task.Task):
	"""Run ``intltool-merge`` to translate one input file"""
	color = 'BLUE'
	run_str = '${INTLTOOL} ${INTLFLAGS} ${INTLCACHE_ST:INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}'
@conf
def find_msgfmt(conf):
	"""Detect the msgfmt program and set ``conf.env.MSGFMT``."""
	conf.find_program('msgfmt', var='MSGFMT')
@conf
def find_intltool_merge(conf):
	"""Detect perl and intltool-merge, and set the default intltool flags."""
	# intltool-merge is a perl script, make sure an interpreter is available
	if not conf.env.PERL:
		conf.find_program('perl', var='PERL')
	conf.env.INTLCACHE_ST = '--cache=%s'
	conf.env.INTLFLAGS_DEFAULT = ['-q', '-u']
	conf.find_program('intltool-merge', interpreter='PERL', var='INTLTOOL')
def configure(conf):
	"""
	Detect the program *msgfmt* and set *conf.env.MSGFMT*.
	Detect the program *intltool-merge* and set *conf.env.INTLTOOL*.
	It is possible to set INTLTOOL in the environment, but it must not have spaces in it::
		$ INTLTOOL="/path/to/the program/intltool" waf configure
	If a C/C++ compiler is present, execute a compilation test to find the header *locale.h*.
	"""
	conf.find_msgfmt()
	conf.find_intltool_merge()
	# the locale.h check only makes sense when a compiler was loaded
	if conf.env.CC or conf.env.CXX:
		conf.check(header_name='locale.h')
@@ -0,0 +1,62 @@ | |||||
#! /usr/bin/env python | |||||
# imported from samba | |||||
""" | |||||
compiler definition for irix/MIPSpro cc compiler | |||||
based on suncc.py from waf | |||||
""" | |||||
import os | |||||
from waflib import Utils | |||||
from waflib.Tools import ccroot, ar | |||||
from waflib.Configure import conf | |||||
@conf
def find_irixcc(conf):
	"""
	Find the MIPSpro cc compiler: honor a CC already present in the
	configuration set or in the OS environment, otherwise search the PATH.
	Aborts the configuration when the compiler cannot be executed.
	"""
	v = conf.env
	cc = None
	if v['CC']: cc = v['CC']
	elif 'CC' in conf.environ: cc = conf.environ['CC']
	if not cc: cc = conf.find_program('cc', var='CC')
	if not cc: conf.fatal('irixcc was not found')
	try:
		# smoke test: make sure the program actually runs
		conf.cmd_and_log(cc + ['-version'])
	except Exception:
		conf.fatal('%r -version could not be executed' % cc)
	v['CC'] = cc
	v['CC_NAME'] = 'irix'
@conf
def irixcc_common_flags(conf):
	"""Define the command-line templates for the MIPSpro cc compiler"""
	env = conf.env
	# compiler invocation
	env['CC_SRC_F'] = ''
	env['CC_TGT_F'] = ['-c', '-o']
	env['CPPPATH_ST'] = '-I%s'
	env['DEFINES_ST'] = '-D%s'
	# linker (reuse the compiler driver when no linker was configured)
	if not env['LINK_CC']: env['LINK_CC'] = env['CC']
	env['CCLNK_SRC_F'] = ''
	env['CCLNK_TGT_F'] = ['-o']
	env['LIB_ST'] = '-l%s' # template for adding libs
	env['LIBPATH_ST'] = '-L%s' # template for adding libpaths
	env['STLIB_ST'] = '-l%s'
	env['STLIBPATH_ST'] = '-L%s'
	# output file name patterns
	env['cprogram_PATTERN'] = '%s'
	env['cshlib_PATTERN'] = 'lib%s.so'
	env['cstlib_PATTERN'] = 'lib%s.a'
def configure(conf):
	"""Configuration for the irix/MIPSpro cc compiler"""
	conf.find_irixcc()
	conf.find_cpp()
	conf.find_ar()
	conf.irixcc_common_flags()
	conf.cc_load_tools()
	conf.cc_add_flags()
	conf.link_add_flags()
@@ -0,0 +1,477 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2006-2010 (ita) | |||||
""" | |||||
Java support | |||||
Javac is one of the few compilers that behaves very badly: | |||||
#. it outputs files where it wants to (-d is only for the package root) | |||||
#. it recompiles files silently behind your back | |||||
#. it outputs an undefined amount of files (inner classes) | |||||
Remember that the compilation can be performed using Jython[1] rather than regular Python. Instead of | |||||
running one of the following commands:: | |||||
./waf configure | |||||
python waf configure | |||||
You would have to run:: | |||||
java -jar /path/to/jython.jar waf configure | |||||
[1] http://www.jython.org/ | |||||
""" | |||||
import os, re, tempfile, shutil | |||||
from waflib import TaskGen, Task, Utils, Options, Build, Errors, Node, Logs | |||||
from waflib.Configure import conf | |||||
from waflib.TaskGen import feature, before_method, after_method | |||||
from waflib.Tools import ccroot | |||||
ccroot.USELIB_VARS['javac'] = set(['CLASSPATH', 'JAVACFLAGS']) | |||||
SOURCE_RE = '**/*.java' | |||||
JAR_RE = '**/*' | |||||
class_check_source = ''' | |||||
public class Test { | |||||
public static void main(String[] argv) { | |||||
Class lib; | |||||
if (argv.length < 1) { | |||||
System.err.println("Missing argument"); | |||||
System.exit(77); | |||||
} | |||||
try { | |||||
lib = Class.forName(argv[0]); | |||||
} catch (ClassNotFoundException e) { | |||||
System.err.println("ClassNotFoundException"); | |||||
System.exit(1); | |||||
} | |||||
lib = null; | |||||
System.exit(0); | |||||
} | |||||
} | |||||
''' | |||||
@feature('javac')
@before_method('process_source')
def apply_java(self):
	"""
	Create a javac task for compiling *.java files*. There can be
	only one javac task by task generator.
	"""
	Utils.def_attrs(self, jarname='', classpath='',
		sourcepath='.', srcdir='.',
		jar_mf_attributes={}, jar_mf_classpath=[])
	# NOTE(review): nodes_lst appears unused in this function
	nodes_lst = []
	# resolve the output directory; defaults to the build dir of this path
	outdir = getattr(self, 'outdir', None)
	if outdir:
		if not isinstance(outdir, Node.Node):
			outdir = self.path.get_bld().make_node(self.outdir)
	else:
		outdir = self.path.get_bld()
	outdir.mkdir()
	self.outdir = outdir
	self.env['OUTDIR'] = outdir.abspath()
	self.javac_task = tsk = self.create_task('javac')
	tmp = []
	# srcdir may be a node, a string, or a list of either
	srcdir = getattr(self, 'srcdir', '')
	if isinstance(srcdir, Node.Node):
		srcdir = [srcdir]
	for x in Utils.to_list(srcdir):
		if isinstance(x, Node.Node):
			y = x
		else:
			y = self.path.find_dir(x)
			if not y:
				self.bld.fatal('Could not find the folder %s from %s' % (x, self.path))
		tmp.append(y)
	tsk.srcdir = tmp
	# -source sets the language compatibility level
	if getattr(self, 'compat', None):
		tsk.env.append_value('JAVACFLAGS', ['-source', self.compat])
	# NOTE(review): def_attrs above sets a default 'sourcepath', so this
	# hasattr check looks like it is always true — confirm intent
	if hasattr(self, 'sourcepath'):
		fold = [isinstance(x, Node.Node) and x or self.path.find_dir(x) for x in self.to_list(self.sourcepath)]
		names = os.pathsep.join([x.srcpath() for x in fold])
	else:
		names = [x.srcpath() for x in tsk.srcdir]
	if names:
		tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names])
@feature('javac')
@after_method('apply_java')
def use_javac_files(self):
	"""
	Process the *use* attribute referring to other java compilations
	"""
	classpath = []
	self.uselib = self.to_list(getattr(self, 'uselib', []))
	get = self.bld.get_tgen_by_name
	for name in self.to_list(getattr(self, 'use', [])):
		try:
			tg = get(name)
		except Exception:
			# not a task generator name: treat it as a plain uselib entry
			self.uselib.append(name)
		else:
			# depend on the other generator's jar and add it to the classpath
			tg.post()
			classpath.append(tg.jar_task.outputs[0].abspath())
			self.javac_task.set_run_after(tg.jar_task)
	if classpath:
		self.env.append_value('CLASSPATH', classpath)
@feature('javac')
@after_method('apply_java', 'propagate_uselib_vars', 'use_javac_files')
def set_classpath(self):
	"""
	Set the CLASSPATH value on the *javac* task previously created.
	"""
	self.env.append_value('CLASSPATH', getattr(self, 'classpath', []))
	# a trailing separator is kept so callers may append entries verbatim
	joined = os.pathsep.join(self.env.CLASSPATH) + os.pathsep
	for tsk in self.tasks:
		tsk.env.CLASSPATH = joined
@feature('jar')
@after_method('apply_java', 'use_javac_files')
@before_method('process_source')
def jar_files(self):
	"""
	Create a jar task. There can be only one jar task by task generator.
	"""
	destfile = getattr(self, 'destfile', 'test.jar')
	jaropts = getattr(self, 'jaropts', [])
	manifest = getattr(self, 'manifest', None)
	# basedir is the directory whose contents get archived
	basedir = getattr(self, 'basedir', None)
	if basedir:
		if not isinstance(self.basedir, Node.Node):
			basedir = self.path.get_bld().make_node(basedir)
	else:
		basedir = self.path.get_bld()
	if not basedir:
		self.bld.fatal('Could not find the basedir %r for %r' % (self.basedir, self))
	self.jar_task = tsk = self.create_task('jar_create')
	if manifest:
		# 'm' tells jar to read the manifest file given right after the flags
		jarcreate = getattr(self, 'jarcreate', 'cfm')
		node = self.path.find_node(manifest)
		tsk.dep_nodes.append(node)
		jaropts.insert(0, node.abspath())
	else:
		jarcreate = getattr(self, 'jarcreate', 'cf')
	if not isinstance(destfile, Node.Node):
		destfile = self.path.find_or_declare(destfile)
	if not destfile:
		self.bld.fatal('invalid destfile %r for %r' % (destfile, self))
	tsk.set_outputs(destfile)
	tsk.basedir = basedir
	# '-C dir .' makes jar archive the directory contents without the prefix
	jaropts.append('-C')
	jaropts.append(basedir.bldpath())
	jaropts.append('.')
	tsk.env['JAROPTS'] = jaropts
	tsk.env['JARCREATE'] = jarcreate
	# the jar can only be built after the classes exist
	if getattr(self, 'javac_task', None):
		tsk.set_run_after(self.javac_task)
@feature('jar')
@after_method('jar_files')
def use_jar_files(self):
	"""
	Process the *use* attribute to set the build order on the
	tasks created by another task generator.
	"""
	self.uselib = self.to_list(getattr(self, 'uselib', []))
	get = self.bld.get_tgen_by_name
	for name in self.to_list(getattr(self, 'use', [])):
		try:
			tg = get(name)
		except Exception:
			# not a task generator name: treat it as a plain uselib entry
			self.uselib.append(name)
		else:
			tg.post()
			self.jar_task.run_after.update(tg.tasks)
class jar_create(Task.Task):
	"""
	Create a jar file
	"""
	color = 'GREEN'
	run_str = '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}'
	def runnable_status(self):
		"""
		Wait for dependent tasks to be executed, then read the
		files to update the list of inputs.
		"""
		for t in self.run_after:
			if not t.hasrun:
				return Task.ASK_LATER
		if not self.inputs:
			global JAR_RE
			try:
				# collect everything under basedir except the jar itself
				self.inputs = [x for x in self.basedir.ant_glob(JAR_RE, remove=False) if id(x) != id(self.outputs[0])]
			except Exception:
				raise Errors.WafError('Could not find the basedir %r for %r' % (self.basedir, self))
		return super(jar_create, self).runnable_status()
class javac(Task.Task):
	"""
	Compile java files
	"""
	color = 'BLUE'
	vars = ['CLASSPATH', 'JAVACFLAGS', 'JAVAC', 'OUTDIR']
	"""
	The javac task will be executed again if the variables CLASSPATH, JAVACFLAGS, JAVAC or OUTDIR change.
	"""
	def runnable_status(self):
		"""
		Wait for dependent tasks to be complete, then read the file system to find the input nodes.
		"""
		for t in self.run_after:
			if not t.hasrun:
				return Task.ASK_LATER
		if not self.inputs:
			global SOURCE_RE
			self.inputs = []
			# the .java inputs are discovered lazily from the source dirs
			for x in self.srcdir:
				self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False))
		return super(javac, self).runnable_status()
	def run(self):
		"""
		Execute the javac compiler
		"""
		env = self.env
		gen = self.generator
		bld = gen.bld
		wd = bld.bldnode.abspath()
		def to_list(xx):
			# normalize env values that may be a single string
			if isinstance(xx, str): return [xx]
			return xx
		cmd = []
		cmd.extend(to_list(env['JAVAC']))
		cmd.extend(['-classpath'])
		cmd.extend(to_list(env['CLASSPATH']))
		cmd.extend(['-d'])
		cmd.extend(to_list(env['OUTDIR']))
		cmd.extend(to_list(env['JAVACFLAGS']))
		files = [a.path_from(bld.bldnode) for a in self.inputs]
		# workaround for command line length limit:
		# http://support.microsoft.com/kb/830473
		tmp = None
		try:
			if len(str(files)) + len(str(cmd)) > 8192:
				# write the file list to a temporary @argfile instead
				(fd, tmp) = tempfile.mkstemp(dir=bld.bldnode.abspath())
				try:
					os.write(fd, '\n'.join(files).encode())
				finally:
					if tmp:
						os.close(fd)
				if Logs.verbose:
					Logs.debug('runner: %r' % (cmd + files))
				cmd.append('@' + tmp)
			else:
				cmd += files
			ret = self.exec_command(cmd, cwd=wd, env=env.env or None)
		finally:
			# remove the temporary argfile regardless of the outcome
			if tmp:
				os.remove(tmp)
		return ret
	def post_run(self):
		"""
		Record the signatures of the produced .class files; javac emits an
		unpredictable set of outputs (e.g. inner classes), so they are globbed.
		"""
		for n in self.generator.outdir.ant_glob('**/*.class'):
			n.sig = Utils.h_file(n.abspath()) # careful with this
		self.generator.bld.task_sigs[self.uid()] = self.cache_sig
@feature('javadoc')
@after_method('process_rule')
def create_javadoc(self):
	"""
	Creates a javadoc task (feature 'javadoc')
	Reads the task generator attributes ``classpath``, ``javadoc_package``
	and ``javadoc_output``.
	"""
	tsk = self.create_task('javadoc')
	tsk.classpath = getattr(self, 'classpath', [])
	# normalize to a list of package names
	self.javadoc_package = Utils.to_list(self.javadoc_package)
	# resolve the output directory to a node when given as a string
	if not isinstance(self.javadoc_output, Node.Node):
		self.javadoc_output = self.bld.path.find_or_declare(self.javadoc_output)
class javadoc(Task.Task):
	"""Run the ``javadoc`` program over the configured source packages"""
	color = 'BLUE'
	def __str__(self):
		return '%s: %s -> %s\n' % (self.__class__.__name__, self.generator.srcdir, self.generator.javadoc_output)
	def run(self):
		env = self.env
		bld = self.generator.bld
		wd = bld.bldnode.abspath()
		#add src node + bld node (for generated java code)
		srcpath = self.generator.path.abspath() + os.sep + self.generator.srcdir
		srcpath += os.pathsep
		srcpath += self.generator.path.get_bld().abspath() + os.sep + self.generator.srcdir
		classpath = env.CLASSPATH
		classpath += os.pathsep
		classpath += os.pathsep.join(self.classpath)
		# NOTE(review): classpath is already a str here, so this join is a no-op — confirm intent
		classpath = "".join(classpath)
		# keep the command around for debugging
		self.last_cmd = lst = []
		lst.extend(Utils.to_list(env['JAVADOC']))
		lst.extend(['-d', self.generator.javadoc_output.abspath()])
		lst.extend(['-sourcepath', srcpath])
		lst.extend(['-classpath', classpath])
		lst.extend(['-subpackages'])
		lst.extend(self.generator.javadoc_package)
		# drop empty arguments
		lst = [x for x in lst if x]
		self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None, quiet=0)
	def post_run(self):
		# record signatures for all generated documentation files
		nodes = self.generator.javadoc_output.ant_glob('**')
		for x in nodes:
			x.sig = Utils.h_file(x.abspath())
		self.generator.bld.task_sigs[self.uid()] = self.cache_sig
def configure(self):
	"""
	Detect the javac, java, jar and javadoc programs. When JAVA_HOME is set
	in the environment, its bin directory is searched first and the value is
	recorded in ``env.JAVA_HOME``; CLASSPATH is propagated as well.
	"""
	env = self.env
	search_path = self.environ['PATH'].split(os.pathsep)
	if 'JAVA_HOME' in self.environ:
		home = self.environ['JAVA_HOME']
		search_path.insert(0, os.path.join(home, 'bin'))
		env['JAVA_HOME'] = [home]
	for tool in ('javac', 'java', 'jar', 'javadoc'):
		self.find_program(tool, var=tool.upper(), path_list=search_path)
	if 'CLASSPATH' in self.environ:
		env['CLASSPATH'] = self.environ['CLASSPATH']
	if not env['JAR']:
		self.fatal('jar is required for making java packages')
	if not env['JAVAC']:
		self.fatal('javac is required for compiling java classes')
	env['JARCREATE'] = 'cf' # can use cvf
	env['JAVACFLAGS'] = []
@conf
def check_java_class(self, classname, with_classpath=None):
	"""
	Check if the specified java class exists
	:param classname: class to check, like java.util.HashMap
	:type classname: string
	:param with_classpath: additional classpath to give
	:type with_classpath: string
	:return: the exit status of the java test program (0 when the class was found)
	"""
	# compile and run a tiny probe program in a scratch directory
	javatestdir = '.waf-javatest'
	classpath = javatestdir
	if self.env['CLASSPATH']:
		classpath += os.pathsep + self.env['CLASSPATH']
	if isinstance(with_classpath, str):
		classpath += os.pathsep + with_classpath
	# start from a clean scratch directory
	shutil.rmtree(javatestdir, True)
	os.mkdir(javatestdir)
	Utils.writef(os.path.join(javatestdir, 'Test.java'), class_check_source)
	# Compile the source
	self.exec_command(self.env['JAVAC'] + [os.path.join(javatestdir, 'Test.java')], shell=False)
	# Try to run the app
	cmd = self.env['JAVA'] + ['-cp', classpath, 'Test', classname]
	self.to_log("%s\n" % str(cmd))
	# 'found' is actually the process exit code: 0 means the class exists
	found = self.exec_command(cmd, shell=False)
	self.msg('Checking for java class %s' % classname, not found)
	shutil.rmtree(javatestdir, True)
	return found
@conf
def check_jni_headers(conf):
	"""
	Check for jni headers and libraries. On success the conf.env variables xxx_JAVA are added for use in C/C++ targets::
		def options(opt):
			opt.load('compiler_c')
		def configure(conf):
			conf.load('compiler_c java')
			conf.check_jni_headers()
		def build(bld):
			bld.shlib(source='a.c', target='app', use='JAVA')
	"""
	if not conf.env.CC_NAME and not conf.env.CXX_NAME:
		conf.fatal('load a compiler first (gcc, g++, ..)')
	if not conf.env.JAVA_HOME:
		conf.fatal('set JAVA_HOME in the system environment')
	# jni requires the jvm
	javaHome = conf.env['JAVA_HOME'][0]
	dir = conf.root.find_dir(conf.env.JAVA_HOME[0] + '/include')
	if dir is None:
		dir = conf.root.find_dir(conf.env.JAVA_HOME[0] + '/../Headers') # think different?!
	if dir is None:
		conf.fatal('JAVA_HOME does not seem to be set properly')
	# collect the directories containing jni.h and jni_md.h
	f = dir.ant_glob('**/(jni|jni_md).h')
	incDirs = [x.parent.abspath() for x in f]
	# locate the jvm shared library under JAVA_HOME
	dir = conf.root.find_dir(conf.env.JAVA_HOME[0])
	f = dir.ant_glob('**/*jvm.(so|dll|dylib)')
	libDirs = [x.parent.abspath() for x in f] or [javaHome]
	# On windows, we need both the .dll and .lib to link. On my JDK, they are
	# in different directories...
	f = dir.ant_glob('**/*jvm.(lib)')
	if f:
		libDirs = [[x, y.parent.abspath()] for x in libDirs for y in f]
	# try each candidate lib dir until one links successfully;
	# the for/else runs conf.fatal only when no candidate worked
	for d in libDirs:
		try:
			conf.check(header_name='jni.h', define_name='HAVE_JNI_H', lib='jvm',
				libpath=d, includes=incDirs, uselib_store='JAVA', uselib='JAVA')
		except Exception:
			pass
		else:
			break
	else:
		conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs)
@@ -0,0 +1,90 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2006-2010 (ita) | |||||
""" | |||||
Support for the KDE4 libraries and msgfmt | |||||
""" | |||||
import os, sys, re | |||||
from waflib import Options, TaskGen, Task, Utils | |||||
from waflib.TaskGen import feature, after_method | |||||
@feature('msgfmt')
def apply_msgfmt(self):
	"""
	Process all languages to create .mo files and to install them::
		def build(bld):
			bld(features='msgfmt', langs='es de fr', appname='myapp', install_path='${KDE4_LOCALE_INSTALL_DIR}')
	"""
	for lang in self.to_list(self.langs):
		node = self.path.find_resource(lang + '.po')
		tsk = self.create_task('msgfmt', node, node.change_ext('.mo'))
		# only the last path component names the locale
		langname = lang.split('/')[-1]
		inst = getattr(self, 'install_path', '${KDE4_LOCALE_INSTALL_DIR}')
		self.bld.install_as(
			inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + getattr(self, 'appname', 'set_your_appname') + '.mo',
			tsk.outputs[0],
			chmod = getattr(self, 'chmod', Utils.O644))
class msgfmt(Task.Task):
	"""Run ``msgfmt`` to turn a .po catalogue into a binary .mo file"""
	run_str = '${MSGFMT} ${SRC} -o ${TGT}'
	color = 'BLUE'
def configure(self):
	"""
	Detect kde4-config and set various variables for the *use* system::
		def options(opt):
			opt.load('compiler_cxx kde4')
		def configure(conf):
			conf.load('compiler_cxx kde4')
		def build(bld):
			bld.program(source='main.c', target='app', use='KDECORE KIO KHTML')
	"""
	kdeconfig = self.find_program('kde4-config')
	prefix = self.cmd_and_log(kdeconfig + ['--prefix']).strip()
	# KDELibsDependencies.cmake may live in either of two locations
	fname = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
	try: os.stat(fname)
	except OSError:
		fname = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
		try: os.stat(fname)
		except OSError: self.fatal('could not open %s' % fname)
	try:
		txt = Utils.readf(fname)
	except EnvironmentError:
		self.fatal('could not read %s' % fname)
	# strip cmake line continuations and comments before parsing
	txt = txt.replace('\\\n', '\n')
	fu = re.compile('#(.*)\n')
	txt = fu.sub('', txt)
	# extract every cmake 'set(KEY "value")' assignment into the env
	setregexp = re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)')
	found = setregexp.findall(txt)
	for (_, key, val) in found:
		#print key, val
		self.env[key] = val
	# well well, i could just write an interpreter for cmake files
	self.env['LIB_KDECORE']= ['kdecore']
	self.env['LIB_KDEUI'] = ['kdeui']
	self.env['LIB_KIO'] = ['kio']
	self.env['LIB_KHTML'] = ['khtml']
	self.env['LIB_KPARTS'] = ['kparts']
	self.env['LIBPATH_KDECORE'] = [os.path.join(self.env.KDE4_LIB_INSTALL_DIR, 'kde4', 'devel'), self.env.KDE4_LIB_INSTALL_DIR]
	self.env['INCLUDES_KDECORE'] = [self.env['KDE4_INCLUDE_INSTALL_DIR']]
	self.env.append_value('INCLUDES_KDECORE', [self.env['KDE4_INCLUDE_INSTALL_DIR']+ os.sep + 'KDE'])
	self.find_program('msgfmt', var='MSGFMT')
@@ -0,0 +1,59 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Alex Rønne Petersen, 2012 (alexrp/Zor) | |||||
import sys | |||||
from waflib.Tools import ar, d | |||||
from waflib.Configure import conf | |||||
@conf
def find_ldc2(conf):
	"""
	Find the program *ldc2* and set the variable *D*
	"""
	conf.find_program(['ldc2'], var='D')
	# sanity check: ldc2 prints the DMD front-end version it is based on
	banner = conf.cmd_and_log(conf.env.D + ['-version'])
	if banner.find("based on DMD v2.") < 0:
		conf.fatal("detected compiler is not ldc2")
@conf
def common_flags_ldc2(conf):
	"""
	Set the D flags required by *ldc2*
	"""
	env = conf.env

	# compilation
	env['D_SRC_F'] = ['-c']
	env['D_TGT_F'] = '-of%s'
	env['DINC_ST'] = '-I%s'

	# linking (ldc2 forwards -L-prefixed options to the system linker)
	env['D_LINKER'] = env['D']
	env['DLNK_SRC_F'] = ''
	env['DLNK_TGT_F'] = '-of%s'
	env['DSHLIB_MARKER'] = env['DSTLIB_MARKER'] = ''
	env['DSTLIB_ST'] = env['DSHLIB_ST'] = '-L-l%s'
	env['DSTLIBPATH_ST'] = env['DLIBPATH_ST'] = '-L-L%s'
	env['LINKFLAGS_dshlib'] = ['-L-shared']
	env['LINKFLAGS'] = []

	# interface headers
	env['DHEADER_ext'] = '.di'
	env['DFLAGS_d_with_header'] = ['-H', '-Hf']
	env['D_HDR_F'] = '%s'

	env['DFLAGS_dshlib'] = ['-relocation-model=pic']
def configure(conf):
	"""
	Configuration for *ldc2*
	"""
	conf.find_ldc2()          # locate the compiler binary and sanity-check it
	conf.load('ar')           # static library support
	conf.load('d')            # generic D language support
	conf.common_flags_ldc2()  # ldc2-specific flag variables
	conf.d_platform_flags()   # platform-dependent flags
@@ -0,0 +1,38 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Sebastian Schlingmann, 2008 | |||||
# Thomas Nagy, 2008-2010 (ita) | |||||
""" | |||||
Lua support. | |||||
Compile *.lua* files into *.luac*:: | |||||
def configure(conf): | |||||
conf.load('lua') | |||||
conf.env.LUADIR = '/usr/local/share/myapp/scripts/' | |||||
def build(bld): | |||||
bld(source='foo.lua') | |||||
""" | |||||
from waflib.TaskGen import extension | |||||
from waflib import Task, Utils | |||||
@extension('.lua')
def add_lua(self, node):
	"""Create a luac task for each .lua source, installing the result if requested."""
	task = self.create_task('luac', node, node.change_ext('.luac'))
	# default destination: ${LUADIR} when LUADIR is set, otherwise no installation
	dest = getattr(self, 'install_path', self.env.LUADIR and '${LUADIR}' or None)
	if dest:
		self.bld.install_files(dest, task.outputs)
	return task
class luac(Task.Task):
	"""Compile and strip (-s) a lua file with luac"""
	color = 'PINK'
	run_str = '${LUAC} -s -o ${TGT} ${SRC}'
def configure(conf):
	"""
	Detect the luac compiler and set *conf.env.LUAC*
	"""
	# LUAC is referenced by the luac task's run_str
	conf.find_program('luac', var='LUAC')
@@ -0,0 +1,26 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2008-2010 (ita) | |||||
""" | |||||
Nasm tool (asm processing) | |||||
""" | |||||
import os | |||||
import waflib.Tools.asm # leave this | |||||
from waflib.TaskGen import feature | |||||
@feature('asm')
def apply_nasm_vars(self):
	"""provided for compatibility"""
	flags = self.to_list(getattr(self, 'nasm_flags', []))
	self.env.append_value('ASFLAGS', flags)
def configure(conf):
	"""
	Detect nasm/yasm and set the variable *AS*
	"""
	# find_program stores the result in conf.env.AS; the return value was
	# previously bound to an unused local, now dropped
	conf.find_program(['nasm', 'yasm'], var='AS')
	conf.env.AS_TGT_F = ['-o']
	conf.env.ASLNK_TGT_F = ['-o']
	conf.load('asm')
	conf.env.ASMPATH_ST = '-I%s' + os.sep
@@ -0,0 +1,163 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# andersg at 0x63.nu 2007 | |||||
# Thomas Nagy 2010 (ita) | |||||
""" | |||||
Support for Perl extensions. A C/C++ compiler is required:: | |||||
def options(opt): | |||||
opt.load('compiler_c perl') | |||||
def configure(conf): | |||||
conf.load('compiler_c perl') | |||||
conf.check_perl_version((5,6,0)) | |||||
conf.check_perl_ext_devel() | |||||
conf.check_perl_module('Cairo') | |||||
conf.check_perl_module('Devel::PPPort 4.89') | |||||
def build(bld): | |||||
bld( | |||||
features = 'c cshlib perlext', | |||||
source = 'Mytest.xs', | |||||
target = 'Mytest', | |||||
install_path = '${ARCHDIR_PERL}/auto') | |||||
bld.install_files('${ARCHDIR_PERL}', 'Mytest.pm') | |||||
""" | |||||
import os | |||||
from waflib import Task, Options, Utils | |||||
from waflib.Configure import conf | |||||
from waflib.TaskGen import extension, feature, before_method | |||||
@before_method('apply_incpaths', 'apply_link', 'propagate_uselib_vars')
@feature('perlext')
def init_perlext(self):
	"""
	Change the values of *cshlib_PATTERN* and *cxxshlib_PATTERN* to remove the
	*lib* prefix from library names.
	"""
	self.uselib = self.to_list(getattr(self, 'uselib', []))
	if 'PERLEXT' not in self.uselib:
		self.uselib.append('PERLEXT')
	# perl extensions are named Foo.<dlext>, without the 'lib' prefix
	self.env['cshlib_PATTERN'] = self.env['cxxshlib_PATTERN'] = self.env['perlext_PATTERN']
@extension('.xs')
def xsubpp_file(self, node):
	"""
	Create :py:class:`waflib.Tools.perl.xsubpp` tasks to process *.xs* files
	"""
	c_node = node.change_ext('.c')
	self.create_task('xsubpp', node, c_node)
	# compile the generated C file like any other source
	self.source.append(c_node)
class xsubpp(Task.Task):
	"""Run xsubpp to translate a *.xs* file into C code"""
	color = 'BLUE'
	ext_out = ['.h']
	run_str = '${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}'
@conf
def check_perl_version(self, minver=None):
	"""
	Check if Perl is installed, and set the variable PERL.

	:param minver: minimum version, for example (5, 6, 0)
	:type minver: tuple of int
	:return: False when perl is missing or too old, else True
	"""
	res = True
	if minver:
		cver = '.'.join(map(str,minver))
	else:
		cver = ''

	self.start_msg('Checking for minimum perl version %s' % cver)

	perl = getattr(Options.options, 'perlbinary', None)
	if not perl:
		perl = self.find_program('perl', var='PERL')
	if not perl:
		self.end_msg("Perl not found", color="YELLOW")
		return False
	self.env['PERL'] = perl

	# '%vd' prints the interpreter version in dotted-decimal form, e.g. 5.26.1
	version = self.cmd_and_log(self.env.PERL + ["-e", 'printf \"%vd\", $^V'])
	if not version:
		res = False
		version = "Unknown"
	elif minver is not None:  # idiom: 'is not None' instead of 'not ... is None'
		ver = tuple(map(int, version.split(".")))
		if ver < minver:
			res = False

	self.end_msg(version, color=res and "GREEN" or "YELLOW")
	return res
@conf
def check_perl_module(self, module):
	"""
	Check if specified perlmodule is installed.

	The minimum version can be specified by specifying it after modulename
	like this::

		def configure(conf):
			conf.check_perl_module("Some::Module 2.92")
	"""
	self.start_msg('perl module %s' % module)
	try:
		out = self.cmd_and_log(self.env.PERL + ['-e', 'use %s' % module])
	except Exception:
		# 'use' failed: the module (or the requested version) is missing
		self.end_msg(False)
		return None
	self.end_msg(out or True)
	return out
@conf
def check_perl_ext_devel(self):
	"""
	Check for configuration needed to build perl extensions.

	Sets different xxx_PERLEXT variables in the environment.

	Also sets the ARCHDIR_PERL variable useful as installation path,
	which can be overridden by ``--with-perl-archdir`` option.
	"""
	env = self.env
	perl = env.PERL
	if not perl:
		self.fatal('find perl first')

	def cmd_perl_config(s):
		# build a perl one-liner that prints an interpolated %Config value
		return perl + ['-MConfig', '-e', 'print \"%s\"' % s]
	def cfg_str(cfg):
		# run the one-liner and return its raw output
		return self.cmd_and_log(cmd_perl_config(cfg))
	def cfg_lst(cfg):
		# same, but split into a list of words
		return Utils.to_list(cfg_str(cfg))
	def find_xsubpp():
		# look for xsubpp inside the perl installation first, then fall back to PATH
		for var in ('privlib', 'vendorlib'):
			xsubpp = cfg_lst('$Config{%s}/ExtUtils/xsubpp$Config{exe_ext}' % var)
			if xsubpp and os.path.isfile(xsubpp[0]):
				return xsubpp
		return self.find_program('xsubpp')

	env['LINKFLAGS_PERLEXT'] = cfg_lst('$Config{lddlflags}')
	env['INCLUDES_PERLEXT'] = cfg_lst('$Config{archlib}/CORE')
	env['CFLAGS_PERLEXT'] = cfg_lst('$Config{ccflags} $Config{cccdlflags}')
	env['EXTUTILS_TYPEMAP'] = cfg_lst('$Config{privlib}/ExtUtils/typemap')
	env['XSUBPP'] = find_xsubpp()

	# installation directory: --with-perl-archdir wins over perl's sitearch
	if not getattr(Options.options, 'perlarchdir', None):
		env['ARCHDIR_PERL'] = cfg_str('$Config{sitearch}')
	else:
		env['ARCHDIR_PERL'] = getattr(Options.options, 'perlarchdir')

	# extension naming: Foo.<dlext>, no 'lib' prefix (see init_perlext)
	env['perlext_PATTERN'] = '%s.' + cfg_str('$Config{dlext}')
def options(opt):
	"""
	Add the ``--with-perl-archdir`` and ``--with-perl-binary`` command-line options.
	"""
	# defaults are None so that check_perl_version / check_perl_ext_devel detect sane values
	opt.add_option('--with-perl-binary', type='string', dest='perlbinary', help = 'Specify alternate perl binary', default=None)
	opt.add_option('--with-perl-archdir', type='string', dest='perlarchdir', help = 'Specify directory where to install arch specific files', default=None)
@@ -0,0 +1,594 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2007-2015 (ita) | |||||
# Gustavo Carneiro (gjc), 2007 | |||||
""" | |||||
Support for Python, detect the headers and libraries and provide | |||||
*use* variables to link C/C++ programs against them:: | |||||
def options(opt): | |||||
opt.load('compiler_c python') | |||||
def configure(conf): | |||||
conf.load('compiler_c python') | |||||
conf.check_python_version((2,4,2)) | |||||
conf.check_python_headers() | |||||
def build(bld): | |||||
bld.program(features='pyembed', source='a.c', target='myprog') | |||||
bld.shlib(features='pyext', source='b.c', target='mylib') | |||||
""" | |||||
import os, sys | |||||
from waflib import Utils, Options, Errors, Logs, Task, Node | |||||
from waflib.TaskGen import extension, before_method, after_method, feature | |||||
from waflib.Configure import conf | |||||
# Minimal C program that initializes and finalizes a Python interpreter;
# compiled during configuration to verify that headers and libraries link.
FRAG = '''
#include <Python.h>
#ifdef __cplusplus
extern "C" {
#endif
void Py_Initialize(void);
void Py_Finalize(void);
#ifdef __cplusplus
}
#endif
int main(int argc, char **argv)
{
(void)argc; (void)argv;
Py_Initialize();
Py_Finalize();
return 0;
}
'''
"""
Piece of C/C++ code used in :py:func:`waflib.Tools.python.check_python_headers`
"""

INST = '''
import sys, py_compile
py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3], True)
'''
"""
Piece of Python code used in :py:func:`waflib.Tools.python.pytask` for byte-compiling python files
"""

# default imports for get_python_variables when conf.python_imports is not set
DISTUTILS_IMP = ['from distutils.sysconfig import get_config_var, get_python_lib']
@before_method('process_source')
@feature('py')
def feature_py(self):
	"""
	Create tasks to byte-compile .py files and install them, if requested
	"""
	self.install_path = getattr(self, 'install_path', '${PYTHONDIR}')
	# install_from may be given either as a Node or as a path string
	src_dir = getattr(self, 'install_from', None)
	if src_dir and not isinstance(src_dir, Node.Node):
		src_dir = self.path.find_dir(src_dir)
	self.install_from = src_dir

	version = self.env.PYTHON_VERSION
	if not version:
		self.bld.fatal('Installing python files requires PYTHON_VERSION, try conf.check_python_version')
	# python >= 3.2: bytecode goes into __pycache__ (PEP 3147)
	if int(version.replace('.', '')) > 31:
		self.install_32 = True
@extension('.py')
def process_py(self, node):
	"""
	Add signature of .py file, so it will be byte-compiled when necessary

	:param node: the .py source node
	"""
	assert(node.get_bld_sig())
	assert(getattr(self, 'install_path')), 'add features="py"'

	# where to install the python file
	if self.install_path:
		if self.install_from:
			self.bld.install_files(self.install_path, [node], cwd=self.install_from, relative_trick=True)
		else:
			self.bld.install_files(self.install_path, [node], relative_trick=True)

	# which bytecode flavors to produce (set from the --nopyc/--nopyo options)
	lst = []
	if self.env.PYC:
		lst.append('pyc')
	if self.env.PYO:
		lst.append('pyo')

	# destination path recorded inside the bytecode (third argument of py_compile.compile in INST)
	if self.install_path:
		if self.install_from:
			pyd = Utils.subst_vars("%s/%s" % (self.install_path, node.path_from(self.install_from)), self.env)
		else:
			pyd = Utils.subst_vars("%s/%s" % (self.install_path, node.path_from(self.path)), self.env)
	else:
		pyd = node.abspath()

	for ext in lst:
		if self.env.PYTAG:
			# __pycache__ installation for python 3.2 - PEP 3147
			name = node.name[:-3]
			pyobj = node.parent.get_bld().make_node('__pycache__').make_node("%s.%s.%s" % (name, self.env.PYTAG, ext))
			pyobj.parent.mkdir()
		else:
			pyobj = node.change_ext(".%s" % ext)
		tsk = self.create_task(ext, node, pyobj)
		tsk.pyd = pyd

		if self.install_path:
			self.bld.install_files(os.path.dirname(pyd), pyobj, cwd=node.parent.get_bld(), relative_trick=True)
class pyc(Task.Task):
	"""
	Byte-compiling python files
	"""
	color = 'PINK'
	def run(self):
		# run the INST snippet with the configured interpreter:
		# argv = [source, target, recorded install path]
		interp = Utils.subst_vars('${PYTHON}', self.env)
		cmd = [interp, '-c', INST, self.inputs[0].abspath(), self.outputs[0].abspath(), self.pyd]
		return self.generator.bld.exec_command(cmd)
class pyo(Task.Task):
	"""
	Byte-compiling python files
	"""
	color = 'PINK'
	def run(self):
		# like pyc, but with the optimization flag (${PYFLAGS_OPT}, typically -O)
		interp = Utils.subst_vars('${PYTHON}', self.env)
		opt_flag = Utils.subst_vars('${PYFLAGS_OPT}', self.env)
		cmd = [interp, opt_flag, '-c', INST, self.inputs[0].abspath(), self.outputs[0].abspath(), self.pyd]
		return self.generator.bld.exec_command(cmd)
@feature('pyext')
@before_method('propagate_uselib_vars', 'apply_link')
@after_method('apply_bundle')
def init_pyext(self):
	"""
	Change the values of *cshlib_PATTERN* and *cxxshlib_PATTERN* to remove the
	*lib* prefix from library names.
	"""
	self.uselib = self.to_list(getattr(self, 'uselib', []))
	if 'PYEXT' not in self.uselib:
		self.uselib.append('PYEXT')

	# override shlib_PATTERN set by the osx module
	pattern = self.env.pyext_PATTERN
	self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.macbundle_PATTERN = pattern
	self.env.fcshlib_PATTERN = self.env.dshlib_PATTERN = pattern

	try:
		if not self.install_path:
			return
	except AttributeError:
		self.install_path = '${PYTHONARCHDIR}'
@feature('pyext')
@before_method('apply_link', 'apply_bundle')
def set_bundle(self):
	"""Mac-specific pyext extension that enables bundles from c_osx.py"""
	if Utils.unversioned_sys_platform() != 'darwin':
		return
	self.mac_bundle = True
@before_method('propagate_uselib_vars')
@feature('pyembed')
def init_pyembed(self):
	"""
	Add the PYEMBED variable.
	"""
	uselib = self.to_list(getattr(self, 'uselib', []))
	if 'PYEMBED' not in uselib:
		uselib.append('PYEMBED')
	self.uselib = uselib
@conf
def get_python_variables(self, variables, imports=None):
	"""
	Spawn a new python process to dump configuration variables

	:param variables: variables to print
	:type variables: list of string
	:param imports: one import by element
	:type imports: list of string
	:return: the variable values
	:rtype: list of string
	"""
	if not imports:
		try:
			imports = self.python_imports
		except AttributeError:
			imports = DISTUTILS_IMP

	# assemble a small script: the imports, then one print(repr(...)) per variable
	program = list(imports) # copy
	program.append('')
	for v in variables:
		program.append("print(repr(%s))" % v)

	os_env = dict(os.environ)
	try:
		del os_env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool
	except KeyError:
		pass

	try:
		out = self.cmd_and_log(self.env.PYTHON + ['-c', '\n'.join(program)], env=os_env)
	except Errors.WafError:
		self.fatal('The distutils module is unusable: install "python-devel"?')
	self.to_log(out)

	# parse the repr() output back into python values: None, quoted strings, integers;
	# anything else stops the scan
	return_values = []
	for s in out.splitlines():
		s = s.strip()
		if not s:
			continue
		if s == 'None':
			return_values.append(None)
		elif (s[0] == "'" and s[-1] == "'") or (s[0] == '"' and s[-1] == '"'):
			return_values.append(eval(s))
		elif s[0].isdigit():
			return_values.append(int(s))
		else: break
	return return_values
@conf
def python_cross_compile(self, features='pyembed pyext'):
	"""
	For cross-compilation purposes, it is possible to bypass the normal detection and set the flags that you want:
	PYTHON_VERSION='3.4' PYTAG='cpython34' pyext_PATTERN="%s.so" PYTHON_LDFLAGS='-lpthread -ldl' waf configure

	The following variables are used:
	PYTHON_VERSION    required
	PYTAG             required
	PYTHON_LDFLAGS    required
	pyext_PATTERN     required
	PYTHON_PYEXT_LDFLAGS
	PYTHON_PYEMBED_LDFLAGS

	:return: True when cross-compilation variables were found in the os environment, else False
	"""
	features = Utils.to_list(features)
	# cross-compilation mode is triggered only by the presence of one of the LDFLAGS variables
	if not ('PYTHON_LDFLAGS' in self.environ or 'PYTHON_PYEXT_LDFLAGS' in self.environ or 'PYTHON_PYEMBED_LDFLAGS' in self.environ):
		return False

	for x in 'PYTHON_VERSION PYTAG pyext_PATTERN'.split():
		if not x in self.environ:
			self.fatal('Please set %s in the os environment' % x)
		else:
			self.env[x] = self.environ[x]

	xx = self.env.CXX_NAME and 'cxx' or 'c'
	if 'pyext' in features:
		# PYTHON_PYEXT_LDFLAGS takes precedence over the generic PYTHON_LDFLAGS
		flags = self.environ.get('PYTHON_PYEXT_LDFLAGS', self.environ.get('PYTHON_LDFLAGS', None))
		if flags is None:
			self.fatal('No flags provided through PYTHON_PYEXT_LDFLAGS as required')
		else:
			self.parse_flags(flags, 'PYEXT')
		self.check(header_name='Python.h', define_name='HAVE_PYEXT', msg='Testing pyext configuration',
			features='%s %sshlib pyext' % (xx, xx), fragment=FRAG, errmsg='Could not build python extensions')
	if 'pyembed' in features:
		flags = self.environ.get('PYTHON_PYEMBED_LDFLAGS', self.environ.get('PYTHON_LDFLAGS', None))
		if flags is None:
			self.fatal('No flags provided through PYTHON_PYEMBED_LDFLAGS as required')
		else:
			self.parse_flags(flags, 'PYEMBED')
		self.check(header_name='Python.h', define_name='HAVE_PYEMBED', msg='Testing pyembed configuration',
			fragment=FRAG, errmsg='Could not build a python embedded interpreter', features='%s %sprogram pyembed' % (xx, xx))
	return True
@conf
def check_python_headers(conf, features='pyembed pyext'):
	"""
	Check for headers and libraries necessary to extend or embed python by using the module *distutils*.
	On success the environment variables xxx_PYEXT and xxx_PYEMBED are added:

	* PYEXT: for compiling python extensions
	* PYEMBED: for embedding a python interpreter

	:param features: which of 'pyembed'/'pyext' to configure for
	:type features: string or list of string
	"""
	features = Utils.to_list(features)
	assert ('pyembed' in features) or ('pyext' in features), "check_python_headers features must include 'pyembed' and/or 'pyext'"
	env = conf.env
	if not env['CC_NAME'] and not env['CXX_NAME']:
		conf.fatal('load a compiler first (gcc, g++, ..)')

	# bypass all the code below for cross-compilation
	if conf.python_cross_compile(features):
		return

	if not env['PYTHON_VERSION']:
		conf.check_python_version()

	pybin = env.PYTHON
	if not pybin:
		conf.fatal('Could not find the python executable')

	# so we actually do all this for compatibility reasons and for obtaining pyext_PATTERN below
	v = 'prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split()
	try:
		lst = conf.get_python_variables(["get_config_var('%s') or ''" % x for x in v])
	except RuntimeError:
		conf.fatal("Python development headers not found (-v for details).")

	vals = ['%s = %r' % (x, y) for (x, y) in zip(v, lst)]
	conf.to_log("Configuration returned from %r:\n%s\n" % (pybin, '\n'.join(vals)))

	dct = dict(zip(v, lst))
	x = 'MACOSX_DEPLOYMENT_TARGET'
	if dct[x]:
		env[x] = conf.environ[x] = dct[x]
	env['pyext_PATTERN'] = '%s' + dct['SO'] # not a mistake

	# Try to get pythonX.Y-config
	num = '.'.join(env['PYTHON_VERSION'].split('.')[:2])
	conf.find_program([''.join(pybin) + '-config', 'python%s-config' % num, 'python-config-%s' % num, 'python%sm-config' % num], var='PYTHON_CONFIG', msg="python-config", mandatory=False)

	if env.PYTHON_CONFIG:
		# python2.6-config requires 3 runs
		all_flags = [['--cflags', '--libs', '--ldflags']]
		if sys.hexversion < 0x2070000:
			all_flags = [[k] for k in all_flags[0]]

		xx = env.CXX_NAME and 'cxx' or 'c'
		if 'pyembed' in features:
			for flags in all_flags:
				conf.check_cfg(msg='Asking python-config for pyembed %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=flags)
			conf.check(header_name='Python.h', define_name='HAVE_PYEMBED', msg='Getting pyembed flags from python-config',
				fragment=FRAG, errmsg='Could not build a python embedded interpreter',
				features='%s %sprogram pyembed' % (xx, xx))
		if 'pyext' in features:
			for flags in all_flags:
				conf.check_cfg(msg='Asking python-config for pyext %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEXT', args=flags)
			conf.check(header_name='Python.h', define_name='HAVE_PYEXT', msg='Getting pyext flags from python-config',
				features='%s %sshlib pyext' % (xx, xx), fragment=FRAG, errmsg='Could not build python extensions')
		conf.define('HAVE_PYTHON_H', 1)
		return

	# No python-config, do something else on windows systems
	all_flags = dct['LDFLAGS'] + ' ' + dct['CFLAGS']
	conf.parse_flags(all_flags, 'PYEMBED')

	all_flags = dct['LDFLAGS'] + ' ' + dct['LDSHARED'] + ' ' + dct['CFLAGS']
	conf.parse_flags(all_flags, 'PYEXT')

	result = None
	if not dct["LDVERSION"]:
		dct["LDVERSION"] = env['PYTHON_VERSION']

	# further simplification will be complicated
	# try each plausible library name in each plausible location, stop at the first hit
	for name in ('python' + dct['LDVERSION'], 'python' + env['PYTHON_VERSION'] + 'm', 'python' + env['PYTHON_VERSION'].replace('.', '')):

		# LIBPATH_PYEMBED is already set; see if it works.
		if not result and env['LIBPATH_PYEMBED']:
			path = env['LIBPATH_PYEMBED']
			conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n" % path)
			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBPATH_PYEMBED' % name)

		if not result and dct['LIBDIR']:
			path = [dct['LIBDIR']]
			conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n" % path)
			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBDIR' % name)

		if not result and dct['LIBPL']:
			path = [dct['LIBPL']]
			conf.to_log("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in python_LIBPL' % name)

		if not result:
			path = [os.path.join(dct['prefix'], "libs")]
			conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in $prefix/libs' % name)

		if result:
			break # do not forget to set LIBPATH_PYEMBED

	if result:
		env['LIBPATH_PYEMBED'] = path
		env.append_value('LIB_PYEMBED', [name])
	else:
		conf.to_log("\n\n### LIB NOT FOUND\n")

	# under certain conditions, python extensions must link to
	# python libraries, not just python embedding programs.
	if Utils.is_win32 or dct['Py_ENABLE_SHARED']:
		env['LIBPATH_PYEXT'] = env['LIBPATH_PYEMBED']
		env['LIB_PYEXT'] = env['LIB_PYEMBED']

	conf.to_log("Include path for Python extensions (found via distutils module): %r\n" % (dct['INCLUDEPY'],))
	env['INCLUDES_PYEXT'] = [dct['INCLUDEPY']]
	env['INCLUDES_PYEMBED'] = [dct['INCLUDEPY']]

	# Code using the Python API needs to be compiled with -fno-strict-aliasing
	if env['CC_NAME'] == 'gcc':
		env.append_value('CFLAGS_PYEMBED', ['-fno-strict-aliasing'])
		env.append_value('CFLAGS_PYEXT', ['-fno-strict-aliasing'])
	if env['CXX_NAME'] == 'gcc':
		env.append_value('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing'])
		env.append_value('CXXFLAGS_PYEXT', ['-fno-strict-aliasing'])

	if env.CC_NAME == "msvc":
		# mirror the compile/link flags distutils would use for MSVC extensions
		from distutils.msvccompiler import MSVCCompiler
		dist_compiler = MSVCCompiler()
		dist_compiler.initialize()
		env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options)
		env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options)
		env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared)

	# See if it compiles
	conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', uselib='PYEMBED', fragment=FRAG, errmsg='Distutils not installed? Broken python installation? Get python-config now!')
@conf
def check_python_version(conf, minver=None):
	"""
	Check if the python interpreter is found matching a given minimum version.
	minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver.

	If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR'
	(eg. '2.4') of the actual python version found, and PYTHONDIR is
	defined, pointing to the site-packages directory appropriate for
	this python version, where modules/packages/extensions should be
	installed.

	:param minver: minimum version
	:type minver: tuple of int
	"""
	assert minver is None or isinstance(minver, tuple)
	pybin = conf.env['PYTHON']
	if not pybin:
		conf.fatal('could not find the python executable')

	# Get python version string
	cmd = pybin + ['-c', 'import sys\nfor x in sys.version_info: print(str(x))']
	Logs.debug('python: Running python command %r' % cmd)
	lines = conf.cmd_and_log(cmd).split()
	assert len(lines) == 5, "found %i lines, expected 5: %r" % (len(lines), lines)
	pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))

	# compare python version with the minimum required
	result = (minver is None) or (pyver_tuple >= minver)

	if result:
		# define useful environment variables
		pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
		conf.env['PYTHON_VERSION'] = pyver

		if 'PYTHONDIR' in conf.env:
			# Check if --pythondir was specified
			pydir = conf.env['PYTHONDIR']
		elif 'PYTHONDIR' in conf.environ:
			# Check environment for PYTHONDIR
			pydir = conf.environ['PYTHONDIR']
		else:
			# Finally, try to guess
			if Utils.is_win32:
				(python_LIBDEST, pydir) = conf.get_python_variables(
					["get_config_var('LIBDEST') or ''",
					"get_python_lib(standard_lib=0) or ''"])
			else:
				python_LIBDEST = None
				(pydir,) = conf.get_python_variables( ["get_python_lib(standard_lib=0, prefix='%s') or ''" % conf.env['PREFIX']])
			if python_LIBDEST is None:
				if conf.env['LIBDIR']:
					python_LIBDEST = os.path.join(conf.env['LIBDIR'], "python" + pyver)
				else:
					python_LIBDEST = os.path.join(conf.env['PREFIX'], "lib", "python" + pyver)

		if 'PYTHONARCHDIR' in conf.env:
			# Check if --pythonarchdir was specified
			pyarchdir = conf.env['PYTHONARCHDIR']
		elif 'PYTHONARCHDIR' in conf.environ:
			# Check environment for PYTHONARCHDIR (comment previously said PYTHONDIR)
			pyarchdir = conf.environ['PYTHONARCHDIR']
		else:
			# Finally, try to guess
			(pyarchdir, ) = conf.get_python_variables( ["get_python_lib(plat_specific=1, standard_lib=0, prefix='%s') or ''" % conf.env['PREFIX']])
			if not pyarchdir:
				pyarchdir = pydir

		if hasattr(conf, 'define'): # conf.define is added by the C tool, so may not exist
			conf.define('PYTHONDIR', pydir)
			conf.define('PYTHONARCHDIR', pyarchdir)

		conf.env['PYTHONDIR'] = pydir
		conf.env['PYTHONARCHDIR'] = pyarchdir

	# Feedback
	pyver_full = '.'.join(map(str, pyver_tuple[:3]))
	if minver is None:
		conf.msg('Checking for python version', pyver_full)
	else:
		minver_str = '.'.join(map(str, minver))
		# bug fix: the color used to be computed as
		#   ">= %s" % (minver_str,) and 'GREEN' or 'YELLOW'
		# which is a non-empty string, hence always truthy -> always GREEN;
		# the color must reflect whether the version check passed
		conf.msg('Checking for python version', pyver_tuple, result and 'GREEN' or 'YELLOW')

	if not result:
		conf.fatal('The python version is too old, expecting %r' % (minver,))
# Python snippet executed by check_python_module; %s is replaced by the module
# name. Prints the module's __version__ or the literal 'unknown version'.
PYTHON_MODULE_TEMPLATE = '''
import %s as current_module
version = getattr(current_module, '__version__', None)
if version is not None:
	print(str(version))
else:
	print('unknown version')
'''
@conf
def check_python_module(conf, module_name, condition=''):
	"""
	Check if the selected python interpreter can import the given python module::

		def configure(conf):
			conf.check_python_module('pygccxml')
			conf.check_python_module('re', condition="ver > num(2, 0, 4) and ver <= num(3, 0, 0)")

	:param module_name: module
	:type module_name: string
	:param condition: optional expression evaluated with ``ver`` (detected version) and ``num`` (version constructor)
	:type condition: string
	"""
	msg = "Checking for python module '%s'" % module_name
	if condition:
		msg = '%s (%s)' % (msg, condition)
	conf.start_msg(msg)
	try:
		# run PYTHON_MODULE_TEMPLATE: prints __version__ or 'unknown version'
		ret = conf.cmd_and_log(conf.env['PYTHON'] + ['-c', PYTHON_MODULE_TEMPLATE % module_name])
	except Exception:
		conf.end_msg(False)
		conf.fatal('Could not find the python module %r' % module_name)

	ret = ret.strip()
	if condition:
		conf.end_msg(ret)
		if ret == 'unknown version':
			conf.fatal('Could not check the %s version' % module_name)

		from distutils.version import LooseVersion
		def num(*k):
			# accept num(2, 0, 4) as well as num("2.0.4")
			if isinstance(k[0], int):
				return LooseVersion('.'.join([str(x) for x in k]))
			else:
				return LooseVersion(k[0])
		d = {'num': num, 'ver': LooseVersion(ret)}
		# NOTE: the condition string comes from the project wscript, not from untrusted input
		ev = eval(condition, {}, d)
		if not ev:
			conf.fatal('The %s version does not satisfy the requirements' % module_name)
	else:
		if ret == 'unknown version':
			conf.end_msg(True)
		else:
			conf.end_msg(ret)
def configure(conf):
	"""
	Detect the python interpreter
	"""
	env = conf.env
	opts = Options.options
	env['PYTHON'] = opts.python or os.environ.get('PYTHON', sys.executable)
	if opts.pythondir:
		env['PYTHONDIR'] = opts.pythondir
	if opts.pythonarchdir:
		env['PYTHONARCHDIR'] = opts.pythonarchdir
	conf.find_program('python', var='PYTHON')
	env['PYFLAGS'] = ''
	env['PYFLAGS_OPT'] = '-O'
	env['PYC'] = getattr(opts, 'pyc', 1)
	env['PYO'] = getattr(opts, 'pyo', 1)
	try:
		env.PYTAG = conf.cmd_and_log(env.PYTHON + ['-c', "import imp;print(imp.get_tag())"]).strip()
	except Errors.WafError:
		# best effort: some interpreters cannot report an import tag
		pass
def options(opt):
	"""
	Add python-specific options
	"""
	grp = opt.add_option_group("Python Options")
	grp.add_option('--nopyc', dest='pyc', action='store_false', default=1,
		help='Do not install bytecode compiled .pyc files (configuration) [Default:install]')
	grp.add_option('--nopyo', dest='pyo', action='store_false', default=1,
		help='Do not install optimised compiled .pyo files (configuration) [Default:install]')
	grp.add_option('--python', dest="python",
		help='python binary to be used [Default: %s]' % sys.executable)
	grp.add_option('--pythondir', dest='pythondir',
		help='Installation path for python modules (py, platform-independent .py and .pyc files)')
	grp.add_option('--pythonarchdir', dest='pythonarchdir',
		help='Installation path for python extension (pyext, platform-dependent .so or .dylib files)')
# ==== concatenation boundary: waflib/Tools/qt4.py ====
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2006-2010 (ita) | |||||
""" | |||||
Tool Description | |||||
================ | |||||
This tool helps with finding Qt4 tools and libraries, | |||||
and also provides syntactic sugar for using Qt4 tools. | |||||
The following snippet illustrates the tool usage:: | |||||
def options(opt): | |||||
opt.load('compiler_cxx qt4') | |||||
def configure(conf): | |||||
conf.load('compiler_cxx qt4') | |||||
def build(bld): | |||||
bld( | |||||
features = 'qt4 cxx cxxprogram', | |||||
uselib = 'QTCORE QTGUI QTOPENGL QTSVG', | |||||
source = 'main.cpp textures.qrc aboutDialog.ui', | |||||
target = 'window', | |||||
) | |||||
Here, the UI description and resource files will be processed | |||||
to generate code. | |||||
Usage | |||||
===== | |||||
Load the "qt4" tool. | |||||
You also need to edit your sources accordingly: | |||||
- the normal way of doing things is to have your C++ files | |||||
include the .moc file. | |||||
This is regarded as the best practice (and provides much faster | |||||
compilations). | |||||
It also implies that the include paths have been set properly.
- to have the include paths added automatically, use the following:: | |||||
from waflib.TaskGen import feature, before_method, after_method | |||||
@feature('cxx') | |||||
@after_method('process_source') | |||||
@before_method('apply_incpaths') | |||||
def add_includes_paths(self): | |||||
incs = set(self.to_list(getattr(self, 'includes', ''))) | |||||
for x in self.compiled_tasks: | |||||
incs.add(x.inputs[0].parent.path_from(self.path)) | |||||
self.includes = list(incs) | |||||
Note: another tool provides Qt processing that does not require | |||||
.moc includes, see 'playground/slow_qt/'. | |||||
A few options (--qt{dir,bin,...}) and environment variables | |||||
(QT4_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool, | |||||
tool path selection, etc; please read the source for more info. | |||||
""" | |||||
try: | |||||
from xml.sax import make_parser | |||||
from xml.sax.handler import ContentHandler | |||||
except ImportError: | |||||
has_xml = False | |||||
ContentHandler = object | |||||
else: | |||||
has_xml = True | |||||
import os, sys | |||||
from waflib.Tools import cxx | |||||
from waflib import Task, Utils, Options, Errors, Context | |||||
from waflib.TaskGen import feature, after_method, extension | |||||
from waflib.Configure import conf | |||||
from waflib import Logs | |||||
MOC_H = ['.h', '.hpp', '.hxx', '.hh']
"""
File extensions associated to the .moc files
"""

EXT_RCC = ['.qrc']
"""
File extension for the resource (.qrc) files
"""

EXT_UI = ['.ui']
"""
File extension for the user interface (.ui) files
"""

EXT_QT4 = ['.cpp', '.cc', '.cxx', '.C']
"""
File extensions of C++ files that may require a .moc processing
"""

# space-separated list of the Qt4 modules probed by default (see set_qt4_libs_to_check)
QT4_LIBS = "QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtXmlPatterns QtWebKit Qt3Support QtHelp QtScript QtDeclarative QtDesigner"
class qxx(Task.classes['cxx']):
	"""
	Each C++ file can have zero or several .moc files to create.
	They are known only when the files are scanned (preprocessor)
	To avoid scanning the c++ files each time (parsing C/C++), the results
	are retrieved from the task cache (bld.node_deps/bld.raw_deps).
	The moc tasks are also created *dynamically* during the build.
	"""

	def __init__(self, *k, **kw):
		Task.Task.__init__(self, *k, **kw)
		# set to 1 once the moc tasks for this file have been created
		self.moc_done = 0

	def runnable_status(self):
		"""
		Compute the task signature to make sure the scanner was executed. Create the
		moc tasks by using :py:meth:`waflib.Tools.qt4.qxx.add_moc_tasks` (if necessary),
		then postpone the task execution (there is no need to recompute the task signature).
		"""
		if self.moc_done:
			return Task.Task.runnable_status(self)
		else:
			for t in self.run_after:
				if not t.hasrun:
					return Task.ASK_LATER
			self.add_moc_tasks()
			return Task.Task.runnable_status(self)

	def create_moc_task(self, h_node, m_node):
		"""
		If several libraries use the same classes, it is possible that moc will run several times (Issue 1318)
		It is not possible to change the file names, but we can assume that the moc transformation will be identical,
		and the moc tasks can be shared in a global cache.

		The defines passed to moc will then depend on task generator order. If this is not acceptable, then
		use the tool slow_qt4 instead (and enjoy the slow builds... :-( )

		:param h_node: source node (header or C++ file) to run moc on
		:param m_node: output .moc node
		:return: the (possibly shared) moc task
		"""
		try:
			moc_cache = self.generator.bld.moc_cache
		except AttributeError:
			moc_cache = self.generator.bld.moc_cache = {}

		try:
			return moc_cache[h_node]
		except KeyError:
			tsk = moc_cache[h_node] = Task.classes['moc'](env=self.env, generator=self.generator)
			tsk.set_inputs(h_node)
			tsk.set_outputs(m_node)

			if self.generator:
				self.generator.tasks.append(tsk)

			# direct injection in the build phase (safe because called from the main thread)
			gen = self.generator.bld.producer
			gen.outstanding.insert(0, tsk)
			gen.total += 1

			return tsk

	def moc_h_ext(self):
		"""Return the list of header extensions to try when resolving a .moc file."""
		# FIX: 'ext' was previously unbound when Options.options lacks
		# 'qt_header_ext' (qt4 options not loaded), raising NameError below
		ext = []
		try:
			ext = Options.options.qt_header_ext.split()
		except AttributeError:
			pass
		if not ext:
			ext = MOC_H
		return ext

	def add_moc_tasks(self):
		"""
		Create the moc tasks by looking in ``bld.raw_deps[self.uid()]``
		"""
		node = self.inputs[0]
		bld = self.generator.bld

		try:
			# compute the signature once to know if there is a moc file to create
			self.signature()
		except KeyError:
			# the moc file may be referenced somewhere else
			pass
		else:
			# remove the signature, it must be recomputed with the moc task
			delattr(self, 'cache_sig')

		include_nodes = [node.parent] + self.generator.includes_nodes

		moctasks = []
		mocfiles = set([])
		for d in bld.raw_deps.get(self.uid(), []):
			if not d.endswith('.moc'):
				continue

			# process that base.moc only once
			if d in mocfiles:
				continue
			mocfiles.add(d)

			# find the source associated with the moc file
			h_node = None

			base2 = d[:-4]
			# try 'foo.moc' against headers 'foo.h', 'foo.hpp', ... first
			for x in include_nodes:
				for e in self.moc_h_ext():
					h_node = x.find_node(base2 + e)
					if h_node:
						break
				if h_node:
					m_node = h_node.change_ext('.moc')
					break
			else:
				# foo.cpp -> foo.cpp.moc
				for k in EXT_QT4:
					if base2.endswith(k):
						for x in include_nodes:
							h_node = x.find_node(base2)
							if h_node:
								break
						if h_node:
							m_node = h_node.change_ext(k + '.moc')
						break

			if not h_node:
				raise Errors.WafError('No source found for %r which is a moc file' % d)

			# create the moc task
			task = self.create_moc_task(h_node, m_node)
			moctasks.append(task)

		# simple scheduler dependency: run the moc task before others
		self.run_after.update(set(moctasks))
		self.moc_done = 1
class trans_update(Task.Task):
	"""Update a .ts files from a list of C++ files"""
	run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}'
	color   = 'BLUE'
# the .ts targets are updated in place by lupdate rather than created anew
Task.update_outputs(trans_update)
class XMLHandler(ContentHandler):
	"""
	SAX handler collecting the <file> entries of a *.qrc* document.
	"""
	def __init__(self):
		# character data accumulated for the <file> element being read
		self.buf = []
		# file names found in the document, in order of appearance
		self.files = []
	def startElement(self, name, attrs):
		if name == 'file':
			del self.buf[:]
	def endElement(self, name):
		if name == 'file':
			self.files.append(str(''.join(self.buf)))
	def characters(self, cars):
		self.buf.append(cars)
@extension(*EXT_RCC)
def create_rcc_task(self, node):
	"Create rcc and cxx tasks for *.qrc* files"
	# the resource file is first translated to C++, then compiled
	cpp_node = node.change_ext('_rc.cpp')
	self.create_task('rcc', node, cpp_node)
	obj_task = self.create_task('cxx', cpp_node, cpp_node.change_ext('.o'))
	if hasattr(self, 'compiled_tasks'):
		self.compiled_tasks.append(obj_task)
	else:
		self.compiled_tasks = [obj_task]
	return obj_task
@extension(*EXT_UI)
def create_uic_task(self, node):
	"hook for uic tasks"
	uictask = self.create_task('ui4', node)
	# output header named after ui_PATTERN (e.g. 'ui_%s.h'); node.name[:-3]
	# strips the '.ui' extension from the input file name
	uictask.outputs = [self.path.find_or_declare(self.env['ui_PATTERN'] % node.name[:-3])]
@extension('.ts')
def add_lang(self, node):
	"""Register a translation file (*.ts*) on the task generator's *lang* list."""
	existing = self.to_list(getattr(self, 'lang', []))
	self.lang = existing + [node]
@feature('qt4')
@after_method('apply_link')
def apply_qt4(self):
	"""
	Add MOC_FLAGS which may be necessary for moc::

		def build(bld):
			bld.program(features='qt4', source='main.cpp', target='app', use='QTCORE')

	The additional parameters are:

	:param lang: list of translation files (\*.ts) to process
	:type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
	:param update: whether to process the C++ files to update the \*.ts files (use **waf --translate**)
	:type update: bool
	:param langname: if given, transform the \*.ts files into a .qrc files to include in the binary file
	:type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
	"""
	if getattr(self, 'lang', None):
		qmtasks = []
		# one ts2qm task per translation file
		for x in self.to_list(self.lang):
			if isinstance(x, str):
				x = self.path.find_resource(x + '.ts')
			qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm')))

		if getattr(self, 'update', None) and Options.options.trans_qt4:
			# refresh the .ts files from the C++ sources and .ui files
			cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [
				a.inputs[0] for a in self.tasks if getattr(a, 'inputs', None) and a.inputs[0].name.endswith('.ui')]
			for x in qmtasks:
				self.create_task('trans_update', cxxnodes, x.inputs)

		if getattr(self, 'langname', None):
			# bundle the .qm files into a .qrc, compile it, and link it in
			qmnodes = [x.outputs[0] for x in qmtasks]
			rcnode = self.langname
			if isinstance(rcnode, str):
				rcnode = self.path.find_or_declare(rcnode + '.qrc')
			t = self.create_task('qm2rcc', qmnodes, rcnode)
			k = create_rcc_task(self, t.outputs[0])
			self.link_task.inputs.append(k.outputs[0])

	# moc does not take all compiler flags: forward only -D/-I, mapping the
	# MSVC forms /D and /I to their dash equivalents
	lst = []
	for flag in self.to_list(self.env['CXXFLAGS']):
		if len(flag) < 2: continue
		f = flag[0:2]
		if f in ('-D', '-I', '/D', '/I'):
			if (f[0] == '/'):
				lst.append('-' + flag[1:])
			else:
				lst.append(flag)
	self.env.append_value('MOC_FLAGS', lst)
@extension(*EXT_QT4)
def cxx_hook(self, node):
	"""
	Re-map C++ file extensions to the :py:class:`waflib.Tools.qt4.qxx` task.
	"""
	# qxx creates the required moc tasks dynamically before compiling
	return self.create_compiled_task('qxx', node)
class rcc(Task.Task):
	"""
	Process *.qrc* files
	"""
	color   = 'BLUE'
	run_str = '${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
	ext_out = ['.h']

	def rcname(self):
		"""Return the resource name passed to 'rcc -name' (input file name without its extension)."""
		return os.path.splitext(self.inputs[0].name)[0]

	def scan(self):
		"""Parse the *.qrc* files and return (dependency nodes, unresolved names)."""
		if not has_xml:
			Logs.error('no xml support was found, the rcc dependencies will be incomplete!')
			return ([], [])

		parser = make_parser()
		curHandler = XMLHandler()
		parser.setContentHandler(curHandler)
		# 'with' guarantees the file is closed even if parsing raises
		with open(self.inputs[0].abspath(), 'r') as fi:
			parser.parse(fi)

		nodes = []
		names = []
		root = self.inputs[0].parent
		for x in curHandler.files:
			# resources found in the tree become real dependencies; the
			# others are reported by name only
			nd = root.find_resource(x)
			if nd:
				nodes.append(nd)
			else:
				names.append(x)
		return (nodes, names)
class moc(Task.Task):
	"""
	Create *.moc* files
	"""
	color = 'BLUE'
	run_str = '${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
	def keyword(self):
		# verb shown in the build output, e.g. "Creating foo.moc"
		return "Creating"
	def __str__(self):
		# display the output path relative to the launch directory
		return self.outputs[0].path_from(self.generator.bld.launch_node())
class ui4(Task.Task):
	"""
	Process *.ui* files
	"""
	color = 'BLUE'
	run_str = '${QT_UIC} ${SRC} -o ${TGT}'
	# the generated header may be included by the C++ compilation tasks
	ext_out = ['.h']
class ts2qm(Task.Task):
	"""
	Create *.qm* files from *.ts* files
	"""
	color = 'BLUE'
	# QT_LRELEASE_FLAGS defaults to ['-silent'] (set in find_qt4_binaries)
	run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
class qm2rcc(Task.Task):
	"""
	Transform *.qm* files into *.rc* files
	"""
	color = 'BLUE'
	# the .qm files must be produced before they can be listed in the .qrc
	after = 'ts2qm'
	def run(self):
		"""Create a qrc file including the inputs"""
		# each .qm file is referenced relative to the output .qrc location
		txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
		code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
		self.outputs[0].write(code)
def configure(self):
	"""
	Besides the configuration options, the environment variable QT4_ROOT may be used
	to give the location of the qt4 libraries (absolute path).

	The detection will use the program *pkg-config* through :py:func:`waflib.Tools.config_c.check_cfg`
	"""
	# order matters: qmake must be found before the libraries can be queried
	self.find_qt4_binaries()
	self.set_qt4_libs_to_check()
	self.set_qt4_defines()
	self.find_qt4_libraries()
	self.add_qt4_rpath()
	self.simplify_qt4_libs()
@conf
def find_qt4_binaries(self):
	"""
	Detect qmake, uic, moc, rcc, lrelease and lupdate, and set the
	corresponding QT_* variables and flag patterns on conf.env.
	"""
	env = self.env
	opt = Options.options

	qtdir = getattr(opt, 'qtdir', '')
	qtbin = getattr(opt, 'qtbin', '')

	paths = []

	if qtdir:
		qtbin = os.path.join(qtdir, 'bin')

	# the qt directory has been given from QT4_ROOT - deduce the qt binary path
	if not qtdir:
		qtdir = os.environ.get('QT4_ROOT', '')
		qtbin = os.environ.get('QT4_BIN', None) or os.path.join(qtdir, 'bin')

	if qtbin:
		paths = [qtbin]

	# no qtdir, look in the path and in /usr/local/Trolltech
	if not qtdir:
		paths = os.environ.get('PATH', '').split(os.pathsep)
		paths.append('/usr/share/qt4/bin/')
		try:
			lst = Utils.listdir('/usr/local/Trolltech/')
		except OSError:
			pass
		else:
			if lst:
				lst.sort()
				lst.reverse()

				# keep the highest version
				qtdir = '/usr/local/Trolltech/%s/' % lst[0]
				qtbin = os.path.join(qtdir, 'bin')
				paths.append(qtbin)

	# at the end, try to find qmake in the paths given
	# keep the one with the highest version
	cand = None
	prev_ver = ['4', '0', '0']
	for qmk in ('qmake-qt4', 'qmake4', 'qmake'):
		try:
			qmake = self.find_program(qmk, path_list=paths)
		except self.errors.ConfigurationError:
			pass
		else:
			try:
				version = self.cmd_and_log(qmake + ['-query', 'QT_VERSION']).strip()
			except self.errors.WafError:
				pass
			else:
				if version:
					# NOTE(review): versions compare as lists of strings, so a
					# hypothetical '4.10' would sort before '4.9' — acceptable
					# for the historical qt4 version range, but not general
					new_ver = version.split('.')
					if new_ver > prev_ver:
						cand = qmake
						prev_ver = new_ver
	if cand:
		self.env.QMAKE = cand
	else:
		self.fatal('Could not find qmake for qt4')

	qtbin = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_BINS']).strip() + os.sep

	def find_bin(lst, var):
		# store the first program of 'lst' found on 'paths' into env[var]
		if var in env:
			return
		for f in lst:
			try:
				ret = self.find_program(f, path_list=paths)
			except self.errors.ConfigurationError:
				pass
			else:
				env[var]=ret
				break

	find_bin(['uic-qt3', 'uic3'], 'QT_UIC3')
	find_bin(['uic-qt4', 'uic'], 'QT_UIC')
	if not env.QT_UIC:
		self.fatal('cannot find the uic compiler for qt4')

	# reject a qt3-era uic, which reports a ' 3.' version string
	self.start_msg('Checking for uic version')
	uicver = self.cmd_and_log(env.QT_UIC + ["-version"], output=Context.BOTH)
	uicver = ''.join(uicver).strip()
	uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '')
	self.end_msg(uicver)
	if uicver.find(' 3.') != -1:
		self.fatal('this uic compiler is for qt3, add uic for qt4 to your path')

	find_bin(['moc-qt4', 'moc'], 'QT_MOC')
	find_bin(['rcc-qt4', 'rcc'], 'QT_RCC')
	find_bin(['lrelease-qt4', 'lrelease'], 'QT_LRELEASE')
	find_bin(['lupdate-qt4', 'lupdate'], 'QT_LUPDATE')

	env['UIC3_ST']= '%s -o %s'
	env['UIC_ST'] = '%s -o %s'
	env['MOC_ST'] = '-o'
	env['ui_PATTERN'] = 'ui_%s.h'
	env['QT_LRELEASE_FLAGS'] = ['-silent']
	env.MOCCPPPATH_ST = '-I%s'
	env.MOCDEFINES_ST = '-D%s'
@conf
def find_qt4_libraries(self):
	"""
	Locate the Qt4 libraries and headers, preferring pkg-config; fall back to
	a manual filesystem search (frameworks on darwin, .so/.a elsewhere,
	versioned release/debug names on win32).
	"""
	qtlibs = getattr(Options.options, 'qtlibs', None) or os.environ.get("QT4_LIBDIR", None)
	if not qtlibs:
		try:
			qtlibs = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip()
		except Errors.WafError:
			qtdir = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip() + os.sep
			qtlibs = os.path.join(qtdir, 'lib')
	self.msg('Found the Qt4 libraries in', qtlibs)

	qtincludes = os.environ.get("QT4_INCLUDES", None) or self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip()
	env = self.env
	if not 'PKG_CONFIG_PATH' in os.environ:
		os.environ['PKG_CONFIG_PATH'] = '%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib' % (qtlibs, qtlibs)

	try:
		# QT4_XCOMPILE forces the manual (non-pkg-config) detection below
		if os.environ.get("QT4_XCOMPILE", None):
			raise self.errors.ConfigurationError()
		self.check_cfg(atleast_pkgconfig_version='0.1')
	except self.errors.ConfigurationError:
		for i in self.qt4_vars:
			uselib = i.upper()
			if Utils.unversioned_sys_platform() == "darwin":
				# Since at least qt 4.7.3 each library locates in separate directory
				frameworkName = i + ".framework"
				qtDynamicLib = os.path.join(qtlibs, frameworkName, i)
				if os.path.exists(qtDynamicLib):
					env.append_unique('FRAMEWORK_' + uselib, i)
					self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
				else:
					self.msg('Checking for %s' % i, False, 'YELLOW')
				env.append_unique('INCLUDES_' + uselib, os.path.join(qtlibs, frameworkName, 'Headers'))
			elif env.DEST_OS != "win32":
				qtDynamicLib = os.path.join(qtlibs, "lib" + i + ".so")
				qtStaticLib = os.path.join(qtlibs, "lib" + i + ".a")
				if os.path.exists(qtDynamicLib):
					env.append_unique('LIB_' + uselib, i)
					self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
				elif os.path.exists(qtStaticLib):
					env.append_unique('LIB_' + uselib, i)
					self.msg('Checking for %s' % i, qtStaticLib, 'GREEN')
				else:
					self.msg('Checking for %s' % i, False, 'YELLOW')
				env.append_unique('LIBPATH_' + uselib, qtlibs)
				env.append_unique('INCLUDES_' + uselib, qtincludes)
				env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
			else:
				# Release library names are like QtCore4
				for k in ("lib%s.a", "lib%s4.a", "%s.lib", "%s4.lib"):
					lib = os.path.join(qtlibs, k % i)
					if os.path.exists(lib):
						# keep the version/suffix part of the matched pattern
						env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')])
						self.msg('Checking for %s' % i, lib, 'GREEN')
						break
				else:
					self.msg('Checking for %s' % i, False, 'YELLOW')
				env.append_unique('LIBPATH_' + uselib, qtlibs)
				env.append_unique('INCLUDES_' + uselib, qtincludes)
				env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))

				# Debug library names are like QtCore4d
				uselib = i.upper() + "_debug"
				for k in ("lib%sd.a", "lib%sd4.a", "%sd.lib", "%sd4.lib"):
					lib = os.path.join(qtlibs, k % i)
					if os.path.exists(lib):
						env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')])
						self.msg('Checking for %s' % i, lib, 'GREEN')
						break
				else:
					self.msg('Checking for %s' % i, False, 'YELLOW')
				env.append_unique('LIBPATH_' + uselib, qtlibs)
				env.append_unique('INCLUDES_' + uselib, qtincludes)
				env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
	else:
		# pkg-config is available: let it provide flags for each module
		for i in self.qt4_vars_debug + self.qt4_vars:
			self.check_cfg(package=i, args='--cflags --libs', mandatory=False)
@conf
def simplify_qt4_libs(self):
	"""
	Shorten the command-lines: drop from each module's LIBPATH the
	directories already present on the QtCore library path.
	"""
	env = self.env
	def process_lib(vars_, coreval):
		for d in vars_:
			var = d.upper()
			if var == 'QTCORE':
				continue
			value = env['LIBPATH_' + var]
			if value:
				core = env[coreval]
				env['LIBPATH_' + var] = [lib for lib in value if lib not in core]
	process_lib(self.qt4_vars, 'LIBPATH_QTCORE')
	process_lib(self.qt4_vars_debug, 'LIBPATH_QTCORE_DEBUG')
@conf
def add_qt4_rpath(self):
	"""Set RPATH_* for the qt4 libraries when --want-rpath was given."""
	env = self.env
	if not getattr(Options.options, 'want_rpath', False):
		return
	def process_rpath(vars_, coreval):
		for d in vars_:
			var = d.upper()
			value = env['LIBPATH_' + var]
			if not value:
				continue
			core = env[coreval]
			accu = []
			for lib in value:
				# QtCore keeps every path; other modules skip paths already on the core path
				if var != 'QTCORE' and lib in core:
					continue
				accu.append('-Wl,--rpath=' + lib)
			env['RPATH_' + var] = accu
	process_rpath(self.qt4_vars, 'LIBPATH_QTCORE')
	process_rpath(self.qt4_vars_debug, 'LIBPATH_QTCORE_DEBUG')
@conf
def set_qt4_libs_to_check(self):
	"""Initialize *qt4_vars* and *qt4_vars_debug* (the Qt4 modules to probe)."""
	self.qt4_vars = Utils.to_list(getattr(self, 'qt4_vars', QT4_LIBS))
	if hasattr(self, 'qt4_vars_debug'):
		dbg = self.qt4_vars_debug
	else:
		dbg = [a + '_debug' for a in self.qt4_vars]
	self.qt4_vars_debug = Utils.to_list(dbg)
@conf
def set_qt4_defines(self):
	"""On win32, define QT_<MODULE>_LIB for every probed Qt4 module (release and debug)."""
	if sys.platform != 'win32':
		return
	for mod in self.qt4_vars:
		base = mod.upper()
		# 'QtCore' -> macro 'QT_CORE_LIB'
		macro = 'QT_%s_LIB' % mod[2:].upper()
		self.env.append_unique('DEFINES_%s' % base, macro)
		self.env.append_unique('DEFINES_%s_DEBUG' % base, macro)
def options(opt):
	"""
	Command-line options
	"""
	opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries')
	opt.add_option('--header-ext',
		type='string',
		default='',
		help='header extension for moc files',
		dest='qt_header_ext')
	for name in ('qtdir', 'qtbin', 'qtlibs'):
		opt.add_option('--' + name, type='string', default='', dest=name)
	opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt4", default=False)
# ==== concatenation boundary: waflib/Tools/qt5.py ====
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2006-2015 (ita) | |||||
""" | |||||
Tool Description | |||||
================ | |||||
This tool helps with finding Qt5 tools and libraries, | |||||
and also provides syntactic sugar for using Qt5 tools. | |||||
The following snippet illustrates the tool usage:: | |||||
def options(opt): | |||||
opt.load('compiler_cxx qt5') | |||||
def configure(conf): | |||||
conf.load('compiler_cxx qt5') | |||||
def build(bld): | |||||
bld( | |||||
features = 'qt5 cxx cxxprogram', | |||||
uselib = 'QTCORE QTGUI QTOPENGL QTSVG', | |||||
source = 'main.cpp textures.qrc aboutDialog.ui', | |||||
target = 'window', | |||||
) | |||||
Here, the UI description and resource files will be processed | |||||
to generate code. | |||||
Usage | |||||
===== | |||||
Load the "qt5" tool. | |||||
You also need to edit your sources accordingly: | |||||
- the normal way of doing things is to have your C++ files | |||||
include the .moc file. | |||||
This is regarded as the best practice (and provides much faster | |||||
compilations). | |||||
It also implies that the include paths have been set properly.
- to have the include paths added automatically, use the following:: | |||||
from waflib.TaskGen import feature, before_method, after_method | |||||
@feature('cxx') | |||||
@after_method('process_source') | |||||
@before_method('apply_incpaths') | |||||
def add_includes_paths(self): | |||||
incs = set(self.to_list(getattr(self, 'includes', ''))) | |||||
for x in self.compiled_tasks: | |||||
incs.add(x.inputs[0].parent.path_from(self.path)) | |||||
self.includes = list(incs) | |||||
Note: another tool provides Qt processing that does not require | |||||
.moc includes, see 'playground/slow_qt/'. | |||||
A few options (--qt{dir,bin,...}) and environment variables | |||||
(QT5_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool, | |||||
tool path selection, etc; please read the source for more info. | |||||
""" | |||||
try: | |||||
from xml.sax import make_parser | |||||
from xml.sax.handler import ContentHandler | |||||
except ImportError: | |||||
has_xml = False | |||||
ContentHandler = object | |||||
else: | |||||
has_xml = True | |||||
import os, sys | |||||
from waflib.Tools import cxx | |||||
from waflib import Task, Utils, Options, Errors, Context | |||||
from waflib.TaskGen import feature, after_method, extension | |||||
from waflib.Configure import conf | |||||
from waflib import Logs | |||||
MOC_H = ['.h', '.hpp', '.hxx', '.hh']
"""
File extensions associated to the .moc files
"""

EXT_RCC = ['.qrc']
"""
File extension for the resource (.qrc) files
"""

EXT_UI = ['.ui']
"""
File extension for the user interface (.ui) files
"""

EXT_QT5 = ['.cpp', '.cc', '.cxx', '.C']
"""
File extensions of C++ files that may require a .moc processing
"""

# whitespace-separated list of the Qt5 modules probed by default
QT5_LIBS = '''
qtmain
Qt5Bluetooth
Qt5CLucene
Qt5Concurrent
Qt5Core
Qt5DBus
Qt5Declarative
Qt5DesignerComponents
Qt5Designer
Qt5Gui
Qt5Help
Qt5MultimediaQuick_p
Qt5Multimedia
Qt5MultimediaWidgets
Qt5Network
Qt5Nfc
Qt5OpenGL
Qt5Positioning
Qt5PrintSupport
Qt5Qml
Qt5QuickParticles
Qt5Quick
Qt5QuickTest
Qt5Script
Qt5ScriptTools
Qt5Sensors
Qt5SerialPort
Qt5Sql
Qt5Svg
Qt5Test
Qt5WebKit
Qt5WebKitWidgets
Qt5Widgets
Qt5WinExtras
Qt5X11Extras
Qt5XmlPatterns
Qt5Xml'''
class qxx(Task.classes['cxx']): | |||||
""" | |||||
Each C++ file can have zero or several .moc files to create. | |||||
They are known only when the files are scanned (preprocessor) | |||||
To avoid scanning the c++ files each time (parsing C/C++), the results | |||||
are retrieved from the task cache (bld.node_deps/bld.raw_deps). | |||||
The moc tasks are also created *dynamically* during the build. | |||||
""" | |||||
def __init__(self, *k, **kw): | |||||
Task.Task.__init__(self, *k, **kw) | |||||
self.moc_done = 0 | |||||
def runnable_status(self): | |||||
""" | |||||
Compute the task signature to make sure the scanner was executed. Create the | |||||
moc tasks by using :py:meth:`waflib.Tools.qt5.qxx.add_moc_tasks` (if necessary), | |||||
then postpone the task execution (there is no need to recompute the task signature). | |||||
""" | |||||
if self.moc_done: | |||||
return Task.Task.runnable_status(self) | |||||
else: | |||||
for t in self.run_after: | |||||
if not t.hasrun: | |||||
return Task.ASK_LATER | |||||
self.add_moc_tasks() | |||||
return Task.Task.runnable_status(self) | |||||
def create_moc_task(self, h_node, m_node): | |||||
""" | |||||
If several libraries use the same classes, it is possible that moc will run several times (Issue 1318) | |||||
It is not possible to change the file names, but we can assume that the moc transformation will be identical, | |||||
and the moc tasks can be shared in a global cache. | |||||
The defines passed to moc will then depend on task generator order. If this is not acceptable, then | |||||
use the tool slow_qt5 instead (and enjoy the slow builds... :-( ) | |||||
""" | |||||
try: | |||||
moc_cache = self.generator.bld.moc_cache | |||||
except AttributeError: | |||||
moc_cache = self.generator.bld.moc_cache = {} | |||||
try: | |||||
return moc_cache[h_node] | |||||
except KeyError: | |||||
tsk = moc_cache[h_node] = Task.classes['moc'](env=self.env, generator=self.generator) | |||||
tsk.set_inputs(h_node) | |||||
tsk.set_outputs(m_node) | |||||
if self.generator: | |||||
self.generator.tasks.append(tsk) | |||||
# direct injection in the build phase (safe because called from the main thread) | |||||
gen = self.generator.bld.producer | |||||
gen.outstanding.insert(0, tsk) | |||||
gen.total += 1 | |||||
return tsk | |||||
else: | |||||
# remove the signature, it must be recomputed with the moc task | |||||
delattr(self, 'cache_sig') | |||||
def moc_h_ext(self):
	"""
	Return the list of file extensions to try when looking for the moc input file.

	The ``--header-ext`` command-line option takes precedence; when absent or
	empty, fall back to the default MOC_H extensions.

	:return: list of extension strings (e.g. ['.h', '.hpp'])
	"""
	ext = []  # bug fix: without this init, the except path left 'ext' unbound (NameError)
	try:
		ext = Options.options.qt_header_ext.split()
	except AttributeError:
		# option not registered or not a string: use the defaults below
		pass
	if not ext:
		ext = MOC_H
	return ext
def add_moc_tasks(self):
	"""
	Create the moc tasks by looking in ``bld.raw_deps[self.uid()]``

	For each ``*.moc`` entry found by the scanner, locate the corresponding
	header (or C++ file), create/share a moc task for it, and make this task
	run after all the moc tasks. Sets ``self.moc_done`` when finished.
	"""
	node = self.inputs[0]
	bld = self.generator.bld

	try:
		# compute the signature once to know if there is a moc file to create
		self.signature()
	except KeyError:
		# the moc file may be referenced somewhere else
		pass
	else:
		# remove the signature, it must be recomputed with the moc task
		delattr(self, 'cache_sig')

	# search order: next to the source file first, then the include paths
	include_nodes = [node.parent] + self.generator.includes_nodes

	moctasks = []
	mocfiles = set([])
	for d in bld.raw_deps.get(self.uid(), []):
		if not d.endswith('.moc'):
			continue

		# process that base.moc only once
		if d in mocfiles:
			continue
		mocfiles.add(d)

		# find the source associated with the moc file
		h_node = None
		base2 = d[:-4]
		for x in include_nodes:
			for e in self.moc_h_ext():
				h_node = x.find_node(base2 + e)
				if h_node:
					break
			if h_node:
				m_node = h_node.change_ext('.moc')
				break
		else:
			# foo.cpp -> foo.cpp.moc
			for k in EXT_QT5:
				if base2.endswith(k):
					for x in include_nodes:
						h_node = x.find_node(base2)
						if h_node:
							break
					if h_node:
						m_node = h_node.change_ext(k + '.moc')
					break
			if not h_node:
				raise Errors.WafError('No source found for %r which is a moc file' % d)

		# create the moc task
		task = self.create_moc_task(h_node, m_node)
		moctasks.append(task)

	# simple scheduler dependency: run the moc task before others
	self.run_after.update(set(moctasks))
	self.moc_done = 1
class trans_update(Task.Task):
	"""Refresh a *.ts* translation file from a list of C++ sources (lupdate)"""
	color = 'BLUE'
	run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}'
Task.update_outputs(trans_update)
class XMLHandler(ContentHandler):
	"""
	SAX content handler that collects the <file> entries of a *.qrc* document
	"""
	def __init__(self):
		# characters accumulated for the <file> element currently open
		self.buf = []
		# file names extracted so far
		self.files = []
	def startElement(self, name, attrs):
		# reset the character buffer whenever a new <file> element starts
		if name == 'file':
			self.buf = []
	def endElement(self, name):
		# a <file> element just closed: record its text content
		if name == 'file':
			self.files.append(str(''.join(self.buf)))
	def characters(self, cars):
		self.buf.append(cars)
@extension(*EXT_RCC)
def create_rcc_task(self, node):
	"""Create an rcc task plus a cxx task compiling its generated source for a *.qrc* file"""
	rc_cpp = node.change_ext('_rc.cpp')
	self.create_task('rcc', node, rc_cpp)
	cxx_task = self.create_task('cxx', rc_cpp, rc_cpp.change_ext('.o'))
	try:
		self.compiled_tasks.append(cxx_task)
	except AttributeError:
		# first compiled task of this generator
		self.compiled_tasks = [cxx_task]
	return cxx_task
@extension(*EXT_UI)
def create_uic_task(self, node):
	"""Create a ui5 task turning a *.ui* file into a ui_*.h header"""
	header = self.path.find_or_declare(self.env['ui_PATTERN'] % node.name[:-3])
	tsk = self.create_task('ui5', node)
	tsk.outputs = [header]
@extension('.ts')
def add_lang(self, node):
	"""Accumulate *.ts* translation files on *self.lang*"""
	current = self.to_list(getattr(self, 'lang', []))
	self.lang = current + [node]
@feature('qt5')
@after_method('apply_link')
def apply_qt5(self):
	r"""
	Add MOC_FLAGS which may be necessary for moc::

		def build(bld):
			bld.program(features='qt5', source='main.cpp', target='app', use='QTCORE')

	The additional parameters are:

	:param lang: list of translation files (\*.ts) to process
	:type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
	:param update: whether to process the C++ files to update the \*.ts files (use **waf --translate**)
	:type update: bool
	:param langname: if given, transform the \*.ts files into a .qrc files to include in the binary file
	:type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
	"""
	if getattr(self, 'lang', None):
		# create a ts2qm task per translation file
		qmtasks = []
		for x in self.to_list(self.lang):
			if isinstance(x, str):
				x = self.path.find_resource(x + '.ts')
			qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm')))

		if getattr(self, 'update', None) and Options.options.trans_qt5:
			# collect the C++ sources and .ui inputs to refresh the .ts files
			cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [
				a.inputs[0] for a in self.tasks if getattr(a, 'inputs', None) and a.inputs[0].name.endswith('.ui')]
			for x in qmtasks:
				self.create_task('trans_update', cxxnodes, x.inputs)

		if getattr(self, 'langname', None):
			# bundle the .qm files into a .qrc, compile it and link it in
			qmnodes = [x.outputs[0] for x in qmtasks]
			rcnode = self.langname
			if isinstance(rcnode, str):
				rcnode = self.path.find_or_declare(rcnode + '.qrc')
			t = self.create_task('qm2rcc', qmnodes, rcnode)
			k = create_rcc_task(self, t.outputs[0])
			self.link_task.inputs.append(k.outputs[0])

	# forward the -D/-I (or msvc /D, /I) flags from CXXFLAGS to moc
	lst = []
	for flag in self.to_list(self.env['CXXFLAGS']):
		if len(flag) < 2: continue
		f = flag[0:2]
		if f in ('-D', '-I', '/D', '/I'):
			if (f[0] == '/'):
				# normalize msvc-style flags to the dash form understood by moc
				lst.append('-' + flag[1:])
			else:
				lst.append(flag)
	self.env.append_value('MOC_FLAGS', lst)
@extension(*EXT_QT5)
def cxx_hook(self, node):
	"""
	Route C++ sources through the :py:class:`waflib.Tools.qt5.qxx` task class
	so that the moc processing happens.
	"""
	return self.create_compiled_task('qxx', node)
class rcc(Task.Task):
	"""
	Process *.qrc* files with the Qt resource compiler
	"""
	color = 'BLUE'
	run_str = '${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
	ext_out = ['.h']
	def rcname(self):
		# resource name passed to rcc: the input file name without its extension
		return os.path.splitext(self.inputs[0].name)[0]
	def scan(self):
		"""Parse the *.qrc* file and return the resource dependencies"""
		if not has_xml:
			Logs.error('no xml support was found, the rcc dependencies will be incomplete!')
			return ([], [])

		parser = make_parser()
		handler = XMLHandler()
		parser.setContentHandler(handler)
		fi = open(self.inputs[0].abspath(), 'r')
		try:
			parser.parse(fi)
		finally:
			fi.close()

		# resolve the referenced files relative to the qrc location
		root = self.inputs[0].parent
		nodes = []
		names = []
		for x in handler.files:
			nd = root.find_resource(x)
			if nd:
				nodes.append(nd)
			else:
				names.append(x)
		return (nodes, names)
class moc(Task.Task):
	"""
	Run the Qt meta-object compiler to produce *.moc* files
	"""
	color = 'BLUE'
	run_str = '${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
class ui5(Task.Task):
	"""
	Run uic on *.ui* designer files to generate headers
	"""
	color = 'BLUE'
	run_str = '${QT_UIC} ${SRC} -o ${TGT}'
	ext_out = ['.h']
class ts2qm(Task.Task):
	"""
	Compile *.ts* translation sources into binary *.qm* files (lrelease)
	"""
	color = 'BLUE'
	run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
class qm2rcc(Task.Task):
	"""
	Write a *.qrc* resource file that references the compiled *.qm* files
	"""
	color = 'BLUE'
	after = 'ts2qm'
	def run(self):
		"""Create a qrc file including the inputs"""
		dest_dir = self.outputs[0].parent
		entries = ['<file>%s</file>' % k.path_from(dest_dir) for k in self.inputs]
		txt = '\n'.join(entries)
		code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
		self.outputs[0].write(code)
def configure(self):
	"""
	Besides the configuration options, the environment variable QT5_ROOT may be used
	to give the location of the qt5 libraries (absolute path).

	The detection will use the program *pkg-config* through :py:func:`waflib.Tools.config_c.check_cfg`

	The call order below matters: the binaries must be found first (qmake is
	queried by the library detection), and the rpath/simplification steps
	post-process the LIBPATH_* variables set by find_qt5_libraries.
	"""
	self.find_qt5_binaries()
	self.set_qt5_libs_to_check()
	self.set_qt5_defines()
	self.find_qt5_libraries()
	self.add_qt5_rpath()
	self.simplify_qt5_libs()
@conf
def find_qt5_binaries(self):
	"""
	Detect the Qt5 programs (qmake, uic, moc, rcc, lrelease, lupdate) and set
	the corresponding configuration variables (QMAKE, QT_UIC, QT_MOC, ...).

	Search order: the --qtdir/--qtbin options, then the QT5_ROOT/QT5_BIN
	environment variables, then PATH and /usr/local/Trolltech.

	:raises: a fatal configuration error when qmake or uic cannot be found
	"""
	env = self.env
	opt = Options.options

	qtdir = getattr(opt, 'qtdir', '')
	qtbin = getattr(opt, 'qtbin', '')

	paths = []

	if qtdir:
		qtbin = os.path.join(qtdir, 'bin')

	# the qt directory has been given from QT5_ROOT - deduce the qt binary path
	if not qtdir:
		qtdir = os.environ.get('QT5_ROOT', '')
		qtbin = os.environ.get('QT5_BIN', None) or os.path.join(qtdir, 'bin')

	if qtbin:
		paths = [qtbin]

	# no qtdir, look in the path and in /usr/local/Trolltech
	if not qtdir:
		paths = os.environ.get('PATH', '').split(os.pathsep)
		paths.append('/usr/share/qt5/bin/')
		try:
			lst = Utils.listdir('/usr/local/Trolltech/')
		except OSError:
			pass
		else:
			if lst:
				lst.sort()
				lst.reverse()

				# keep the highest version
				qtdir = '/usr/local/Trolltech/%s/' % lst[0]
				qtbin = os.path.join(qtdir, 'bin')
				paths.append(qtbin)

	# at the end, try to find qmake in the paths given
	# keep the one with the highest version
	cand = None
	prev_ver = ['5', '0', '0']
	for qmk in ('qmake-qt5', 'qmake5', 'qmake'):
		try:
			qmake = self.find_program(qmk, path_list=paths)
		except self.errors.ConfigurationError:
			pass
		else:
			try:
				version = self.cmd_and_log(qmake + ['-query', 'QT_VERSION']).strip()
			except self.errors.WafError:
				pass
			else:
				if version:
					# NOTE(review): lexicographic comparison of version components
					# as strings; misorders e.g. '5.10' vs '5.9' - confirm upstream
					new_ver = version.split('.')
					if new_ver > prev_ver:
						cand = qmake
						prev_ver = new_ver

	# qmake could not be found easily, rely on qtchooser
	if not cand:
		try:
			self.find_program('qtchooser')
		except self.errors.ConfigurationError:
			pass
		else:
			cmd = self.env.QTCHOOSER + ['-qt=5', '-run-tool=qmake']
			try:
				version = self.cmd_and_log(cmd + ['-query', 'QT_VERSION'])
			except self.errors.WafError:
				pass
			else:
				cand = cmd

	if cand:
		self.env.QMAKE = cand
	else:
		self.fatal('Could not find qmake for qt5')

	# ask qmake for the binary directory and search it first
	self.env.QT_INSTALL_BINS = qtbin = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_BINS']).strip() + os.sep
	paths.insert(0, qtbin)

	def find_bin(lst, var):
		# set env[var] to the first program of lst found in paths, if not set already
		if var in env:
			return
		for f in lst:
			try:
				ret = self.find_program(f, path_list=paths)
			except self.errors.ConfigurationError:
				pass
			else:
				env[var] = ret
				break

	find_bin(['uic-qt5', 'uic'], 'QT_UIC')
	if not env.QT_UIC:
		self.fatal('cannot find the uic compiler for qt5')

	self.start_msg('Checking for uic version')
	uicver = self.cmd_and_log(env.QT_UIC + ['-version'], output=Context.BOTH)
	uicver = ''.join(uicver).strip()
	uicver = uicver.replace('Qt User Interface Compiler ', '').replace('User Interface Compiler for Qt', '')
	self.end_msg(uicver)
	if uicver.find(' 3.') != -1 or uicver.find(' 4.') != -1:
		# bug fix: the message used to say "qt3 or qt5" although the condition
		# above detects a qt3/qt4 uic
		self.fatal('this uic compiler is for qt3 or qt4, add uic for qt5 to your path')

	find_bin(['moc-qt5', 'moc'], 'QT_MOC')
	find_bin(['rcc-qt5', 'rcc'], 'QT_RCC')
	find_bin(['lrelease-qt5', 'lrelease'], 'QT_LRELEASE')
	find_bin(['lupdate-qt5', 'lupdate'], 'QT_LUPDATE')

	env['UIC_ST'] = '%s -o %s'
	env['MOC_ST'] = '-o'
	env['ui_PATTERN'] = 'ui_%s.h'
	env['QT_LRELEASE_FLAGS'] = ['-silent']
	env.MOCCPPPATH_ST = '-I%s'
	env.MOCDEFINES_ST = '-D%s'
@conf
def find_qt5_libraries(self):
	"""
	Detect the Qt5 libraries and fill in the LIB_*/LIBPATH_*/INCLUDES_*
	(or FRAMEWORK_* on osx) variables for each name in ``self.qt5_vars``.

	pkg-config is used when available; otherwise the libraries are looked up
	on disk with platform-specific naming conventions.
	"""
	# library directory: --qtlibs option, then QT5_LIBDIR, then qmake queries
	qtlibs = getattr(Options.options, 'qtlibs', None) or os.environ.get("QT5_LIBDIR", None)
	if not qtlibs:
		try:
			qtlibs = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip()
		except Errors.WafError:
			qtdir = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip() + os.sep
			qtlibs = os.path.join(qtdir, 'lib')
	self.msg('Found the Qt5 libraries in', qtlibs)

	qtincludes = os.environ.get("QT5_INCLUDES", None) or self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip()
	env = self.env
	if not 'PKG_CONFIG_PATH' in os.environ:
		os.environ['PKG_CONFIG_PATH'] = '%s:%s/pkgconfig:/usr/lib/qt5/lib/pkgconfig:/opt/qt5/lib/pkgconfig:/usr/lib/qt5/lib:/opt/qt5/lib' % (qtlibs, qtlibs)
	try:
		# cross-compilation skips pkg-config on purpose
		if os.environ.get("QT5_XCOMPILE", None):
			raise self.errors.ConfigurationError()
		self.check_cfg(atleast_pkgconfig_version='0.1')
	except self.errors.ConfigurationError:
		# no pkg-config: probe the filesystem for each library
		for i in self.qt5_vars:
			uselib = i.upper()
			if Utils.unversioned_sys_platform() == "darwin":
				# Since at least qt 4.7.3 each library locates in separate directory
				frameworkName = i + ".framework"
				qtDynamicLib = os.path.join(qtlibs, frameworkName, i)
				if os.path.exists(qtDynamicLib):
					env.append_unique('FRAMEWORK_' + uselib, i)
					self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
				else:
					self.msg('Checking for %s' % i, False, 'YELLOW')
				env.append_unique('INCLUDES_' + uselib, os.path.join(qtlibs, frameworkName, 'Headers'))
			elif env.DEST_OS != "win32":
				# unix: prefer the shared library, fall back to the static one
				qtDynamicLib = os.path.join(qtlibs, "lib" + i + ".so")
				qtStaticLib = os.path.join(qtlibs, "lib" + i + ".a")
				if os.path.exists(qtDynamicLib):
					env.append_unique('LIB_' + uselib, i)
					self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
				elif os.path.exists(qtStaticLib):
					env.append_unique('LIB_' + uselib, i)
					self.msg('Checking for %s' % i, qtStaticLib, 'GREEN')
				else:
					self.msg('Checking for %s' % i, False, 'YELLOW')
				env.append_unique('LIBPATH_' + uselib, qtlibs)
				env.append_unique('INCLUDES_' + uselib, qtincludes)
				env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
			else:
				# Release library names are like QtCore5
				for k in ("lib%s.a", "lib%s5.a", "%s.lib", "%s5.lib"):
					lib = os.path.join(qtlibs, k % i)
					if os.path.exists(lib):
						env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')])
						self.msg('Checking for %s' % i, lib, 'GREEN')
						break
				else:
					self.msg('Checking for %s' % i, False, 'YELLOW')
				env.append_unique('LIBPATH_' + uselib, qtlibs)
				env.append_unique('INCLUDES_' + uselib, qtincludes)
				env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))

				# Debug library names are like QtCore5d
				uselib = i.upper() + "_debug"
				for k in ("lib%sd.a", "lib%sd5.a", "%sd.lib", "%sd5.lib"):
					lib = os.path.join(qtlibs, k % i)
					if os.path.exists(lib):
						env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')])
						self.msg('Checking for %s' % i, lib, 'GREEN')
						break
				else:
					self.msg('Checking for %s' % i, False, 'YELLOW')
				env.append_unique('LIBPATH_' + uselib, qtlibs)
				env.append_unique('INCLUDES_' + uselib, qtincludes)
				env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
	else:
		# pkg-config is available: let it provide flags for every library
		for i in self.qt5_vars_debug + self.qt5_vars:
			self.check_cfg(package=i, args='--cflags --libs', mandatory=False)
@conf
def simplify_qt5_libs(self):
	"""
	Shorten the command-lines: drop from each LIBPATH_* variable the paths
	that are already present in LIBPATH_QTCORE.
	"""
	env = self.env
	def trim(var_names, core_var):
		for name in var_names:
			var = name.upper()
			if var == 'QTCORE':
				continue
			value = env['LIBPATH_' + var]
			if value:
				core = env[core_var]
				env['LIBPATH_' + var] = [p for p in value if p not in core]
	trim(self.qt5_vars, 'LIBPATH_QTCORE')
	trim(self.qt5_vars_debug, 'LIBPATH_QTCORE_DEBUG')
@conf
def add_qt5_rpath(self):
	"""
	Derive RPATH_* variables from the LIBPATH_* ones when --want-rpath is given.
	"""
	env = self.env
	if not getattr(Options.options, 'want_rpath', False):
		return
	def set_rpath(var_names, core_var):
		for name in var_names:
			var = name.upper()
			value = env['LIBPATH_' + var]
			if value:
				core = env[core_var]
				accu = []
				for lib in value:
					# paths already covered by QTCORE are skipped for the other libs
					if var != 'QTCORE' and lib in core:
						continue
					accu.append('-Wl,--rpath=' + lib)
				env['RPATH_' + var] = accu
	set_rpath(self.qt5_vars, 'LIBPATH_QTCORE')
	set_rpath(self.qt5_vars_debug, 'LIBPATH_QTCORE_DEBUG')
@conf
def set_qt5_libs_to_check(self):
	"""
	Initialize ``self.qt5_vars`` / ``self.qt5_vars_debug`` as lists, defaulting
	to QT5_LIBS and to the '_debug'-suffixed names respectively.
	"""
	self.qt5_vars = Utils.to_list(getattr(self, 'qt5_vars', QT5_LIBS))
	if not hasattr(self, 'qt5_vars_debug'):
		self.qt5_vars_debug = [a + '_debug' for a in self.qt5_vars]
	self.qt5_vars_debug = Utils.to_list(self.qt5_vars_debug)
@conf
def set_qt5_defines(self):
	"""
	On win32 only: add the QT_<NAME>_LIB define for each library (release and
	debug variants).
	"""
	if sys.platform != 'win32':
		return
	for lib in self.qt5_vars:
		# e.g. QtCore -> macro QT_CORE_LIB on variables DEFINES_QTCORE[_DEBUG]
		macro = 'QT_%s_LIB' % lib[2:].upper()
		base = lib.upper()
		self.env.append_unique('DEFINES_%s' % base, macro)
		self.env.append_unique('DEFINES_%s_DEBUG' % base, macro)
def options(opt):
	"""
	Command-line options
	"""
	opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries')
	opt.add_option('--header-ext', type='string', default='', help='header extension for moc files', dest='qt_header_ext')
	# location overrides for the qt installation
	for name in ('qtdir', 'qtbin', 'qtlibs'):
		opt.add_option('--' + name, type='string', default='', dest=name)
	opt.add_option('--translate', action='store_true', help='collect translation strings', dest='trans_qt5', default=False)
@@ -0,0 +1,193 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# daniel.svensson at purplescout.se 2008 | |||||
# Thomas Nagy 2010 (ita) | |||||
""" | |||||
Support for Ruby extensions. A C/C++ compiler is required:: | |||||
def options(opt): | |||||
opt.load('compiler_c ruby') | |||||
def configure(conf): | |||||
conf.load('compiler_c ruby') | |||||
conf.check_ruby_version((1,8,0)) | |||||
conf.check_ruby_ext_devel() | |||||
conf.check_ruby_module('libxml') | |||||
def build(bld): | |||||
bld( | |||||
features = 'c cshlib rubyext', | |||||
source = 'rb_mytest.c', | |||||
target = 'mytest_ext', | |||||
install_path = '${ARCHDIR_RUBY}') | |||||
bld.install_files('${LIBDIR_RUBY}', 'Mytest.rb') | |||||
""" | |||||
import os | |||||
from waflib import Task, Options, Utils | |||||
from waflib.TaskGen import before_method, feature, after_method, Task, extension | |||||
from waflib.Configure import conf | |||||
@feature('rubyext')
@before_method('apply_incpaths', 'apply_lib_vars', 'apply_bundle', 'apply_link')
def init_rubyext(self):
	"""
	Add required variables for ruby extensions
	"""
	self.install_path = '${ARCHDIR_RUBY}'
	uselib = self.to_list(getattr(self, 'uselib', ''))
	# make sure the ruby-specific uselib variables are propagated
	for name in ('RUBY', 'RUBYEXT'):
		if name not in uselib:
			uselib.append(name)
	self.uselib = uselib
@feature('rubyext')
@before_method('apply_link', 'propagate_uselib')
def apply_ruby_so_name(self):
	"""
	Strip the *lib* prefix from ruby extensions
	"""
	pattern = self.env['rubyext_PATTERN']
	self.env['cshlib_PATTERN'] = pattern
	self.env['cxxshlib_PATTERN'] = pattern
@conf
def check_ruby_version(self, minver=()):
	"""
	Check that ruby is installed and optionally that it is recent enough.
	On success the RUBY and RUBY_VERSION variables are set in the environment.
	The ruby binary can be overridden with the ``--with-ruby-binary`` option.
	"""
	if Options.options.rubybinary:
		self.env.RUBY = Options.options.rubybinary
	else:
		self.find_program('ruby', var='RUBY')
	ruby = self.env.RUBY

	try:
		version = self.cmd_and_log(ruby + ['-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
	except Exception:
		self.fatal('could not determine ruby version')
	self.env.RUBY_VERSION = version

	try:
		ver = tuple(map(int, version.split(".")))
	except Exception:
		self.fatal('unsupported ruby version %r' % version)

	cver = ''
	if not minver:
		cver = ver
	else:
		if ver < minver:
			self.fatal('ruby is too old %r' % ver)
		cver = '.'.join([str(x) for x in minver])
	self.msg('Checking for ruby version %s' % str(minver or ''), cver)
@conf
def check_ruby_ext_devel(self):
	"""
	Check if a ruby extension can be created.

	Requires a detected ruby interpreter and a loaded C/C++ compiler; fills in
	the RUBYEXT include/lib/link variables and the install directories
	(ARCHDIR_RUBY, LIBDIR_RUBY) from the interpreter's RbConfig data.
	"""
	if not self.env.RUBY:
		self.fatal('ruby detection is required first')

	if not self.env.CC_NAME and not self.env.CXX_NAME:
		self.fatal('load a c/c++ compiler first')

	version = tuple(map(int, self.env.RUBY_VERSION.split(".")))

	def read_out(cmd):
		# run a ruby one-liner and split its output into a list
		return Utils.to_list(self.cmd_and_log(self.env.RUBY + ['-rrbconfig', '-e', cmd]))

	def read_config(key):
		# fetch one RbConfig::CONFIG entry from the interpreter
		return read_out('puts RbConfig::CONFIG[%r]' % key)

	# NOTE: removed an unused local ('ruby = self.env["RUBY"]') present before
	archdir = read_config('archdir')
	cpppath = archdir

	if version >= (1, 9, 0):
		# 1.9+ moved the headers out of archdir
		ruby_hdrdir = read_config('rubyhdrdir')
		cpppath += ruby_hdrdir
		cpppath += [os.path.join(ruby_hdrdir[0], read_config('arch')[0])]

	self.check(header_name='ruby.h', includes=cpppath, errmsg='could not find ruby header file')

	self.env.LIBPATH_RUBYEXT = read_config('libdir')
	self.env.LIBPATH_RUBYEXT += archdir
	self.env.INCLUDES_RUBYEXT = cpppath
	self.env.CFLAGS_RUBYEXT = read_config('CCDLFLAGS')
	self.env.rubyext_PATTERN = '%s.' + read_config('DLEXT')[0]

	# ok this is really stupid, but the command and flags are combined.
	# so we try to find the first argument...
	flags = read_config('LDSHARED')
	while flags and flags[0][0] != '-':
		flags = flags[1:]

	# we also want to strip out the deprecated ppc flags
	if len(flags) > 1 and flags[1] == "ppc":
		flags = flags[2:]

	self.env.LINKFLAGS_RUBYEXT = flags
	self.env.LINKFLAGS_RUBYEXT += read_config('LIBS')
	self.env.LINKFLAGS_RUBYEXT += read_config('LIBRUBYARG_SHARED')

	if Options.options.rubyarchdir:
		self.env.ARCHDIR_RUBY = Options.options.rubyarchdir
	else:
		self.env.ARCHDIR_RUBY = read_config('sitearchdir')[0]

	if Options.options.rubylibdir:
		self.env.LIBDIR_RUBY = Options.options.rubylibdir
	else:
		self.env.LIBDIR_RUBY = read_config('sitelibdir')[0]
@conf
def check_ruby_module(self, module_name):
	"""
	Check if the selected ruby interpreter can require the given ruby module::

		def configure(conf):
			conf.check_ruby_module('libxml')

	:param module_name: module
	:type  module_name: string
	"""
	self.start_msg('Ruby module %s' % module_name)
	script = 'require \'%s\';puts 1' % module_name
	try:
		self.cmd_and_log(self.env.RUBY + ['-e', script])
	except Exception:
		self.end_msg(False)
		self.fatal('Could not find the ruby module %r' % module_name)
	self.end_msg(True)
@extension('.rb')
def process(self, node):
	"""Create a run_ruby task for every *.rb* source file (the task handle is not needed)."""
	self.create_task('run_ruby', node)
class run_ruby(Task.Task):
	"""
	Execute ruby files picked up by the .rb extension::

		def options(opt):
			opt.load('ruby')

		def configure(ctx):
			ctx.check_ruby_version()

		def build(bld):
			bld.env['RBFLAGS'] = '-e puts "hello world"'
			bld(source='a_ruby_file.rb')
	"""
	run_str = '${RUBY} ${RBFLAGS} -I ${SRC[0].parent.abspath()} ${SRC}'
def options(opt):
	"""
	Add the ``--with-ruby-archdir``, ``--with-ruby-libdir`` and ``--with-ruby-binary`` options
	"""
	for flag, dest, msg in (
		('--with-ruby-archdir', 'rubyarchdir', 'Specify directory where to install arch specific files'),
		('--with-ruby-libdir', 'rubylibdir', 'Specify alternate ruby library path'),
		('--with-ruby-binary', 'rubybinary', 'Specify alternate ruby binary'),
	):
		opt.add_option(flag, type='string', dest=dest, help=msg)
@@ -0,0 +1,71 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2006-2010 (ita) | |||||
# Ralf Habacker, 2006 (rh) | |||||
from waflib import Utils | |||||
from waflib.Tools import ccroot, ar | |||||
from waflib.Configure import conf | |||||
@conf
def find_scc(conf):
	"""
	Detect the Sun C compiler
	"""
	cc = conf.find_program('cc', var='CC')
	# a genuine Sun compiler answers the -flags query
	try:
		conf.cmd_and_log(cc + ['-flags'])
	except Exception:
		conf.fatal('%r is not a Sun compiler' % cc)
	conf.env.CC_NAME = 'sun'
	conf.get_suncc_version(cc)
@conf
def scc_common_flags(conf):
	"""
	Flags required for executing the sun C compiler
	"""
	v = conf.env

	# fall back to the compiler binary as the link driver
	if not v['LINK_CC']:
		v['LINK_CC'] = v['CC']

	settings = {
		'CC_SRC_F': [],
		'CC_TGT_F': ['-c', '-o'],
		'CCLNK_SRC_F': '',
		'CCLNK_TGT_F': ['-o'],
		# flag templates for includes/defines/libraries
		'CPPPATH_ST': '-I%s',
		'DEFINES_ST': '-D%s',
		'LIB_ST': '-l%s',
		'LIBPATH_ST': '-L%s',
		'STLIB_ST': '-l%s',
		'STLIBPATH_ST': '-L%s',
		'SONAME_ST': '-Wl,-h,%s',
		'SHLIB_MARKER': '-Bdynamic',
		'STLIB_MARKER': '-Bstatic',
		# program
		'cprogram_PATTERN': '%s',
		# shared library
		'CFLAGS_cshlib': ['-Kpic', '-DPIC'],
		'LINKFLAGS_cshlib': ['-G'],
		'cshlib_PATTERN': 'lib%s.so',
		# static library
		'LINKFLAGS_cstlib': ['-Bstatic'],
		'cstlib_PATTERN': 'lib%s.a',
	}
	for key, val in settings.items():
		v[key] = val
def configure(conf):
	"""
	Configuration for the Sun C compiler: the compiler must be detected before
	the common flags are installed, hence the call order below.
	"""
	conf.find_scc()
	conf.find_ar()
	conf.scc_common_flags()
	conf.cc_load_tools()
	conf.cc_add_flags()
	conf.link_add_flags()
@@ -0,0 +1,69 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2006-2010 (ita) | |||||
# Ralf Habacker, 2006 (rh) | |||||
from waflib import Utils | |||||
from waflib.Tools import ccroot, ar | |||||
from waflib.Configure import conf | |||||
@conf
def find_sxx(conf):
	"""
	Detect the sun C++ compiler
	"""
	cc = conf.find_program(['CC', 'c++'], var='CXX')
	# a genuine Sun compiler answers the -flags query
	try:
		conf.cmd_and_log(cc + ['-flags'])
	except Exception:
		conf.fatal('%r is not a Sun compiler' % cc)
	conf.env.CXX_NAME = 'sun'
	conf.get_suncc_version(cc)
@conf
def sxx_common_flags(conf):
	"""
	Flags required for executing the sun C++ compiler
	"""
	v = conf.env

	# fall back to the compiler binary as the link driver
	if not v['LINK_CXX']:
		v['LINK_CXX'] = v['CXX']

	settings = {
		'CXX_SRC_F': [],
		'CXX_TGT_F': ['-c', '-o'],
		'CXXLNK_SRC_F': [],
		'CXXLNK_TGT_F': ['-o'],
		# flag templates for includes/defines/libraries
		'CPPPATH_ST': '-I%s',
		'DEFINES_ST': '-D%s',
		'LIB_ST': '-l%s',
		'LIBPATH_ST': '-L%s',
		'STLIB_ST': '-l%s',
		'STLIBPATH_ST': '-L%s',
		'SONAME_ST': '-Wl,-h,%s',
		'SHLIB_MARKER': '-Bdynamic',
		'STLIB_MARKER': '-Bstatic',
		# program
		'cxxprogram_PATTERN': '%s',
		# shared library
		'CXXFLAGS_cxxshlib': ['-Kpic', '-DPIC'],
		'LINKFLAGS_cxxshlib': ['-G'],
		'cxxshlib_PATTERN': 'lib%s.so',
		# static library
		'LINKFLAGS_cxxstlib': ['-Bstatic'],
		'cxxstlib_PATTERN': 'lib%s.a',
	}
	for key, val in settings.items():
		v[key] = val
def configure(conf):
	"""
	Configuration for the Sun C++ compiler: the compiler must be detected
	before the common flags are installed, hence the call order below.
	"""
	conf.find_sxx()
	conf.find_ar()
	conf.sxx_common_flags()
	conf.cxx_load_tools()
	conf.cxx_add_flags()
	conf.link_add_flags()
@@ -0,0 +1,515 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2006-2010 (ita) | |||||
""" | |||||
TeX/LaTeX/PDFLaTeX/XeLaTeX support | |||||
Example:: | |||||
def configure(conf): | |||||
conf.load('tex') | |||||
if not conf.env.LATEX: | |||||
conf.fatal('The program LaTex is required') | |||||
def build(bld): | |||||
bld( | |||||
features = 'tex', | |||||
type = 'latex', # pdflatex or xelatex | |||||
source = 'document.ltx', # mandatory, the source | |||||
outs = 'ps', # 'pdf' or 'ps pdf' | |||||
deps = 'crossreferencing.lst', # to give dependencies directly | |||||
prompt = 1, # 0 for the batch mode | |||||
) | |||||
Notes: | |||||
- To configure with a special program, use:: | |||||
$ PDFLATEX=luatex waf configure | |||||
- This tool doesn't use the target attribute of the task generator | |||||
(``bld(target=...)``); the target file name is built from the source | |||||
base name and the out type(s) | |||||
""" | |||||
import os, re | |||||
from waflib import Utils, Task, Errors, Logs, Node | |||||
from waflib.TaskGen import feature, before_method | |||||
re_bibunit = re.compile(r'\\(?P<type>putbib)\[(?P<file>[^\[\]]*)\]',re.M) | |||||
def bibunitscan(self):
	"""
	Parse the inputs and try to find the *bibunit* dependencies

	:return: list of bibunit files
	:rtype: list of :py:class:`waflib.Node.Node`
	"""
	node = self.inputs[0]
	nodes = []
	if not node:
		return nodes

	code = node.read()
	for match in re_bibunit.finditer(code):
		path = match.group('file')
		if not path:
			Logs.debug('tex: could not find %s' % path)
			continue
		for k in ('', '.bib'):
			# add another loop for the tex include paths?
			Logs.debug('tex: trying %s%s' % (path, k))
			fi = node.parent.find_resource(path + k)
			if fi:
				nodes.append(fi)
			# no break, people are crazy
	Logs.debug("tex: found the following bibunit files: %s" % nodes)
	return nodes
# module-level constants used by the latex dependency scanner below
exts_deps_tex = ['', '.ltx', '.tex', '.bib', '.pdf', '.png', '.eps', '.ps', '.sty']
"""List of typical file extensions included in latex files"""

exts_tex = ['.ltx', '.tex']
"""List of typical file extensions that contain latex"""

re_tex = re.compile(r'\\(?P<type>usepackage|RequirePackage|include|bibliography([^\[\]{}]*)|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}',re.M)
"""Regexp for expressions that may include latex files"""

g_bibtex_re = re.compile('bibdata', re.M)
"""Regexp for bibtex files"""

g_glossaries_re = re.compile('\\@newglossary', re.M)
"""Regexp for expressions that create glossaries"""
class tex(Task.Task):
	"""
	Compile a tex/latex file.

	.. inheritance-diagram:: waflib.Tools.tex.latex waflib.Tools.tex.xelatex waflib.Tools.tex.pdflatex
	"""

	bibtex_fun, _ = Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}', shell=False)
	bibtex_fun.__doc__ = """
	Execute the program **bibtex**
	"""

	makeindex_fun, _ = Task.compile_fun('${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}', shell=False)
	makeindex_fun.__doc__ = """
	Execute the program **makeindex**
	"""

	makeglossaries_fun, _ = Task.compile_fun('${MAKEGLOSSARIES} ${SRCFILE}', shell=False)
	makeglossaries_fun.__doc__ = """
	Execute the program **makeglossaries**
	"""

	def exec_command(self, cmd, **kw):
		"""
		Override :py:meth:`waflib.Task.Task.exec_command` to execute the command without buffering (latex may prompt for inputs)

		:return: the return code
		:rtype: int
		"""
		bld = self.generator.bld
		Logs.info('runner: %r' % cmd)
		try:
			if not kw.get('cwd', None):
				kw['cwd'] = bld.cwd
		except AttributeError:
			bld.cwd = kw['cwd'] = bld.variant_dir
		return Utils.subprocess.Popen(cmd, **kw).wait()

	def scan_aux(self, node):
		"""
		A recursive regex-based scanner that finds included auxiliary files
		(the ``\\@input{...}`` statements written by latex).

		:param node: the root *.aux* file
		:return: the root node plus every transitively included aux node
		:rtype: list of :py:class:`waflib.Node.Node`
		"""
		nodes = [node]
		re_aux = re.compile(r'\\@input{(?P<file>[^{}]*)}', re.M)

		def parse_node(node):
			code = node.read()
			for match in re_aux.finditer(code):
				path = match.group('file')
				found = node.parent.find_or_declare(path)
				if found and found not in nodes:
					Logs.debug('tex: found aux node ' + found.abspath())
					nodes.append(found)
					parse_node(found)
		parse_node(node)
		return nodes

	def scan(self):
		"""
		A recursive regex-based scanner that finds latex dependencies. It uses :py:attr:`waflib.Tools.tex.re_tex`

		Depending on your needs you might want:

		* to change re_tex::

			from waflib.Tools import tex
			tex.re_tex = myregex

		* or to change the method scan from the latex tasks::

			from waflib.Task import classes
			classes['latex'].scan = myscanfunction
		"""
		node = self.inputs[0]

		nodes = []
		names = []
		seen = []
		if not node: return (nodes, names)

		def parse_node(node):
			if node in seen:
				return
			seen.append(node)
			code = node.read()
			global re_tex
			for match in re_tex.finditer(code):

				multibib = match.group('type')
				if multibib and multibib.startswith('bibliography'):
					# \bibliographyXYZ{...} - the suffix names a multibib database,
					# but \bibliographystyle must be ignored
					multibib = multibib[len('bibliography'):]
					if multibib.startswith('style'):
						continue
				else:
					multibib = None

				for path in match.group('file').split(','):
					if path:
						add_name = True
						found = None
						for k in exts_deps_tex:

							# issue 1067, scan in all texinputs folders
							for up in self.texinputs_nodes:
								Logs.debug('tex: trying %s%s' % (path, k))
								found = up.find_resource(path + k)
								if found:
									break

							# skip nodes that are produced by tasks of the same generator
							for tsk in self.generator.tasks:
								if not found or found in tsk.outputs:
									break
							else:
								nodes.append(found)
								add_name = False
								for ext in exts_tex:
									if found.name.endswith(ext):
										parse_node(found)
										break

							# multibib stuff
							if found and multibib and found.name.endswith('.bib'):
								try:
									self.multibibs.append(found)
								except AttributeError:
									self.multibibs = [found]

							# no break, people are crazy
						if add_name:
							names.append(path)
		parse_node(node)

		for x in nodes:
			x.parent.get_bld().mkdir()

		Logs.debug("tex: found the following : %s and names %s" % (nodes, names))
		return (nodes, names)

	def check_status(self, msg, retcode):
		"""
		Check an exit status and raise an error with a particular message

		:param msg: message to display if the code is non-zero
		:type msg: string
		:param retcode: exit status of the command that was executed
		:type retcode: int
		"""
		if retcode != 0:
			raise Errors.WafError("%r command exit status %r" % (msg, retcode))

	def bibfile(self):
		"""
		Parse the *.aux* files to find bibfiles to process.
		If yes, execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`
		"""
		for aux_node in self.aux_nodes:
			try:
				ct = aux_node.read()
			except EnvironmentError as e:
				# bugfix: the message has two conversions, so the arguments must
				# be a tuple (a bare string raised TypeError here)
				Logs.error('Error reading %s: %r' % (aux_node.abspath(), e))
				continue

			if g_bibtex_re.findall(ct):
				Logs.info('calling bibtex')

				self.env.env = {}
				self.env.env.update(os.environ)
				self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
				self.env.SRCFILE = aux_node.name[:-4]
				self.check_status('error when calling bibtex', self.bibtex_fun())

		for node in getattr(self, 'multibibs', []):
			self.env.env = {}
			self.env.env.update(os.environ)
			self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
			self.env.SRCFILE = node.name[:-4]
			self.check_status('error when calling bibtex', self.bibtex_fun())

	def bibunits(self):
		"""
		Parse the *.aux* file to find bibunit files. If there are bibunit files,
		execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`.
		"""
		try:
			bibunits = bibunitscan(self)
		except OSError:
			Logs.error('error bibunitscan')
		else:
			if bibunits:
				fn  = ['bu' + str(i) for i in range(1, len(bibunits) + 1)]
				if fn:
					Logs.info('calling bibtex on bibunits')

				for f in fn:
					self.env.env = {'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()}
					self.env.SRCFILE = f
					self.check_status('error when calling bibtex', self.bibtex_fun())

	def makeindex(self):
		"""
		Look on the filesystem if there is a *.idx* file to process. If yes, execute
		:py:meth:`waflib.Tools.tex.tex.makeindex_fun`
		"""
		self.idx_node = self.inputs[0].change_ext('.idx')
		try:
			idx_path = self.idx_node.abspath()
			os.stat(idx_path)
		except OSError:
			Logs.info('index file %s absent, not calling makeindex' % idx_path)
		else:
			Logs.info('calling makeindex')

			self.env.SRCFILE = self.idx_node.name
			self.env.env = {}
			self.check_status('error when calling makeindex %s' % idx_path, self.makeindex_fun())

	def bibtopic(self):
		"""
		Additional .aux files from the bibtopic package
		"""
		p = self.inputs[0].parent.get_bld()
		if os.path.exists(os.path.join(p.abspath(), 'btaux.aux')):
			self.aux_nodes += p.ant_glob('*[0-9].aux')

	def makeglossaries(self):
		"""
		Look for ``\\@newglossary`` entries in the *.aux* files; if any are found,
		execute :py:meth:`waflib.Tools.tex.tex.makeglossaries_fun`
		"""
		src_file = self.inputs[0].abspath()
		base_file = os.path.basename(src_file)
		base, _ = os.path.splitext(base_file)
		for aux_node in self.aux_nodes:
			try:
				ct = aux_node.read()
			except EnvironmentError as e:
				# bugfix: same formatting defect as in bibfile - pass a tuple
				Logs.error('Error reading %s: %r' % (aux_node.abspath(), e))
				continue

			if g_glossaries_re.findall(ct):
				if not self.env.MAKEGLOSSARIES:
					raise Errors.WafError("The program 'makeglossaries' is missing!")
				Logs.warn('calling makeglossaries')
				self.env.SRCFILE = base
				self.check_status('error when calling makeglossaries %s' % base, self.makeglossaries_fun())
				return

	def texinputs(self):
		"""
		:return: the TEXINPUTS search path built from :py:attr:`texinputs_nodes`,
			terminated by a path separator (so the system default path is kept)
		:rtype: string
		"""
		return os.pathsep.join([k.abspath() for k in self.texinputs_nodes]) + os.pathsep

	def run(self):
		"""
		Runs the TeX build process.

		It may require multiple passes, depending on the usage of cross-references,
		bibliographies, content susceptible of needing such passes.
		The appropriate TeX compiler is called until the *.aux* files stop changing.

		Makeindex and bibtex are called if necessary.
		"""
		env = self.env

		if not env['PROMPT_LATEX']:
			env.append_value('LATEXFLAGS', '-interaction=batchmode')
			env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
			env.append_value('XELATEXFLAGS', '-interaction=batchmode')

		# important, set the cwd for everybody
		self.cwd = self.inputs[0].parent.get_bld().abspath()

		Logs.info('first pass on %s' % self.__class__.__name__)

		# Hash .aux files before even calling the LaTeX compiler
		cur_hash = self.hash_aux_nodes()

		self.call_latex()

		# Find the .aux files again since bibtex processing can require it
		self.hash_aux_nodes()

		self.bibtopic()
		self.bibfile()
		self.bibunits()
		self.makeindex()
		self.makeglossaries()

		for i in range(10):
			# There is no need to call latex again if the .aux hash value has not changed
			prev_hash = cur_hash
			cur_hash = self.hash_aux_nodes()
			if not cur_hash:
				Logs.error('No aux.h to process')
			if cur_hash and cur_hash == prev_hash:
				break

			# run the command
			Logs.info('calling %s' % self.__class__.__name__)
			self.call_latex()

	def hash_aux_nodes(self):
		"""
		Hash the *.aux* files to detect when another latex pass is needed.
		The aux node list is computed lazily on the first call.

		:return: a hash over the aux file contents, or None if no aux file exists yet
		"""
		try:
			nodes = self.aux_nodes
		except AttributeError:
			try:
				self.aux_nodes = self.scan_aux(self.inputs[0].change_ext('.aux'))
			except IOError:
				return None
		return Utils.h_list([Utils.h_file(x.abspath()) for x in self.aux_nodes])

	def call_latex(self):
		"""
		Execute the latex compiler once, with TEXINPUTS pointing at
		:py:attr:`texinputs_nodes` in addition to the inherited environment.
		"""
		self.env.env = {}
		self.env.env.update(os.environ)
		self.env.env.update({'TEXINPUTS': self.texinputs()})
		self.env.SRCFILE = self.inputs[0].abspath()
		self.check_status('error when calling latex', self.texfun())
class latex(tex):
	"Compile tex files with the ${LATEX} program (dvi output)"
	texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
class pdflatex(tex):
	"Compile tex files with the ${PDFLATEX} program (pdf output)"
	texfun, vars = Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
class xelatex(tex):
	"Compile tex files with the ${XELATEX} program (pdf output)"
	texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False)
class dvips(Task.Task):
	"Process *.dvi* files with the ${DVIPS} program"
	run_str = '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}'
	color   = 'BLUE'
	after   = ['latex', 'pdflatex', 'xelatex']
class dvipdf(Task.Task):
	"Process *.dvi* files with the ${DVIPDF} program"
	run_str = '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}'
	color   = 'BLUE'
	after   = ['latex', 'pdflatex', 'xelatex']
class pdf2ps(Task.Task):
	"Process *.pdf* files with the ${PDF2PS} program"
	run_str = '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}'
	color   = 'BLUE'
	after   = ['latex', 'pdflatex', 'xelatex']
@feature('tex')
@before_method('process_source')
def apply_tex(self):
	"""
	Create :py:class:`waflib.Tools.tex.tex` objects, and dvips/dvipdf/pdf2ps tasks if necessary (outs='ps', etc).
	"""
	# default to pdflatex when no valid type was requested
	if not getattr(self, 'type', None) in ('latex', 'pdflatex', 'xelatex'):
		self.type = 'pdflatex'

	tree = self.bld
	outs = Utils.to_list(getattr(self, 'outs', []))

	# prompt for incomplete files (else the batchmode is used)
	self.env['PROMPT_LATEX'] = getattr(self, 'prompt', 1)

	# resolve the manual dependencies (bld(deps=...)) to nodes once, up front
	deps_lst = []

	if getattr(self, 'deps', None):
		deps = self.to_list(self.deps)
		for dep in deps:
			if isinstance(dep, str):
				n = self.path.find_resource(dep)
				if not n:
					self.bld.fatal('Could not find %r for %r' % (dep, self))
				if not n in deps_lst:
					deps_lst.append(n)
			elif isinstance(dep, Node.Node):
				deps_lst.append(dep)

	for node in self.to_nodes(self.source):
		# one compilation task per source file; the output extension depends on the compiler
		if self.type == 'latex':
			task = self.create_task('latex', node, node.change_ext('.dvi'))
		elif self.type == 'pdflatex':
			task = self.create_task('pdflatex', node, node.change_ext('.pdf'))
		elif self.type == 'xelatex':
			task = self.create_task('xelatex', node, node.change_ext('.pdf'))

		task.env = self.env

		# add the manual dependencies
		if deps_lst:
			for n in deps_lst:
				if not n in task.dep_nodes:
					task.dep_nodes.append(n)

		# texinputs is a nasty beast
		if hasattr(self, 'texinputs_nodes'):
			task.texinputs_nodes = self.texinputs_nodes
		else:
			# default search path: the source folder, its build mirror, and the
			# task generator path - then extend with $TEXINPUTS entries
			task.texinputs_nodes = [node.parent, node.parent.get_bld(), self.path, self.path.get_bld()]
			lst = os.environ.get('TEXINPUTS', '')
			if self.env.TEXINPUTS:
				lst += os.pathsep + self.env.TEXINPUTS
			if lst:
				lst = lst.split(os.pathsep)
			for x in lst:
				if x:
					if os.path.isabs(x):
						p = self.bld.root.find_node(x)
						if p:
							task.texinputs_nodes.append(p)
						else:
							Logs.error('Invalid TEXINPUTS folder %s' % x)
					else:
						Logs.error('Cannot resolve relative paths in TEXINPUTS %s' % x)

		# post-processing tasks depending on the requested output formats
		if self.type == 'latex':
			if 'ps' in outs:
				tsk = self.create_task('dvips', task.outputs, node.change_ext('.ps'))
				tsk.env.env = dict(os.environ)
			if 'pdf' in outs:
				tsk = self.create_task('dvipdf', task.outputs, node.change_ext('.pdf'))
				tsk.env.env = dict(os.environ)
		elif self.type == 'pdflatex':
			if 'ps' in outs:
				self.create_task('pdf2ps', task.outputs, node.change_ext('.ps'))
	# sources are fully consumed here; prevent process_source from re-processing them
	self.source = []
def configure(self):
	"""
	Look for the various tex programs; a missing program is simply skipped,
	no configuration error is raised.
	"""
	programs = 'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps makeglossaries'
	for name in programs.split():
		try:
			self.find_program(name, var=name.upper())
		except self.errors.ConfigurationError:
			# tolerate missing programs: the corresponding tasks simply cannot run
			pass
	self.env['DVIPSFLAGS'] = '-Ppdf'
@@ -0,0 +1,335 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Ali Sabil, 2007 | |||||
# Radosław Szkodziński, 2010 | |||||
""" | |||||
At this point, vala is still unstable, so do not expect | |||||
this tool to be too stable either (apis, etc) | |||||
""" | |||||
import os.path, shutil, re | |||||
from waflib import Context, Task, Utils, Logs, Options, Errors | |||||
from waflib.TaskGen import extension, taskgen_method | |||||
from waflib.Configure import conf | |||||
class valac(Task.Task):
	"""
	Task to compile vala files.
	"""
	#run_str = "${VALAC} ${VALAFLAGS}" # ideally
	#vars = ['VALAC_VERSION']
	vars = ["VALAC", "VALAC_VERSION", "VALAFLAGS"]
	ext_out = ['.h']

	def run(self):
		"""
		Execute valac once over all the input files; outputs that do not
		belong to the first output's folder are moved into place afterwards.

		:return: the exit status of the valac command
		:rtype: int
		"""
		cmd = self.env.VALAC + self.env.VALAFLAGS
		cmd.extend([a.abspath() for a in self.inputs])
		ret = self.exec_command(cmd, cwd=self.outputs[0].parent.abspath())

		if ret:
			return ret

		for x in self.outputs:
			if id(x.parent) != id(self.outputs[0].parent):
				# valac wrote everything next to the first output; relocate
				shutil.move(self.outputs[0].parent.abspath() + os.sep + x.name, x.abspath())

		if self.generator.dump_deps_node:
			self.generator.dump_deps_node.write('\n'.join(self.generator.packages))

		return ret

valac = Task.update_outputs(valac) # no decorators for python2 classes
@taskgen_method
def init_vala_task(self):
	"""
	Initializes the vala task with the relevant data (acts as a constructor)
	"""
	self.profile = getattr(self, 'profile', 'gobject')

	if self.profile == 'gobject':
		self.uselib = Utils.to_list(getattr(self, 'uselib', []))
		if not 'GOBJECT' in self.uselib:
			self.uselib.append('GOBJECT')

	def addflags(flags):
		# helper: accumulate flags on VALAFLAGS
		self.env.append_value('VALAFLAGS', flags)

	if self.profile:
		addflags('--profile=%s' % self.profile)

	if hasattr(self, 'threading'):
		if self.profile == 'gobject':
			if not 'GTHREAD' in self.uselib:
				self.uselib.append('GTHREAD')
		else:
			#Vala doesn't have threading support for dova nor posix
			Logs.warn("Profile %s means no threading support" % self.profile)
			self.threading = False

		if self.threading:
			addflags('--threading')

	valatask = self.valatask

	# a task generator without 'cprogram' is considered a library build
	self.is_lib = 'cprogram' not in self.features
	if self.is_lib:
		addflags('--library=%s' % self.target)

		h_node = self.path.find_or_declare('%s.h' % self.target)
		valatask.outputs.append(h_node)
		addflags('--header=%s' % h_node.name)

		valatask.outputs.append(self.path.find_or_declare('%s.vapi' % self.target))

		if getattr(self, 'gir', None):
			gir_node = self.path.find_or_declare('%s.gir' % self.gir)
			addflags('--gir=%s' % gir_node.name)
			valatask.outputs.append(gir_node)

	self.vala_target_glib = getattr(self, 'vala_target_glib', getattr(Options.options, 'vala_target_glib', None))
	if self.vala_target_glib:
		addflags('--target-glib=%s' % self.vala_target_glib)

	addflags(['--define=%s' % x for x in getattr(self, 'vala_defines', [])])

	packages_private = Utils.to_list(getattr(self, 'packages_private', []))
	addflags(['--pkg=%s' % x for x in packages_private])

	def _get_api_version():
		# derive the installed header folder suffix from the project API_VERSION, if any
		api_version = '1.0'
		if hasattr(Context.g_module, 'API_VERSION'):
			version = Context.g_module.API_VERSION.split(".")
			if version[0] == "0":
				api_version = "0." + version[1]
			else:
				api_version = version[0] + ".0"
		return api_version

	self.includes = Utils.to_list(getattr(self, 'includes', []))
	self.uselib = self.to_list(getattr(self, 'uselib', []))
	valatask.install_path = getattr(self, 'install_path', '')
	valatask.vapi_path = getattr(self, 'vapi_path', '${DATAROOTDIR}/vala/vapi')
	valatask.pkg_name = getattr(self, 'pkg_name', self.env['PACKAGE'])
	valatask.header_path = getattr(self, 'header_path', '${INCLUDEDIR}/%s-%s' % (valatask.pkg_name, _get_api_version()))
	valatask.install_binding = getattr(self, 'install_binding', True)

	self.packages = packages = Utils.to_list(getattr(self, 'packages', []))
	self.vapi_dirs = vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', []))
	includes = []

	if hasattr(self, 'use'):
		# walk the 'use' graph to pick up vapi files produced by sibling task generators
		local_packages = Utils.to_list(self.use)[:] # make sure to have a copy
		seen = []
		while len(local_packages) > 0:
			package = local_packages.pop()
			if package in seen:
				continue
			seen.append(package)

			# check if the package exists
			try:
				package_obj = self.bld.get_tgen_by_name(package)
			except Errors.WafError:
				continue
			package_name = package_obj.target
			package_node = package_obj.path
			package_dir = package_node.path_from(self.path)

			for task in package_obj.tasks:
				for output in task.outputs:
					if output.name == package_name + ".vapi":
						valatask.set_run_after(task)
						if package_name not in packages:
							packages.append(package_name)
						if package_dir not in vapi_dirs:
							vapi_dirs.append(package_dir)
						if package_dir not in includes:
							includes.append(package_dir)

			if hasattr(package_obj, 'use'):
				# enqueue transitive dependencies, preserving the original order
				lst = self.to_list(package_obj.use)
				lst.reverse()
				local_packages = [pkg for pkg in lst if pkg not in seen] + local_packages

	addflags(['--pkg=%s' % p for p in packages])

	for vapi_dir in vapi_dirs:
		v_node = self.path.find_dir(vapi_dir)
		if not v_node:
			Logs.warn('Unable to locate Vala API directory: %r' % vapi_dir)
		else:
			# register both the source and build variants of the folder
			addflags('--vapidir=%s' % v_node.abspath())
			addflags('--vapidir=%s' % v_node.get_bld().abspath())

	self.dump_deps_node = None
	if self.is_lib and self.packages:
		self.dump_deps_node = self.path.find_or_declare('%s.deps' % self.target)
		valatask.outputs.append(self.dump_deps_node)

	self.includes.append(self.bld.srcnode.abspath())
	self.includes.append(self.bld.bldnode.abspath())

	for include in includes:
		try:
			self.includes.append(self.path.find_dir(include).abspath())
			self.includes.append(self.path.find_dir(include).get_bld().abspath())
		except AttributeError:
			# find_dir returned None - the folder does not exist
			Logs.warn("Unable to locate include directory: '%s'" % include)

	if self.is_lib and valatask.install_binding:
		# install headers, vapi/deps and gir files; reuse existing install tasks when re-run
		headers_list = [o for o in valatask.outputs if o.suffix() == ".h"]
		try:
			self.install_vheader.source = headers_list
		except AttributeError:
			self.install_vheader = self.bld.install_files(valatask.header_path, headers_list, self.env)

		vapi_list = [o for o in valatask.outputs if (o.suffix() in (".vapi", ".deps"))]
		try:
			self.install_vapi.source = vapi_list
		except AttributeError:
			self.install_vapi = self.bld.install_files(valatask.vapi_path, vapi_list, self.env)

		gir_list = [o for o in valatask.outputs if o.suffix() == '.gir']
		try:
			self.install_gir.source = gir_list
		except AttributeError:
			self.install_gir = self.bld.install_files(getattr(self, 'gir_path', '${DATAROOTDIR}/gir-1.0'), gir_list, self.env)
@extension('.vala', '.gs')
def vala_file(self, node):
	"""
	Compile a vala file and bind the task to *self.valatask*. If an existing vala task is already set, add the node
	to its inputs. The typical example is::

		def build(bld):
			bld.program(
				packages      = 'gtk+-2.0',
				target        = 'vala-gtk-example',
				uselib        = 'GTK GLIB',
				source        = 'vala-gtk-example.vala foo.vala',
				vala_defines  = ['DEBUG'] # adds --define=<xyz> values to the command-line

				# the following arguments are for libraries
				#gir          = 'hello-1.0',
				#gir_path     = '/tmp',
				#vapi_path    = '/tmp',
				#pkg_name     = 'hello'
				# disable installing of gir, vapi and header
				#install_binding = False

				# profile     = 'xyz' # adds --profile=<xyz> to enable profiling
				# threading   = True, # add --threading, except if profile is on or not on 'gobject'
				# vala_target_glib = 'xyz' # adds --target-glib=<xyz>, can be given through the command-line option --vala-target-glib=<xyz>
			)

	:param node: vala file
	:type node: :py:class:`waflib.Node.Node`
	"""
	# one single valac task per task generator: create it lazily on the first vala file
	try:
		valatask = self.valatask
	except AttributeError:
		valatask = self.valatask = self.create_task('valac')
		self.init_vala_task()

	valatask.inputs.append(node)
	# the generated .c file is fed back into the c compilation chain
	c_node = node.change_ext('.c')
	valatask.outputs.append(c_node)
	self.source.append(c_node)
@conf
def find_valac(self, valac_name, min_version):
	"""
	Find the valac program, and execute it to store the version
	number in *conf.env.VALAC_VERSION*

	:param valac_name: program name
	:type valac_name: string or list of string
	:param min_version: minimum version acceptable
	:type min_version: tuple of int
	"""
	valac = self.find_program(valac_name, var='VALAC')
	try:
		output = self.cmd_and_log(valac + ['--version'])
	except Exception:
		valac_version = None
	else:
		# bugfix: the dots must be escaped, else '.' matched any character;
		# also tolerate unexpected --version output instead of raising AttributeError
		ver = re.search(r'\d+\.\d+\.\d+', output)
		valac_version = tuple([int(x) for x in ver.group(0).split('.')]) if ver else None

	self.msg('Checking for %s version >= %r' % (valac_name, min_version),
	         valac_version, valac_version and valac_version >= min_version)

	# bugfix: guard against valac_version being None - comparing None with a
	# tuple raises TypeError on Python 3; an undetectable version is too old
	if valac and (valac_version is None or valac_version < min_version):
		self.fatal("%s version %r is too old, need >= %r" % (valac_name, valac_version, min_version))

	self.env['VALAC_VERSION'] = valac_version
	return valac
@conf
def check_vala(self, min_version=(0,8,0), branch=None):
	"""
	Check if vala compiler from a given branch exists of at least a given
	version.

	:param min_version: minimum version acceptable (0.8.0)
	:type min_version: tuple
	:param branch: first part of the version number, in case a snapshot is used (0, 8)
	:type branch: tuple of int
	"""
	if not branch:
		branch = min_version[:2]
	try:
		# prefer the versioned executable (e.g. valac-0.8) over the plain one
		find_valac(self, 'valac-%d.%d' % (branch[0], branch[1]), min_version)
	except self.errors.ConfigurationError:
		find_valac(self, 'valac', min_version)
@conf
def check_vala_deps(self):
	"""
	Load the gobject and gthread packages if they are missing.
	"""
	# the two checks only differed by package/store names; drive both from data
	for have_var, pkg, store in (
			('HAVE_GOBJECT', 'gobject-2.0', 'GOBJECT'),
			('HAVE_GTHREAD', 'gthread-2.0', 'GTHREAD')):
		if not self.env[have_var]:
			pkg_args = {'package':      pkg,
			            'uselib_store': store,
			            'args':         '--cflags --libs'}
			# --vala-target-glib implies a minimum glib version for the packages too
			if getattr(Options.options, 'vala_target_glib', None):
				pkg_args['atleast_version'] = Options.options.vala_target_glib
			self.check_cfg(**pkg_args)
def configure(self):
	"""
	Use the following to enforce minimum vala version::

		def configure(conf):
			conf.load('vala', funs='')
			conf.check_vala(min_version=(0,10,0))
	"""
	# gnu_dirs provides DATAROOTDIR/INCLUDEDIR used by init_vala_task
	self.load('gnu_dirs')
	self.check_vala_deps()
	self.check_vala()
	# -C: emit C code instead of compiling; --quiet: suppress valac chatter
	self.env.VALAFLAGS = ['-C', '--quiet']
def options(opt):
	"""
	Load the :py:mod:`waflib.Tools.gnu_dirs` tool and add the ``--vala-target-glib`` command-line option
	"""
	opt.load('gnu_dirs')
	group = opt.add_option_group('Vala Compiler Options')
	group.add_option('--vala-target-glib',
		metavar='MAJOR.MINOR',
		dest='vala_target_glib',
		default=None,
		help='Target version of glib for Vala GObject code generation')
@@ -0,0 +1,201 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Carlos Rafael Giani, 2006 | |||||
# Thomas Nagy, 2010 | |||||
""" | |||||
Unit testing system for C/C++/D providing test execution: | |||||
* in parallel, by using ``waf -j`` | |||||
* partial (only the tests that have changed) or full (by using ``waf --alltests``) | |||||
The tests are declared by adding the **test** feature to programs:: | |||||
def options(opt): | |||||
opt.load('compiler_cxx waf_unit_test') | |||||
def configure(conf): | |||||
conf.load('compiler_cxx waf_unit_test') | |||||
def build(bld): | |||||
bld(features='cxx cxxprogram test', source='main.cpp', target='app') | |||||
# or | |||||
bld.program(features='test', source='main2.cpp', target='app2') | |||||
When the build is executed, the program 'test' will be built and executed without arguments. | |||||
The success/failure is detected by looking at the return code. The status and the standard output/error | |||||
are stored on the build context. | |||||
The results can be displayed by registering a callback function. Here is how to call | |||||
the predefined callback:: | |||||
def build(bld): | |||||
bld(features='cxx cxxprogram test', source='main.c', target='app') | |||||
from waflib.Tools import waf_unit_test | |||||
bld.add_post_fun(waf_unit_test.summary) | |||||
""" | |||||
import os | |||||
from waflib.TaskGen import feature, after_method, taskgen_method | |||||
from waflib import Utils, Task, Logs, Options | |||||
# Lock serializing updates to bld.utest_results, since utest tasks may run in parallel threads
testlock = Utils.threading.Lock()
@feature('test')
@after_method('apply_link')
def make_test(self):
	"""Create the unit test task. There can be only one unit test task by task generator."""
	link = getattr(self, 'link_task', None)
	if link:
		# the built binary is the input of the test task
		self.create_task('utest', link.outputs)
@taskgen_method
def add_test_results(self, tup):
	"""Override and return tup[1] to interrupt the build immediately if a test does not run"""
	Logs.debug("ut: %r", tup)
	self.utest_result = tup
	bld = self.bld
	# first result creates the list on the build context (caller holds testlock)
	if not hasattr(bld, 'utest_results'):
		bld.utest_results = []
	bld.utest_results.append(tup)
class utest(Task.Task):
	"""
	Execute a unit test
	"""
	color = 'PINK'
	after = ['vnum', 'inst']
	vars = []

	def runnable_status(self):
		"""
		Always execute the task if `waf --alltests` was used or no
		tests if ``waf --notests`` was used
		"""
		if getattr(Options.options, 'no_tests', False):
			return Task.SKIP_ME

		ret = super(utest, self).runnable_status()
		if ret == Task.SKIP_ME:
			if getattr(Options.options, 'all_tests', False):
				return Task.RUN_ME
		return ret

	def add_path(self, dct, path, var):
		# prepend the given folders to the environment variable *var* in *dct*
		dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])

	def get_test_env(self):
		"""
		In general, tests may require any library built anywhere in the project.
		Override this method if fewer paths are needed
		"""
		try:
			fu = getattr(self.generator.bld, 'all_test_paths')
		except AttributeError:
			# this operation may be performed by at most #maxjobs
			fu = os.environ.copy()

			# collect the output folders of all link tasks so tests can load shared libs
			lst = []
			for g in self.generator.bld.groups:
				for tg in g:
					if getattr(tg, 'link_task', None):
						s = tg.link_task.outputs[0].parent.abspath()
						if s not in lst:
							lst.append(s)

			if Utils.is_win32:
				self.add_path(fu, lst, 'PATH')
			elif Utils.unversioned_sys_platform() == 'darwin':
				self.add_path(fu, lst, 'DYLD_LIBRARY_PATH')
				self.add_path(fu, lst, 'LD_LIBRARY_PATH')
			else:
				self.add_path(fu, lst, 'LD_LIBRARY_PATH')
			# cache on the build context for subsequent test tasks
			self.generator.bld.all_test_paths = fu
		return fu

	def run(self):
		"""
		Execute the test. The execution is always successful, and the results
		are stored on ``self.generator.bld.utest_results`` for postprocessing.

		Override ``add_test_results`` to interrupt the build
		"""
		filename = self.inputs[0].abspath()
		self.ut_exec = getattr(self.generator, 'ut_exec', [filename])
		if getattr(self.generator, 'ut_fun', None):
			self.generator.ut_fun(self)

		cwd = getattr(self.generator, 'ut_cwd', '') or self.inputs[0].parent.abspath()

		# optional wrapper command, e.g. valgrind, given via ut_cmd or --testcmd
		testcmd = getattr(self.generator, 'ut_cmd', False) or getattr(Options.options, 'testcmd', False)
		if testcmd:
			self.ut_exec = (testcmd % self.ut_exec[0]).split(' ')

		proc = Utils.subprocess.Popen(self.ut_exec, cwd=cwd, env=self.get_test_env(), stderr=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE)
		(stdout, stderr) = proc.communicate()

		tup = (filename, proc.returncode, stdout, stderr)
		# serialize result recording across parallel test tasks
		testlock.acquire()
		try:
			return self.generator.add_test_results(tup)
		finally:
			testlock.release()
def summary(bld):
	"""
	Display an execution summary::

		def build(bld):
			bld(features='cxx cxxprogram test', source='main.c', target='app')
			from waflib.Tools import waf_unit_test
			bld.add_post_fun(waf_unit_test.summary)
	"""
	results = getattr(bld, 'utest_results', [])
	if not results:
		return

	Logs.pprint('CYAN', 'execution summary')

	total = len(results)
	failed = [r for r in results if r[1]]

	Logs.pprint('CYAN', ' tests that pass %d/%d' % (total - len(failed), total))
	for (f, code, out, err) in results:
		if not code:
			Logs.pprint('CYAN', ' %s' % f)

	Logs.pprint('CYAN', ' tests that fail %d/%d' % (len(failed), total))
	for (f, code, out, err) in results:
		if code:
			Logs.pprint('CYAN', ' %s' % f)
def set_exit_code(bld):
	"""
	If any of the tests fail waf will exit with that exit code.
	This is useful if you have an automated build system which need
	to report on errors from the tests.
	You may use it like this:
	def build(bld):
		bld(features='cxx cxxprogram test', source='main.c', target='app')
		from waflib.Tools import waf_unit_test
		bld.add_post_fun(waf_unit_test.set_exit_code)
	"""
	for (f, code, out, err) in getattr(bld, 'utest_results', []):
		if not code:
			continue
		# build an error message from the captured streams of the failing test
		parts = []
		if out:
			parts.append('stdout:%s%s' % (os.linesep, out.decode('utf-8')))
		if err:
			parts.append('stderr:%s%s' % (os.linesep, err.decode('utf-8')))
		bld.fatal(os.linesep.join(parts))
def options(opt):
	"""
	Provide the ``--alltests``, ``--notests`` and ``--testcmd`` command-line options.
	"""
	add = opt.add_option
	add('--notests', action='store_true', default=False, help='Exec no unit tests', dest='no_tests')
	add('--alltests', action='store_true', default=False, help='Exec all unit tests', dest='all_tests')
	add('--testcmd', action='store', default=False, dest='testcmd',
		help='Run the unit tests using the test-cmd string'
		' example "--test-cmd="valgrind --error-exitcode=1'
		' %s" to run under valgrind')
@@ -0,0 +1,114 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Brant Young, 2007 | |||||
"Process *.rc* files for C/C++: X{.rc -> [.res|.rc.o]}" | |||||
import re, traceback | |||||
from waflib import Task, Logs, Utils | |||||
from waflib.TaskGen import extension | |||||
from waflib.Tools import c_preproc | |||||
@extension('.rc')
def rc_file(self, node):
	"""
	Bind the .rc extension to a winrc task. The output extension depends
	on the resource compiler detected at configure time (.res for rc.exe,
	.rc.o for windres).
	"""
	if self.env['WINRC_TGT_F'] == '/fo':
		out = node.change_ext('.res')
	else:
		out = node.change_ext('.rc.o')
	tsk = self.create_task('winrc', node, out)
	try:
		self.compiled_tasks.append(tsk)
	except AttributeError:
		# first compiled task of this generator
		self.compiled_tasks = [tsk]
# Matches either a preprocessor directive (group 2 = keyword, group 3 = argument)
# or a resource statement referencing an external file (group 4 = resource type,
# group 5 = argument). Raw string literals are used so that '\s' and '\w' are
# not parsed as (invalid) string escape sequences, which emit a
# DeprecationWarning/SyntaxWarning on modern Python; the compiled pattern
# is byte-for-byte identical to the original.
re_lines = re.compile(
	r'(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|'\
	r'(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)',
	re.IGNORECASE | re.MULTILINE)
class rc_parser(c_preproc.c_parser):
	"""
	Preprocessor subclass that scans .rc files: it tracks the usual
	preprocessor directives and additionally reports resource statements
	(ICON, BITMAP, ...) as includes so the referenced files become
	dependencies.
	"""
	def filter_comments(self, filepath):
		"""
		Return the interesting statements of the file as a list of
		``(keyword, argument)`` tuples.

		:param filepath: file to parse
		:type filepath: string
		"""
		code = Utils.readf(filepath)
		if c_preproc.use_trigraphs:
			# BUGFIX: the original read "code.split(a).join(b)", which raises
			# AttributeError (lists have no join method); substitute each
			# trigraph a by its replacement b
			for (a, b) in c_preproc.trig_def:
				code = b.join(code.split(a))
		# strip line continuations, then comments
		code = c_preproc.re_nl.sub('', code)
		code = c_preproc.re_cpp.sub(c_preproc.repl, code)
		ret = []
		for m in re.finditer(re_lines, code):
			if m.group(2):
				# preprocessor directive: (keyword, rest of line)
				ret.append((m.group(2), m.group(3)))
			else:
				# resource statement: treat the referenced file as an include
				ret.append(('include', m.group(5)))
		return ret
	def addlines(self, node):
		"""
		Push the filtered lines of *node* onto the parsing stack,
		using (and filling) the shared parse cache.

		:param node: the file to process
		"""
		self.currentnode_stack.append(node.parent)
		filepath = node.abspath()
		self.count_files += 1
		if self.count_files > c_preproc.recursion_limit:
			raise c_preproc.PreprocError("recursion limit exceeded")
		pc = self.parse_cache
		Logs.debug('preproc: reading file %r', filepath)
		try:
			lns = pc[filepath]
		except KeyError:
			pass
		else:
			# cache hit: reuse the previously filtered lines
			self.lines.extend(lns)
			return
		try:
			lines = self.filter_comments(filepath)
			lines.append((c_preproc.POPFILE, ''))
			# the parser pops lines from the end of self.lines
			lines.reverse()
			pc[filepath] = lines
			self.lines.extend(lines)
		except IOError:
			raise c_preproc.PreprocError("could not read the file %s" % filepath)
		except Exception:
			if Logs.verbose > 0:
				Logs.error("parsing %s failed" % filepath)
				traceback.print_exc()
class winrc(Task.Task):
	"""
	Task for compiling resource files
	"""
	# command line assembled from the variables set in configure() below
	run_str = '${WINRC} ${WINRCFLAGS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'
	color = 'BLUE'
	def scan(self):
		"""
		Scan the .rc source with rc_parser and return the dependencies as
		``([nodes], [unresolved names])``.
		"""
		tmp = rc_parser(self.generator.includes_nodes)
		tmp.start(self.inputs[0], self.env)
		nodes = tmp.nodes
		names = tmp.names
		if Logs.verbose:
			Logs.debug('deps: deps for %s: %r; unresolved %r' % (str(self), nodes, names))
		return (nodes, names)
def configure(conf):
	"""
	Detect the programs RC or windres, depending on the C/C++ compiler in use.
	Raises a configuration error when no resource compiler can be found.
	"""
	v = conf.env
	# default flags match windres; overridden below for rc.exe
	v['WINRC_TGT_F'] = '-o'
	v['WINRC_SRC_F'] = '-i'
	if not v.WINRC:
		if v.CC_NAME == 'msvc':
			conf.find_program('RC', var='WINRC', path_list=v['PATH'])
			v['WINRC_TGT_F'] = '/fo'
			v['WINRC_SRC_F'] = ''
		else:
			conf.find_program('windres', var='WINRC', path_list=v['PATH'])
	if not v.WINRC:
		conf.fatal('winrc was not found!')
	v['WINRCFLAGS'] = []
@@ -0,0 +1,67 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2006-2010 (ita) | |||||
# Ralf Habacker, 2006 (rh) | |||||
# Yinon Ehrlich, 2009 | |||||
# Michael Kuhn, 2009 | |||||
from waflib.Tools import ccroot, ar | |||||
from waflib.Configure import conf | |||||
@conf
def find_xlc(conf):
	"""
	Detect the Aix C compiler, preferring the thread-safe ``xlc_r``
	variant over plain ``xlc``, and record its version.
	"""
	compiler = conf.find_program(['xlc_r', 'xlc'], var='CC')
	conf.get_xlc_version(compiler)
	conf.env.CC_NAME = 'xlc'
@conf
def xlc_common_flags(conf):
	"""
	Flags required for executing the Aix C compiler
	"""
	v = conf.env
	# the linker defaults to the compiler itself
	if not v['LINK_CC']:
		v['LINK_CC'] = v['CC']
	for key, val in (
		('CC_SRC_F', []),
		('CC_TGT_F', ['-c', '-o']),
		('CCLNK_SRC_F', []),
		('CCLNK_TGT_F', ['-o']),
		# flag templates for includes, defines and libraries
		('CPPPATH_ST', '-I%s'),
		('DEFINES_ST', '-D%s'),
		('LIB_ST', '-l%s'),
		('LIBPATH_ST', '-L%s'),
		('STLIB_ST', '-l%s'),
		('STLIBPATH_ST', '-L%s'),
		('RPATH_ST', '-Wl,-rpath,%s'),
		('SONAME_ST', []),
		('SHLIB_MARKER', []),
		('STLIB_MARKER', []),
		# program
		('LINKFLAGS_cprogram', ['-Wl,-brtl']),
		('cprogram_PATTERN', '%s'),
		# shared library
		('CFLAGS_cshlib', ['-fPIC']),
		('LINKFLAGS_cshlib', ['-G', '-Wl,-brtl,-bexpfull']),
		('cshlib_PATTERN', 'lib%s.so'),
		# static lib
		('LINKFLAGS_cstlib', []),
		('cstlib_PATTERN', 'lib%s.a'),
	):
		v[key] = val
def configure(conf):
	"""
	Configuration for the xlc compiler: detect the compiler and ar,
	then set up the common flags and the generic C tool helpers.
	"""
	conf.find_xlc()
	conf.find_ar()
	conf.xlc_common_flags()
	conf.cc_load_tools()
	conf.cc_add_flags()
	conf.link_add_flags()
@@ -0,0 +1,67 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2006-2010 (ita) | |||||
# Ralf Habacker, 2006 (rh) | |||||
# Yinon Ehrlich, 2009 | |||||
# Michael Kuhn, 2009 | |||||
from waflib.Tools import ccroot, ar | |||||
from waflib.Configure import conf | |||||
@conf
def find_xlcxx(conf):
	"""
	Detect the Aix C++ compiler, preferring the thread-safe ``xlc++_r``
	variant over plain ``xlc++``, and record its version.
	"""
	compiler = conf.find_program(['xlc++_r', 'xlc++'], var='CXX')
	conf.get_xlc_version(compiler)
	conf.env.CXX_NAME = 'xlc++'
@conf
def xlcxx_common_flags(conf):
	"""
	Flags required for executing the Aix C++ compiler
	"""
	v = conf.env
	# the linker defaults to the compiler itself
	if not v['LINK_CXX']:
		v['LINK_CXX'] = v['CXX']
	for key, val in (
		('CXX_SRC_F', []),
		('CXX_TGT_F', ['-c', '-o']),
		('CXXLNK_SRC_F', []),
		('CXXLNK_TGT_F', ['-o']),
		# flag templates for includes, defines and libraries
		('CPPPATH_ST', '-I%s'),
		('DEFINES_ST', '-D%s'),
		('LIB_ST', '-l%s'),
		('LIBPATH_ST', '-L%s'),
		('STLIB_ST', '-l%s'),
		('STLIBPATH_ST', '-L%s'),
		('RPATH_ST', '-Wl,-rpath,%s'),
		('SONAME_ST', []),
		('SHLIB_MARKER', []),
		('STLIB_MARKER', []),
		# program
		('LINKFLAGS_cxxprogram', ['-Wl,-brtl']),
		('cxxprogram_PATTERN', '%s'),
		# shared library
		('CXXFLAGS_cxxshlib', ['-fPIC']),
		('LINKFLAGS_cxxshlib', ['-G', '-Wl,-brtl,-bexpfull']),
		('cxxshlib_PATTERN', 'lib%s.so'),
		# static lib
		('LINKFLAGS_cxxstlib', []),
		('cxxstlib_PATTERN', 'lib%s.a'),
	):
		v[key] = val
def configure(conf):
	"""
	Configuration for the xlc++ compiler: detect the compiler and ar,
	then set up the common flags and the generic C++ tool helpers.
	"""
	conf.find_xlcxx()
	conf.find_ar()
	conf.xlcxx_common_flags()
	conf.cxx_load_tools()
	conf.cxx_add_flags()
	conf.link_add_flags()
@@ -0,0 +1,732 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2005-2010 (ita) | |||||
""" | |||||
Utilities and platform-specific fixes | |||||
The portability fixes try to provide a consistent behavior of the Waf API | |||||
through Python versions 2.3 to 3.X and across different platforms (win32, linux, etc) | |||||
""" | |||||
import os, sys, errno, traceback, inspect, re, shutil, datetime, gc, platform | |||||
import subprocess # <- leave this! | |||||
from collections import deque, defaultdict | |||||
try: | |||||
import _winreg as winreg | |||||
except ImportError: | |||||
try: | |||||
import winreg | |||||
except ImportError: | |||||
winreg = None | |||||
from waflib import Errors | |||||
try: | |||||
from collections import UserDict | |||||
except ImportError: | |||||
from UserDict import UserDict | |||||
try: | |||||
from hashlib import md5 | |||||
except ImportError: | |||||
try: | |||||
from md5 import md5 | |||||
except ImportError: | |||||
# never fail to enable fixes from another module | |||||
pass | |||||
try:
	import threading
except ImportError:
	# platform without a threading module: force single-job builds unless
	# the user already chose a job count
	if not 'JOBS' in os.environ:
		# no threading :-(
		os.environ['JOBS'] = '1'
	class threading(object):
		"""
		A fake threading class for platforms lacking the threading module.
		Use ``waf -j1`` on those platforms
		"""
		pass
	class Lock(object):
		"""Fake Lock class"""
		def acquire(self):
			pass
		def release(self):
			pass
	threading.Lock = threading.Thread = Lock
else:
	# wrap Thread.run so that uncaught exceptions raised in worker threads
	# are reported through sys.excepthook instead of vanishing silently
	run_old = threading.Thread.run
	def run(*args, **kwargs):
		try:
			run_old(*args, **kwargs)
		except (KeyboardInterrupt, SystemExit):
			raise
		except Exception:
			sys.excepthook(*sys.exc_info())
	threading.Thread.run = run
SIG_NIL = 'iluvcuteoverload'.encode()
"""Arbitrary null value for a md5 hash. This value must be changed when the hash value is replaced (size)"""
O644 = 420
"""Constant representing the permissions for regular files (0644 raises a syntax error on python 3)"""
O755 = 493
"""Constant representing the permissions for executable files (0755 raises a syntax error on python 3)"""
rot_chr = ['\\', '|', '/', '-']
"List of characters to use when displaying the throbber (progress bar)"
rot_idx = 0
"Index of the current throbber character (progress bar)"
try:
	from collections import OrderedDict as ordered_iter_dict
except ImportError:
	# python < 2.7: minimal dict subclass that preserves key insertion order
	class ordered_iter_dict(dict):
		def __init__(self, *k, **kw):
			# self.lst records the key insertion order
			self.lst = []
			dict.__init__(self, *k, **kw)
		def clear(self):
			dict.clear(self)
			self.lst = []
		def __setitem__(self, key, value):
			dict.__setitem__(self, key, value)
			try:
				# re-inserting a key moves it to the end
				self.lst.remove(key)
			except ValueError:
				pass
			self.lst.append(key)
		def __delitem__(self, key):
			dict.__delitem__(self, key)
			try:
				self.lst.remove(key)
			except ValueError:
				pass
		def __iter__(self):
			for x in self.lst:
				yield x
		def keys(self):
			return self.lst
is_win32 = os.sep == '\\' or sys.platform == 'win32' # msys2
def readf(fname, m='r', encoding='ISO8859-1'):
	"""
	Read an entire file into a string, use this function instead of os.open() whenever possible.
	In practice the wrapper node.read(..) should be preferred to this function::
	def build(ctx):
		from waflib import Utils
		txt = Utils.readf(self.path.find_node('wscript').abspath())
		txt = ctx.path.find_node('wscript').read()
	:type fname: string
	:param fname: Path to file
	:type m: string
	:param m: Open mode
	:type encoding: string
	:param encoding: encoding value, only used for python 3
	:rtype: string
	:return: Content of the file
	"""
	# on python 3, read text files in binary mode and decode explicitly
	decode = sys.hexversion > 0x3000000 and 'b' not in m
	if decode:
		m += 'b'
	f = open(fname, m)
	try:
		txt = f.read()
	finally:
		f.close()
	if decode:
		txt = txt.decode(encoding) if encoding else txt.decode()
	return txt
def writef(fname, data, m='w', encoding='ISO8859-1'):
	"""
	Write an entire file from a string, use this function instead of os.open() whenever possible.
	In practice the wrapper node.write(..) should be preferred to this function::
	def build(ctx):
		from waflib import Utils
		txt = Utils.writef(self.path.make_node('i_like_kittens').abspath(), 'some data')
		self.path.make_node('i_like_kittens').write('some data')
	:type fname: string
	:param fname: Path to file
	:type data: string
	:param data: The contents to write to the file
	:type m: string
	:param m: Open mode
	:type encoding: string
	:param encoding: encoding value, only used for python 3
	"""
	# on python 3, encode explicitly and write in binary mode
	if sys.hexversion > 0x3000000 and 'b' not in m:
		data = data.encode(encoding)
		m += 'b'
	f = open(fname, m)
	try:
		f.write(data)
	finally:
		f.close()
def h_file(fname):
	"""
	Compute a hash value for a file by using md5. This method may be replaced by
	a faster version if necessary. The following uses the file size and the timestamp value::
	import stat
	from waflib import Utils
	def h_file(fname):
		st = os.stat(fname)
		if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file')
		m = Utils.md5()
		m.update(str(st.st_mtime))
		m.update(str(st.st_size))
		m.update(fname)
		return m.digest()
	Utils.h_file = h_file
	:type fname: string
	:param fname: path to the file to hash
	:return: hash of the file contents
	"""
	m = md5()
	f = open(fname, 'rb')
	try:
		# hash the file incrementally in 200kB chunks
		chunk = f.read(200000)
		while chunk:
			m.update(chunk)
			chunk = f.read(200000)
	finally:
		f.close()
	return m.digest()
def readf_win32(f, m='r', encoding='ISO8859-1'):
	"""
	Win32 variant of readf: the file descriptor is opened with O_NOINHERIT
	so the handle is not leaked to child processes.

	:param f: path to the file
	:param m: open mode
	:param encoding: encoding value, only used for python 3
	:return: content of the file
	"""
	flags = os.O_NOINHERIT | os.O_RDONLY
	if 'b' in m:
		flags |= os.O_BINARY
	if '+' in m:
		flags |= os.O_RDWR
	try:
		fd = os.open(f, flags)
	except OSError:
		raise IOError('Cannot read from %r' % f)
	# on python 3, read in binary mode and decode explicitly
	decode = sys.hexversion > 0x3000000 and 'b' not in m
	if decode:
		m += 'b'
	fh = os.fdopen(fd, m)
	try:
		txt = fh.read()
	finally:
		fh.close()
	if decode:
		txt = txt.decode(encoding) if encoding else txt.decode()
	return txt
def writef_win32(f, data, m='w', encoding='ISO8859-1'):
	"""
	Win32 variant of writef: the file descriptor is opened with O_NOINHERIT
	so the handle is not leaked to child processes.

	:param f: path to the file
	:param data: contents to write
	:param m: open mode
	:param encoding: encoding value, only used for python 3
	"""
	if sys.hexversion > 0x3000000 and 'b' not in m:
		# encode explicitly and write in binary mode on python 3
		data = data.encode(encoding)
		m += 'b'
	flags = os.O_CREAT | os.O_TRUNC | os.O_WRONLY | os.O_NOINHERIT
	if 'b' in m:
		flags |= os.O_BINARY
	if '+' in m:
		flags |= os.O_RDWR
	try:
		fd = os.open(f, flags)
	except OSError:
		raise IOError('Cannot write to %r' % f)
	fh = os.fdopen(fd, m)
	try:
		fh.write(data)
	finally:
		fh.close()
def h_file_win32(fname):
	"""
	Win32 variant of h_file: the file descriptor is opened with O_NOINHERIT
	so the handle is not leaked to child processes.

	:param fname: path to the file to hash
	:return: md5 digest of the file contents
	"""
	try:
		fd = os.open(fname, os.O_BINARY | os.O_RDONLY | os.O_NOINHERIT)
	except OSError:
		raise IOError('Cannot read from %r' % fname)
	m = md5()
	fh = os.fdopen(fd, 'rb')
	try:
		# hash the file incrementally in 200kB chunks
		chunk = fh.read(200000)
		while chunk:
			m.update(chunk)
			chunk = fh.read(200000)
	finally:
		fh.close()
	return m.digest()
# always save these
readf_unix = readf
writef_unix = writef
h_file_unix = h_file
if hasattr(os, 'O_NOINHERIT') and sys.hexversion < 0x3040000:
	# replace the default functions with the O_NOINHERIT variants so file
	# handles are not leaked to child processes (python 3.4+ makes file
	# descriptors non-inheritable by default, see PEP 446)
	readf = readf_win32
	writef = writef_win32
	h_file = h_file_win32
try:
	# python 2: str objects support the 'hex' codec directly
	x = ''.encode('hex')
except LookupError:
	# python 3: the codec is gone, use binascii and normalize to str
	import binascii
	def to_hex(s):
		ret = binascii.hexlify(s)
		if not isinstance(ret, str):
			ret = ret.decode('utf-8')
		return ret
else:
	def to_hex(s):
		return s.encode('hex')
to_hex.__doc__ = """
Return the hexadecimal representation of a string
:param s: string to convert
:type s: string
"""
def listdir_win32(s):
	"""
	List the contents of a folder in a portable manner.
	On Win32, return the list of drive letters: ['C:', 'X:', 'Z:']
	:type s: string
	:param s: a string, which can be empty on Windows
	"""
	if not s:
		try:
			import ctypes
		except ImportError:
			# there is nothing much we can do
			return [x + ':\\' for x in list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')]
		else:
			dlen = 4 # length of "?:\\x00"
			maxdrives = 26
			buf = ctypes.create_string_buffer(maxdrives * dlen)
			# fills buf with the null-separated root strings 'C:\', 'D:\', ...
			ndrives = ctypes.windll.kernel32.GetLogicalDriveStringsA(maxdrives*dlen, ctypes.byref(buf))
			# keep only the 'C:' part of each 4-byte entry
			return [ str(buf.raw[4*i:4*i+2].decode('ascii')) for i in range(int(ndrives/dlen)) ]
	if len(s) == 2 and s[1] == ":":
		# bare drive letter such as 'c:' -> 'c:\\'
		s += os.sep
	if not os.path.isdir(s):
		e = OSError('%s is not a directory' % s)
		e.errno = errno.ENOENT
		raise e
	return os.listdir(s)
listdir = os.listdir
if is_win32:
	# on win32, support listing drive letters with an empty argument
	listdir = listdir_win32
def num2ver(ver):
	"""
	Convert a string, tuple or version number into an integer. The number is supposed to have at most 4 digits::
	from waflib.Utils import num2ver
	num2ver('1.3.2') == num2ver((1,3,2)) == num2ver((1,3,2,0))
	:type ver: string or tuple of numbers
	:param ver: a version number
	"""
	if isinstance(ver, str):
		ver = tuple(ver.split('.'))
	if not isinstance(ver, tuple):
		# already a plain number
		return ver
	# pack up to 4 components into one integer, 8 bits per component
	total = 0
	for idx, part in enumerate(ver[:4]):
		total += 256 ** (3 - idx) * int(part)
	return total
def ex_stack():
	"""
	Extract the stack to display exceptions
	:return: a string representing the last exception
	"""
	# format the exception currently being handled
	return ''.join(traceback.format_exception(*sys.exc_info()))
def to_list(sth):
	"""
	Convert a string argument to a list by splitting on spaces, and pass
	through a list argument unchanged::
	from waflib.Utils import to_list
	lst = to_list("a b c d")
	:param sth: List or a string of items separated by spaces
	:rtype: list
	:return: Argument converted to list
	"""
	if isinstance(sth, str):
		return sth.split()
	return sth
def split_path_unix(path):
	# plain split on the forward slash
	sep = '/'
	return path.split(sep)
def split_path_cygwin(path):
	# UNC paths: keep the leading slash on the first component
	if path.startswith('//'):
		parts = path.split('/')[2:]
		parts[0] = '/' + parts[0]
		return parts
	return path.split('/')
re_sp = re.compile('[/\\\\]')
def split_path_win32(path):
	# UNC paths: keep the leading backslash on the first component
	if path.startswith('\\\\'):
		parts = re_sp.split(path)[2:]
		parts[0] = '\\' + parts[0]
		return parts
	return re_sp.split(path)
# pick the path-splitting implementation matching the host platform
if sys.platform == 'cygwin':
	split_path = split_path_cygwin
elif is_win32:
	split_path = split_path_win32
else:
	split_path = split_path_unix
split_path.__doc__ = """
Split a path by / or \\. This function is not like os.path.split
:type path: string
:param path: path to split
:return: list of strings
"""
def check_dir(path):
	"""
	Ensure that a directory exists (similar to ``mkdir -p``).
	:type path: string
	:param path: Path to directory
	"""
	if os.path.isdir(path):
		return
	try:
		os.makedirs(path)
	except OSError as e:
		# makedirs may fail because another process created the folder
		# concurrently; only report an error if it is still missing
		if not os.path.isdir(path):
			raise Errors.WafError('Cannot create the folder %r' % path, ex=e)
def check_exe(name, env=None):
	"""
	Ensure that a program exists
	:type name: string
	:param name: name or path to program
	:return: path of the program or None
	"""
	if not name:
		raise ValueError('Cannot execute an empty string!')
	def is_exe(fpath):
		return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
	dirname, _ = os.path.split(name)
	if dirname and is_exe(name):
		# an explicit (relative or absolute) path was given
		return os.path.abspath(name)
	# otherwise search the PATH entries
	env = env or os.environ
	for folder in env["PATH"].split(os.pathsep):
		candidate = os.path.join(folder.strip('"'), name)
		if is_exe(candidate):
			return os.path.abspath(candidate)
	return None
def def_attrs(cls, **kw):
	"""
	Set default attributes on a class instance
	:type cls: class
	:param cls: the class to update the given attributes in.
	:type kw: dict
	:param kw: dictionary of attributes names and values.
	"""
	for name, value in kw.items():
		# only fill in attributes that are not already present
		if not hasattr(cls, name):
			setattr(cls, name, value)
def quote_define_name(s):
	"""
	Convert a string to an identifier suitable for C defines.
	:type s: string
	:param s: String to convert
	:rtype: string
	:return: Identifier suitable for C defines
	"""
	# replace every non-alphanumeric run with a single underscore
	ident = re.sub('_+', '_', re.sub('[^a-zA-Z0-9]', '_', s))
	return ident.upper()
def h_list(lst):
	"""
	Hash lists. For tuples, using hash(tup) is much more efficient,
	except on python >= 3.3 where hash randomization assumes everybody is running a web application.
	:param lst: list to hash
	:type lst: list of strings
	:return: hash of the list
	"""
	# hash the repr of the whole list in one pass
	return md5(str(lst).encode()).digest()
def h_fun(fun):
	"""
	Hash functions by their source code, caching the result on the
	function object itself (attribute ``code``).
	:param fun: function to hash
	:type fun: function
	:return: hash of the function
	"""
	try:
		# fast path: already computed
		return fun.code
	except AttributeError:
		pass
	try:
		h = inspect.getsource(fun)
	except IOError:
		# source not available (builtin, interactive, ...)
		h = "nocode"
	try:
		fun.code = h
	except AttributeError:
		# some callables do not accept new attributes
		pass
	return h
reg_subst = re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")
def subst_vars(expr, params):
	"""
	Replace ${VAR} with the value of VAR taken from a dict or a config set::
	from waflib import Utils
	s = Utils.subst_vars('${PREFIX}/bin', env)
	:type expr: string
	:param expr: String to perform substitution on
	:param params: Dictionary or config set to look up variable values.
	"""
	def _replace(m):
		# escaped backslash and dollar come through unchanged
		if m.group(1):
			return '\\'
		if m.group(2):
			return '$'
		name = m.group(3)
		try:
			# ConfigSet instances may contain lists
			return params.get_flat(name)
		except AttributeError:
			return params[name]
	return reg_subst.sub(_replace, expr)
def destos_to_binfmt(key):
	"""
	Return the binary format based on the unversioned platform name.
	:param key: platform name
	:type key: string
	:return: string representing the binary format
	"""
	if key == 'darwin':
		return 'mac-o'
	if key in ('win32', 'cygwin', 'uwin', 'msys'):
		return 'pe'
	# everything else is assumed to use ELF
	return 'elf'
def unversioned_sys_platform():
	"""
	Return the unversioned platform name.
	Some Python platform names contain versions, that depend on
	the build environment, e.g. linux2, freebsd6, etc.
	This returns the name without the version number. Exceptions are
	os2 and win32, which are returned verbatim.
	:rtype: string
	:return: Unversioned platform name
	"""
	s = sys.platform
	if s.startswith('java'):
		# The real OS is hidden under the JVM.
		from java.lang import System
		s = System.getProperty('os.name')
		# see http://lopica.sourceforge.net/os.html for a list of possible values
		if s == 'Mac OS X':
			return 'darwin'
		if s.startswith('Windows '):
			return 'win32'
		if s == 'OS/2':
			return 'os2'
		if s == 'HP-UX':
			return 'hp-ux'
		if s in ('SunOS', 'Solaris'):
			return 'sunos'
		s = s.lower()
	# powerpc == darwin for our purposes
	if s == 'powerpc':
		return 'darwin'
	if s in ('win32', 'os2'):
		return s
	# strip trailing version digits: linux2 -> linux, freebsd6 -> freebsd
	return re.split('\d+$', s)[0]
def nada(*k, **kw):
	"""
	A function that accepts any arguments and does nothing
	:return: None
	"""
	return None
class Timer(object):
	"""
	Simple object for timing the execution of commands.
	Its string representation is the elapsed time since creation::
	from waflib.Utils import Timer
	timer = Timer()
	a_few_operations()
	s = str(timer)
	"""
	def __init__(self):
		# reference point for the measurement
		self.start_time = datetime.datetime.utcnow()
	def __str__(self):
		delta = datetime.datetime.utcnow() - self.start_time
		hours, rem = divmod(delta.seconds, 3600)
		minutes, seconds = divmod(rem, 60)
		seconds += delta.microseconds * 1e-6
		# only show the larger units when they are non-zero
		out = []
		if delta.days:
			out.append('%dd' % delta.days)
		if delta.days or hours:
			out.append('%dh' % hours)
		if delta.days or hours or minutes:
			out.append('%dm' % minutes)
		out.append('%.3fs' % seconds)
		return ''.join(out)
if is_win32:
	# keep a reference to the original implementation before patching
	old = shutil.copy2
	def copy2(src, dst):
		"""
		shutil.copy2 does not copy the file attributes on windows, so we
		hack into the shutil module to fix the problem
		"""
		old(src, dst)
		shutil.copystat(src, dst)
	setattr(shutil, 'copy2', copy2)
if os.name == 'java':
	# Jython cannot disable the gc but they can enable it ... wtf?
	try:
		gc.disable()
		gc.enable()
	except NotImplementedError:
		# make gc.disable a harmless alias so code using nogc() keeps working
		gc.disable = gc.enable
def read_la_file(path):
	"""
	Read property files, used by msvc.py
	:param path: file to read
	:type path: string
	"""
	pat = re.compile(r'^([^=]+)=\'(.*)\'$')
	props = {}
	for line in readf(path).splitlines():
		try:
			_, key, value, _ = pat.split(line.strip())
		except ValueError:
			# the line is not of the form key='value'
			continue
		props[key] = value
	return props
def nogc(fun):
	"""
	Decorator: let a function disable the garbage collector during its execution.
	It is used in the build context when storing/loading the build cache file (pickle)
	:param fun: function to execute
	:type fun: function
	:return: the return value of the function executed
	"""
	def wrapper(*k, **kw):
		try:
			gc.disable()
			ret = fun(*k, **kw)
		finally:
			# re-enable the collector even when the call raised
			gc.enable()
		return ret
	wrapper.__doc__ = fun.__doc__
	return wrapper
def run_once(fun):
	"""
	Decorator: let a function cache its results, use like this::
	@run_once
	def foo(k):
		return 345*2343
	:param fun: function to execute
	:type fun: function
	:return: the return value of the function executed
	"""
	memo = {}
	def wrap(k):
		if k in memo:
			return memo[k]
		result = fun(k)
		memo[k] = result
		return result
	# expose the cache for inspection/clearing
	wrap.__cache__ = memo
	return wrap
def get_registry_app_path(key, filename):
	"""
	Look up an executable through the Windows 'App Paths' registry key.
	Returns None when the registry is unavailable, the lookup fails or
	the registered path does not point to an existing file.
	"""
	if not winreg:
		return None
	try:
		result = winreg.QueryValue(key, "Software\\Microsoft\\Windows\\CurrentVersion\\App Paths\\%s.exe" % filename[0])
	except WindowsError:
		return None
	if os.path.isfile(result):
		return result
def lib64():
	"""
	Return '64' when /usr/lib64 should be preferred over /usr/lib as the
	default library directory, and '' otherwise.
	"""
	# only meaningful on unix-like systems
	if os.sep != '/':
		return ''
	if platform.architecture()[0] != '64bit':
		return ''
	if os.path.exists('/usr/lib64') and not os.path.exists('/usr/lib32'):
		return '64'
	return ''
@@ -0,0 +1,3 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2005-2010 (ita) |
@@ -0,0 +1,342 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
""" | |||||
Emulate a vt100 terminal in cmd.exe | |||||
By wrapping sys.stdout / sys.stderr with Ansiterm, | |||||
the vt100 escape characters will be interpreted and | |||||
the equivalent actions will be performed with Win32 | |||||
console commands. | |||||
""" | |||||
import os, re, sys | |||||
from waflib import Utils | |||||
# serializes writes from concurrent threads through the terminal wrappers
wlock = Utils.threading.Lock()
try: | |||||
from ctypes import Structure, windll, c_short, c_ushort, c_ulong, c_int, byref, c_wchar, POINTER, c_long | |||||
except ImportError: | |||||
class AnsiTerm(object):
	"""
	Fallback stream wrapper used when ctypes is not available:
	writes are serialized through a lock, but vt100 escape sequences
	are passed through uninterpreted.
	"""
	def __init__(self, stream):
		self.stream = stream
		try:
			self.errors = self.stream.errors
		except AttributeError:
			pass # python 2.5
		self.encoding = self.stream.encoding
	def write(self, txt):
		# serialize and flush so output from concurrent tasks is not interleaved
		try:
			wlock.acquire()
			self.stream.write(txt)
			self.stream.flush()
		finally:
			wlock.release()
	def fileno(self):
		# delegate to the wrapped stream
		return self.stream.fileno()
	def flush(self):
		self.stream.flush()
	def isatty(self):
		return self.stream.isatty()
else: | |||||
class COORD(Structure):
	# (X, Y) character-cell coordinate used by the win32 console API
	_fields_ = [("X", c_short), ("Y", c_short)]
class SMALL_RECT(Structure):
	# rectangle of character cells (console window bounds)
	_fields_ = [("Left", c_short), ("Top", c_short), ("Right", c_short), ("Bottom", c_short)]
class CONSOLE_SCREEN_BUFFER_INFO(Structure):
	# mirror of the win32 CONSOLE_SCREEN_BUFFER_INFO structure
	_fields_ = [("Size", COORD), ("CursorPosition", COORD), ("Attributes", c_ushort), ("Window", SMALL_RECT), ("MaximumWindowSize", COORD)]
class CONSOLE_CURSOR_INFO(Structure):
	# mirror of the win32 CONSOLE_CURSOR_INFO structure
	_fields_ = [('dwSize', c_ulong), ('bVisible', c_int)]
try:
	_type = unicode # python 2
except NameError:
	_type = str # python 3
to_int = lambda number, default: number and int(number) or default
STD_OUTPUT_HANDLE = -11
STD_ERROR_HANDLE = -12
# declare the argument/return types of the kernel32 calls used below
windll.kernel32.GetStdHandle.argtypes = [c_ulong]
windll.kernel32.GetStdHandle.restype = c_ulong
windll.kernel32.GetConsoleScreenBufferInfo.argtypes = [c_ulong, POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
windll.kernel32.GetConsoleScreenBufferInfo.restype = c_long
windll.kernel32.SetConsoleTextAttribute.argtypes = [c_ulong, c_ushort]
windll.kernel32.SetConsoleTextAttribute.restype = c_long
windll.kernel32.FillConsoleOutputCharacterW.argtypes = [c_ulong, c_wchar, c_ulong, POINTER(COORD), POINTER(c_ulong)]
windll.kernel32.FillConsoleOutputCharacterW.restype = c_long
windll.kernel32.FillConsoleOutputAttribute.argtypes = [c_ulong, c_ushort, c_ulong, POINTER(COORD), POINTER(c_ulong) ]
windll.kernel32.FillConsoleOutputAttribute.restype = c_long
windll.kernel32.SetConsoleCursorPosition.argtypes = [c_ulong, POINTER(COORD) ]
windll.kernel32.SetConsoleCursorPosition.restype = c_long
windll.kernel32.SetConsoleCursorInfo.argtypes = [c_ulong, POINTER(CONSOLE_CURSOR_INFO)]
windll.kernel32.SetConsoleCursorInfo.restype = c_long
	class AnsiTerm(object):
		"""
		Emulate a vt100 terminal in cmd.exe: parse ANSI escape sequences
		out of the written text and replay them through the Win32 console
		API (cursor movement, colors, clearing). Plain text is forwarded
		to WriteConsoleA/W.
		"""
		def __init__(self, s):
			# s: the stream being wrapped (expected to be stdout or stderr)
			self.stream = s
			try:
				self.errors = s.errors
			except AttributeError:
				pass # python2.5
			self.encoding = s.encoding
			# stack used by the save/restore cursor sequences (ESC[s / ESC[u)
			self.cursor_history = []
			# fd 2 -> stderr handle, anything else -> stdout handle
			handle = (s.fileno() == 2) and STD_ERROR_HANDLE or STD_OUTPUT_HANDLE
			self.hconsole = windll.kernel32.GetStdHandle(handle)
			self._sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
			self._csinfo = CONSOLE_CURSOR_INFO()
			windll.kernel32.GetConsoleCursorInfo(self.hconsole, byref(self._csinfo))
			# just to double check that the console is usable
			self._orig_sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
			r = windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self._orig_sbinfo))
			# the call returns nonzero on success; keep the original attributes
			# around so that 'ESC[0m' can restore them in set_color()
			self._isatty = r == 1
		def screen_buffer_info(self):
			"""
			Updates self._sbinfo and returns it
			"""
			windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self._sbinfo))
			return self._sbinfo
		def clear_line(self, param):
			# ESC[K with optional mode parameter
			mode = param and int(param) or 0
			sbinfo = self.screen_buffer_info()
			# NOTE(review): modes 1 and 2 do not match the usual ANSI 'EL'
			# semantics: mode 1 clears the WHOLE line (start of line, full
			# width) and mode 2 clears from the cursor to the end of line.
			# Kept as-is to preserve behavior.
			if mode == 1: # clears the entire line (starts at column 0, length = full width)
				line_start = COORD(0, sbinfo.CursorPosition.Y)
				line_length = sbinfo.Size.X
			elif mode == 2: # clears from the cursor to the end of the line
				line_start = COORD(sbinfo.CursorPosition.X, sbinfo.CursorPosition.Y)
				line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
			else: # default: clear from cursor position to end of line
				line_start = sbinfo.CursorPosition
				line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
			chars_written = c_ulong()
			# overwrite with spaces, then reset the attributes on the same cells
			windll.kernel32.FillConsoleOutputCharacterW(self.hconsole, c_wchar(' '), line_length, line_start, byref(chars_written))
			windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, line_length, line_start, byref(chars_written))
		def clear_screen(self, param):
			# ESC[J with optional mode parameter
			mode = to_int(param, 0)
			sbinfo = self.screen_buffer_info()
			if mode == 1: # Clear from beginning of screen to cursor position
				clear_start = COORD(0, 0)
				clear_length = sbinfo.CursorPosition.X * sbinfo.CursorPosition.Y
			elif mode == 2: # Clear entire screen and return cursor to home
				clear_start = COORD(0, 0)
				clear_length = sbinfo.Size.X * sbinfo.Size.Y
				windll.kernel32.SetConsoleCursorPosition(self.hconsole, clear_start)
			else: # Clear from cursor position to end of screen
				clear_start = sbinfo.CursorPosition
				clear_length = ((sbinfo.Size.X - sbinfo.CursorPosition.X) + sbinfo.Size.X * (sbinfo.Size.Y - sbinfo.CursorPosition.Y))
			chars_written = c_ulong()
			windll.kernel32.FillConsoleOutputCharacterW(self.hconsole, c_wchar(' '), clear_length, clear_start, byref(chars_written))
			windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, clear_length, clear_start, byref(chars_written))
		def push_cursor(self, param):
			# ESC[s: save the current cursor position
			sbinfo = self.screen_buffer_info()
			self.cursor_history.append(sbinfo.CursorPosition)
		def pop_cursor(self, param):
			# ESC[u: restore the most recently saved cursor position (no-op if none)
			if self.cursor_history:
				old_pos = self.cursor_history.pop()
				windll.kernel32.SetConsoleCursorPosition(self.hconsole, old_pos)
		def set_cursor(self, param):
			# ESC[row;colH - ANSI coordinates are 1-based, the console is 0-based
			y, sep, x = param.partition(';')
			x = to_int(x, 1) - 1
			y = to_int(y, 1) - 1
			sbinfo = self.screen_buffer_info()
			# clamp to the screen buffer size
			new_pos = COORD(
				min(max(0, x), sbinfo.Size.X),
				min(max(0, y), sbinfo.Size.Y)
			)
			windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
		def set_column(self, param):
			# ESC[nG: move to column n (1-based) on the current row
			x = to_int(param, 1) - 1
			sbinfo = self.screen_buffer_info()
			new_pos = COORD(
				min(max(0, x), sbinfo.Size.X),
				sbinfo.CursorPosition.Y
			)
			windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
		def move_cursor(self, x_offset=0, y_offset=0):
			# relative cursor motion, clamped to the screen buffer
			sbinfo = self.screen_buffer_info()
			new_pos = COORD(
				min(max(0, sbinfo.CursorPosition.X + x_offset), sbinfo.Size.X),
				min(max(0, sbinfo.CursorPosition.Y + y_offset), sbinfo.Size.Y)
			)
			windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
		def move_up(self, param):
			# ESC[nA
			self.move_cursor(y_offset = -to_int(param, 1))
		def move_down(self, param):
			# ESC[nB
			self.move_cursor(y_offset = to_int(param, 1))
		def move_left(self, param):
			# ESC[nD
			self.move_cursor(x_offset = -to_int(param, 1))
		def move_right(self, param):
			# ESC[nC
			self.move_cursor(x_offset = to_int(param, 1))
		def next_line(self, param):
			# ESC[nE: column 0, n lines down
			sbinfo = self.screen_buffer_info()
			self.move_cursor(
				x_offset = -sbinfo.CursorPosition.X,
				y_offset = to_int(param, 1)
			)
		def prev_line(self, param):
			# ESC[nF: column 0, n lines up
			sbinfo = self.screen_buffer_info()
			self.move_cursor(
				x_offset = -sbinfo.CursorPosition.X,
				y_offset = -to_int(param, 1)
			)
		def rgb2bgr(self, c):
			# ANSI color codes are RGB-ordered bits; console attributes are BGR:
			# swap bit 0 and bit 2, keep bit 1
			return ((c&1) << 2) | (c&2) | ((c&4)>>2)
		def set_color(self, param):
			# ESC[...m - one or more ';'-separated SGR codes
			cols = param.split(';')
			sbinfo = self.screen_buffer_info()
			attr = sbinfo.Attributes
			for c in cols:
				c = to_int(c, 0)
				if 29 < c < 38: # fgcolor (30-37)
					attr = (attr & 0xfff0) | self.rgb2bgr(c - 30)
				elif 39 < c < 48: # bgcolor (40-47)
					attr = (attr & 0xff0f) | (self.rgb2bgr(c - 40) << 4)
				elif c == 0: # reset to the attributes captured at startup
					attr = self._orig_sbinfo.Attributes
				elif c == 1: # strong -> foreground intensity bit
					attr |= 0x08
				elif c == 4: # blink not available -> bg intensity
					attr |= 0x80
				elif c == 7: # negative: swap fg and bg nibbles, keep intensity bits
					attr = (attr & 0xff88) | ((attr & 0x70) >> 4) | ((attr & 0x07) << 4)
			windll.kernel32.SetConsoleTextAttribute(self.hconsole, attr)
		def show_cursor(self,param):
			# ESC[?25h
			self._csinfo.bVisible = 1
			windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(self._csinfo))
		def hide_cursor(self,param):
			# ESC[?25l
			self._csinfo.bVisible = 0
			windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(self._csinfo))
		# dispatch table: final ANSI command letter -> unbound handler method
		ansi_command_table = {
			'A': move_up,
			'B': move_down,
			'C': move_right,
			'D': move_left,
			'E': next_line,
			'F': prev_line,
			'G': set_column,
			'H': set_cursor,
			'f': set_cursor,
			'J': clear_screen,
			'K': clear_line,
			'h': show_cursor,
			'l': hide_cursor,
			'm': set_color,
			's': push_cursor,
			'u': pop_cursor,
		}
		# Match either the escape sequence or text not containing escape sequence
		ansi_tokens = re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')
		def write(self, text):
			# wlock is a module-level lock defined elsewhere in this file;
			# presumably it serializes writes from multiple threads - confirm.
			try:
				wlock.acquire()
				if self._isatty:
					# split into (params, command) escape tuples and raw text runs
					for param, cmd, txt in self.ansi_tokens.findall(text):
						if cmd:
							cmd_func = self.ansi_command_table.get(cmd)
							if cmd_func:
								cmd_func(self, param)
						else:
							self.writeconsole(txt)
				else:
					# no support for colors in the console, just output the text:
					# eclipse or msys may be able to interpret the escape sequences
					self.stream.write(text)
			finally:
				wlock.release()
		def writeconsole(self, txt):
			# write raw text through the console API, in chunks
			chars_written = c_int()
			writeconsole = windll.kernel32.WriteConsoleA
			if isinstance(txt, _type):
				# unicode text goes through the wide-char entry point
				writeconsole = windll.kernel32.WriteConsoleW
			# MSDN says that there is a shared buffer of 64 KB for the console
			# writes. Attempt to not get ERROR_NOT_ENOUGH_MEMORY, see waf issue #746
			done = 0
			todo = len(txt)
			chunk = 32<<10
			while todo != 0:
				doing = min(chunk, todo)
				buf = txt[done:done+doing]
				r = writeconsole(self.hconsole, buf, doing, byref(chars_written), None)
				if r == 0:
					# write failed: halve the chunk size and retry the same slice
					chunk >>= 1
					continue
				done += doing
				todo -= doing
		def fileno(self):
			return self.stream.fileno()
		def flush(self):
			# console writes are unbuffered, nothing to flush
			pass
		def isatty(self):
			return self._isatty
	if sys.stdout.isatty() or sys.stderr.isatty():
		# Query the console width through the Win32 API: prefer stdout's
		# handle when it is a console, otherwise use stderr's.
		handle = sys.stdout.isatty() and STD_OUTPUT_HANDLE or STD_ERROR_HANDLE
		console = windll.kernel32.GetStdHandle(handle)
		sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
		def get_term_cols():
			# Re-query on every call so resizes are picked up; the width is
			# reduced by one to avoid the cursor wrapping on full-width lines.
			windll.kernel32.GetConsoleScreenBufferInfo(console, byref(sbinfo))
			# TODO Issue 1401
			return sbinfo.Size.X - 1
# On Unix, probe the terminal width via the TIOCGWINSZ ioctl; if anything
# fails (no fcntl/termios, dumb terminal, ioctl error) the default
# get_term_cols defined earlier is kept. Just try and see.
try:
	import struct, fcntl, termios
except ImportError:
	pass
else:
	if (sys.stdout.isatty() or sys.stderr.isatty()) and os.environ.get('TERM', '') not in ('dumb', 'emacs'):
		# prefer stdout's fd when it is a tty, else use stderr's
		FD = sys.stdout.isatty() and sys.stdout.fileno() or sys.stderr.fileno()
		def fun():
			# unpack (rows, cols, xpixels, ypixels) and return the column count
			return struct.unpack("HHHH", fcntl.ioctl(FD, termios.TIOCGWINSZ, struct.pack("HHHH", 0, 0, 0, 0)))[1]
		try:
			# run once as a smoke test before installing it
			fun()
		except Exception as e:
			pass
		else:
			get_term_cols = fun
@@ -0,0 +1,3 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2005-2010 (ita) |
@@ -0,0 +1,7 @@ | |||||
#! /usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2011 (ita) | |||||
from waflib import Logs | |||||
# Deprecation notice emitted at import time: the 'add_objects' tool no
# longer exists as a separate extra.
Logs.warn('This tool has been merged to the main library, remove the references to "add_objects"')
@@ -0,0 +1,169 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2006-2015 (ita) | |||||
""" | |||||
Build as batches. | |||||
Instead of compiling object files one by one, c/c++ compilers are often able to compile at once: | |||||
cc -c ../file1.c ../file2.c ../file3.c | |||||
Files are output on the directory where the compiler is called, and dependencies are more difficult | |||||
to track (do not run the command on all source files if only one file changes) | |||||
As such, we do as if the files were compiled one by one, but no command is actually run: | |||||
replace each cc/cpp Task by a TaskSlave. A new task called TaskMaster collects the | |||||
signatures from each slave and finds out the command-line to run. | |||||
Just import this module in the configuration (no other change required). | |||||
This is provided as an example, for performance unity builds are recommended (fewer tasks and | |||||
fewer jobs to execute). See waflib/extras/unity.py. | |||||
""" | |||||
from waflib import Task, Utils | |||||
from waflib.TaskGen import extension, feature, after_method | |||||
from waflib.Tools import c, cxx | |||||
# maximum number of slave tasks aggregated into one batch master
MAX_BATCH = 50
# compile command templates; note that both C and C++ use the
# CXX_TGT_F_BATCHED variable (set per-task in fix_path below for msvc)
c_str = '${CC} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED}'
c_fun, _ = Task.compile_fun_noshell(c_str)
cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED}'
cxx_fun, _ = Task.compile_fun_noshell(cxx_str)
# counter used to give each batch master (and output folder) a unique index
count = 70000
class batch_task(Task.Task):
	"""
	Master task collecting slave c/cxx tasks and running one combined
	compiler invocation for all of their sources.
	"""
	color = 'PINK'
	after = ['c', 'cxx']
	before = ['cprogram', 'cshlib', 'cstlib', 'cxxprogram', 'cxxshlib', 'cxxstlib']
	def __init__(self, *k, **kw):
		Task.Task.__init__(self, *k, **kw)
		# each master gets a unique index, used for the output directory name
		global count
		count += 1
		self.idx = count
		self.slaves = []
		self.inputs = []
		self.hasrun = 0
	def uid(self):
		# include the generator index so identical signatures on different
		# generators do not collide
		hasher = Utils.md5()
		hasher.update(Task.Task.uid(self))
		hasher.update(str(self.generator.idx).encode())
		return hasher.digest()
	def __str__(self):
		return 'Batch compilation for %d slaves' % len(self.slaves)
	def add_slave(self, slave):
		"""Attach a slave task and make this master run after it."""
		self.slaves.append(slave)
		self.set_run_after(slave)
	def runnable_status(self):
		# wait until every predecessor has run
		if not all(t.hasrun for t in self.run_after):
			return Task.ASK_LATER
		# run only if at least one slave was not skipped
		if any(t.hasrun != Task.SKIPPED for t in self.slaves):
			return Task.RUN_ME
		return Task.SKIP_ME
	def run(self):
		"""Compile all non-skipped slave sources in a single command."""
		self.outputs = []
		active = [t for t in self.slaves if t.hasrun != Task.SKIPPED]
		self.env.SRCLST = [t.inputs[0].abspath() for t in active]
		# the compiler drops object files into the current directory
		self.cwd = active[0].outputs[0].parent.abspath()
		compile_fun = c_fun if self.slaves[0].__class__.__name__ == 'c' else cxx_fun
		ret = compile_fun(self)
		if ret:
			return ret
		# run the original post_run of each slave to record its outputs
		for t in active:
			t.old_post_run()
def hook(cls_type):
	"""
	Return an extension hook creating a slave task of class *cls_type*
	('c' or 'cxx') for *node* and attaching it to a batch master task.
	A new master is started per source directory and whenever the current
	one exceeds MAX_BATCH slaves.
	"""
	def n_hook(self, node):
		ext = '.obj' if self.env.CC_NAME == 'msvc' else '.o'
		name = node.name
		k = name.rfind('.')
		if k >= 0:
			basename = name[:k] + ext
		else:
			basename = name + ext
		# object files go into a per-generator subfolder named after self.idx
		outdir = node.parent.get_bld().make_node('%d' % self.idx)
		outdir.mkdir()
		out = outdir.find_or_declare(basename)
		task = self.create_task(cls_type, node, out)
		try:
			self.compiled_tasks.append(task)
		except AttributeError:
			self.compiled_tasks = [task]
		# lazily initialize the per-generator master bookkeeping
		if not getattr(self, 'masters', None):
			self.masters = {}
			self.allmasters = []
		def fix_path(tsk):
			# msvc needs an explicit output directory (trailing backslash required)
			if self.env.CC_NAME == 'msvc':
				tsk.env.append_unique('CXX_TGT_F_BATCHED', '/Fo%s\\' % outdir.abspath())
		# one master per source directory; rotate when it grows past MAX_BATCH
		if not node.parent in self.masters:
			m = self.masters[node.parent] = self.master = self.create_task('batch')
			fix_path(m)
			self.allmasters.append(m)
		else:
			m = self.masters[node.parent]
			if len(m.slaves) > MAX_BATCH:
				m = self.masters[node.parent] = self.master = self.create_task('batch')
				fix_path(m)
				self.allmasters.append(m)
		m.add_slave(task)
		return task
	return n_hook
# Register the batching hooks so that C/C++ sources create slave tasks
# bound to batch masters instead of standalone compile tasks.
extension('.c')(hook('c'))
extension('.cpp','.cc','.cxx','.C','.c++')(hook('cxx'))
@feature('cprogram', 'cshlib', 'cstaticlib', 'cxxprogram', 'cxxshlib', 'cxxstlib')
@after_method('apply_link')
def link_after_masters(self):
	"""Make the link task wait for every batch master of this generator."""
	for master in getattr(self, 'allmasters', []):
		self.link_task.set_run_after(master)
# Modify the c and cxx task classes - in theory it would be best to
# create subclasses and to re-map the c/c++ extensions.
# The slaves must not execute or post-process anything themselves:
# neutralize run/post_run, keeping the originals under oldrun/old_post_run
# (old_post_run is invoked by batch_task.run above).
for x in ('c', 'cxx'):
	t = Task.classes[x]
	def run(self):
		# no-op: the batch master performs the actual compilation
		pass
	def post_run(self):
		# no-op: outputs are recorded via old_post_run from the master
		pass
	setattr(t, 'oldrun', getattr(t, 'run', None))
	setattr(t, 'run', run)
	setattr(t, 'old_post_run', t.post_run)
	setattr(t, 'post_run', post_run)
@@ -0,0 +1,58 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2011 (ita) | |||||
""" | |||||
Latex processing using "biber" | |||||
""" | |||||
import os | |||||
from waflib import Task, Logs | |||||
from waflib.Tools import tex as texmodule | |||||
class tex(texmodule.tex):
	# command runner for the 'biber' program
	biber_fun, _ = Task.compile_fun('${BIBER} ${BIBERFLAGS} ${SRCFILE}',shell=False)
	biber_fun.__doc__ = """
	Execute the program **biber**
	"""
	def bibfile(self):
		# disabled: bibliography handling is done in bibunits() below
		return None
	def bibunits(self):
		"""
		Run biber when a .bcf file exists next to the main .aux file,
		otherwise fall back to the parent class bibtex processing.
		"""
		# build a fresh environment for the subprocess with TeX search paths
		self.env.env = {}
		self.env.env.update(os.environ)
		self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
		# strip the '.aux' extension: biber wants the bare job name
		self.env.SRCFILE = self.aux_nodes[0].name[:-4]
		if not self.env['PROMPT_LATEX']:
			self.env.append_unique('BIBERFLAGS', '--quiet')
		path = self.aux_nodes[0].abspath()[:-4] + '.bcf'
		if os.path.isfile(path):
			Logs.warn('calling biber')
			self.check_status('error when calling biber, check %s.blg for errors' % (self.env.SRCFILE), self.biber_fun())
		else:
			# no biber control file: run the standard bibtex machinery
			super(tex, self).bibfile()
			super(tex, self).bibunits()
class latex(tex):
	# same as the stock latex task, but inheriting the biber-aware tex above
	texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
class pdflatex(tex):
	# pdflatex variant of the biber-aware tex task
	texfun, vars = Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
class xelatex(tex):
	# xelatex variant of the biber-aware tex task
	texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False)
def configure(self):
	"""
	Almost the same as in tex.py, but try to detect 'biber'
	"""
	env = self.env
	# detect each program independently; missing ones are simply skipped
	for prog in ' biber tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split():
		try:
			self.find_program(prog, var=prog.upper())
		except self.errors.ConfigurationError:
			pass
	env['DVIPSFLAGS'] = '-Ppdf'
@@ -0,0 +1,131 @@ | |||||
#! /usr/bin/env python | |||||
# per rosengren 2011 | |||||
from os import sep, readlink | |||||
from os.path import abspath | |||||
from waflib import Logs | |||||
from waflib.TaskGen import feature, after_method | |||||
from waflib.Task import Task, always_run | |||||
def options(opt):
	"""Add the bjam command-line options (source dir, platform, config, toolset)."""
	group = opt.add_option_group('Bjam Options')
	group.add_option('--bjam_src', default=None, help='You can find it in <boost root>/tools/jam/src')
	group.add_option('--bjam_uname', default='linuxx86_64', help='bjam is built in <src>/bin.<uname>/bjam')
	group.add_option('--bjam_config', default=None)
	group.add_option('--bjam_toolset', default=None)
def configure(cnf):
	"""
	Populate the BJAM_* configuration values from the command-line
	options (only when not already set) and try to locate a prebuilt
	bjam executable under <BJAM_SRC>/bin.<BJAM_UNAME>/.
	"""
	if not cnf.env.BJAM_SRC:
		cnf.env.BJAM_SRC = cnf.options.bjam_src
	if not cnf.env.BJAM_UNAME:
		cnf.env.BJAM_UNAME = cnf.options.bjam_uname
	try:
		cnf.find_program('bjam', path_list=[
			cnf.env.BJAM_SRC + sep + 'bin.' + cnf.env.BJAM_UNAME
		])
	except Exception:
		# Deliberately broad: when BJAM_SRC is unset the path concatenation
		# itself fails (TypeError) before find_program can raise its own
		# ConfigurationError. In every failure mode we fall back to building
		# bjam from source later (see bjam_creator).
		cnf.env.BJAM = None
	if not cnf.env.BJAM_CONFIG:
		cnf.env.BJAM_CONFIG = cnf.options.bjam_config
	if not cnf.env.BJAM_TOOLSET:
		cnf.env.BJAM_TOOLSET = cnf.options.bjam_toolset
@feature('bjam')
@after_method('process_rule')
def process_bjam(self):
	"""Create the bjam task chain: build bjam if needed, then build and install."""
	if not self.bld.env.BJAM:
		self.create_task('bjam_creator')
	self.create_task('bjam_build')
	self.create_task('bjam_installer')
	if getattr(self, 'always', False):
		# force all three task classes to re-run on each build
		for task_cls in (bjam_creator, bjam_build, bjam_installer):
			always_run(task_cls)
class bjam_creator(Task):
	"""Builds the bjam executable from its sources when no prebuilt one exists."""
	ext_out = 'bjam_exe'
	vars = ['BJAM_SRC', 'BJAM_UNAME']
	def run(self):
		env = self.env
		bjam = self.generator.bld.root.find_dir(env.BJAM_SRC)
		if not bjam:
			Logs.error('Can not find bjam source')
			return -1
		exe_relpath = 'bin.' + env.BJAM_UNAME + '/bjam'
		# already built? just record its path
		exe = bjam.find_resource(exe_relpath)
		if exe:
			env.BJAM = exe.srcpath()
			return 0
		# not found: run the bootstrap script inside the source directory
		bjam_cmd = ['./build.sh']
		Logs.debug('runner: ' + bjam.srcpath() + '> ' + str(bjam_cmd))
		if self.exec_command(bjam_cmd, cwd=bjam.srcpath()) != 0:
			Logs.error('bjam failed')
			return -1
		exe = bjam.find_resource(exe_relpath)
		if exe:
			env.BJAM = exe.srcpath()
			return 0
		Logs.error('bjam failed')
		return -1
class bjam_build(Task):
	"""
	Invoke bjam to build the requested boost libraries, staging the
	results into the build directory (lib/ and bin/).
	"""
	ext_in = 'bjam_exe'
	ext_out = 'install'
	vars = ['BJAM_TOOLSET']
	def run(self):
		env = self.env
		gen = self.generator
		path = gen.path
		bld = gen.bld
		# optional 'root' attribute on the task generator overrides the
		# directory bjam is run from
		if hasattr(gen, 'root'):
			build_root = path.find_node(gen.root)
		else:
			build_root = path
		jam = bld.srcnode.find_resource(env.BJAM_CONFIG)
		if jam:
			Logs.debug('bjam: Using jam configuration from ' + jam.srcpath())
			jam_rel = jam.relpath_gen(build_root)
		else:
			# NOTE(review): the message hard-codes a path although the
			# configuration actually comes from env.BJAM_CONFIG
			Logs.warn('No build configuration in build_config/user-config.jam. Using default')
			jam_rel = None
		bjam_exe = bld.srcnode.find_node(env.BJAM)
		if not bjam_exe:
			Logs.error('env.BJAM is not set')
			return -1
		bjam_exe_rel = bjam_exe.relpath_gen(build_root)
		# assemble the bjam command line; 'target' lists the boost libs to build
		cmd = ([bjam_exe_rel] +
			(['--user-config=' + jam_rel] if jam_rel else []) +
			['--stagedir=' + path.get_bld().path_from(build_root)] +
			['--debug-configuration'] +
			['--with-' + lib for lib in self.generator.target] +
			(['toolset=' + env.BJAM_TOOLSET] if env.BJAM_TOOLSET else []) +
			['link=' + 'shared'] +
			['variant=' + 'release']
		)
		Logs.debug('runner: ' + build_root.srcpath() + '> ' + str(cmd))
		ret = self.exec_command(cmd, cwd=build_root.srcpath())
		if ret != 0:
			return ret
		# record everything bjam staged so the installer task can pick it up
		self.set_outputs(path.get_bld().ant_glob('lib/*') + path.get_bld().ant_glob('bin/*'))
		return 0
class bjam_installer(Task):
	"""Install the staged libs/binaries, recreating symlinks as symlinks."""
	ext_in = 'install'
	def run(self):
		gen = self.generator
		path = gen.path
		for dest, pattern in (('${LIBDIR}', 'lib/*'), ('${BINDIR}', 'bin/*')):
			regular_files = []
			for node in path.get_bld().ant_glob(pattern):
				try:
					# EAFP: readlink raises OSError for regular files
					link_target = readlink(node.srcpath())
					gen.bld.symlink_as(sep.join([dest, node.name]), link_target, postpone=False)
				except OSError:
					regular_files.append(node)
			gen.bld.install_files(dest, regular_files, postpone=False)
		return 0
@@ -0,0 +1,111 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Michal Proszek, 2014 (poxip) | |||||
""" | |||||
Detect the version of Blender, path | |||||
and install the extension: | |||||
def options(opt): | |||||
opt.load('blender') | |||||
def configure(cnf): | |||||
cnf.load('blender') | |||||
def build(bld): | |||||
bld(name='io_mesh_raw', | |||||
feature='blender', | |||||
files=['file1.py', 'file2.py'] | |||||
) | |||||
If name variable is empty, files are installed in scripts/addons, otherwise scripts/addons/name | |||||
Use ./waf configure --system to set the installation directory to system path | |||||
""" | |||||
import os | |||||
import re | |||||
from sys import platform as _platform | |||||
from getpass import getuser | |||||
from waflib import Utils | |||||
from waflib.TaskGen import feature | |||||
from waflib.Configure import conf | |||||
def options(opt):
	"""Register the --system flag selecting the system-wide install directory."""
	opt.add_option(
		'-s', '--system',
		action='store_true',
		default=False,
		dest='directory_system',
		help='determines installation directory (default: user)'
	)
@conf
def find_blender(ctx):
	'''Return version number of blender, if not exist return None'''
	blender = ctx.find_program('blender')
	version_output = ctx.cmd_and_log(blender + ['--version'])
	match = re.search(r'Blender\s*((\d+(\.|))*)', version_output)
	if not match:
		ctx.fatal('Could not retrieve blender version')
	try:
		version = match.group(1)
	except IndexError:
		ctx.fatal('Could not retrieve blender version')
	# remember the version for configure_paths()
	ctx.env['BLENDER_VERSION'] = version
	return blender
@conf
def configure_paths(ctx):
	"""
	Setup blender paths: compute the per-user and system-wide Blender
	configuration directories for the current platform and store the
	chosen addons directory in the environment.
	"""
	# Get the username
	user = getuser()
	# NOTE(review): this rebinds the module-level '_platform' (imported from
	# sys at the top of the file) to waf's platform string instead
	_platform = Utils.unversioned_sys_platform()
	config_path = {'user': '', 'system': ''}
	if _platform.startswith('linux'):
		config_path['user'] = '/home/%s/.config/blender/' % user
		config_path['system'] = '/usr/share/blender/'
	elif _platform == 'darwin':
		# MAC OS X
		config_path['user'] = \
			'/Users/%s/Library/Application Support/Blender/' % user
		config_path['system'] = '/Library/Application Support/Blender/'
	elif Utils.is_win32:
		# Windows
		# NOTE(review): 'ctx.getenv' is assumed to behave like os.environ.get
		# here - confirm against the waf context API
		appdata_path = ctx.getenv('APPDATA').replace('\\', '/')
		homedrive = ctx.getenv('HOMEDRIVE').replace('\\', '/')
		config_path['user'] = '%s/Blender Foundation/Blender/' % appdata_path
		config_path['system'] = \
			'%sAll Users/AppData/Roaming/Blender Foundation/Blender/' % homedrive
	else:
		ctx.fatal(
			'Unsupported platform. '
			'Available platforms: Linux, OSX, MS-Windows.'
		)
	# the version-specific subdirectory, e.g. .../blender/2.79/
	blender_version = ctx.env['BLENDER_VERSION']
	config_path['user'] += blender_version + '/'
	config_path['system'] += blender_version + '/'
	# NOTE(review): only the user path is normalized with abspath(); the
	# system path below is stored verbatim - possibly unintentional
	ctx.env['BLENDER_CONFIG_DIR'] = os.path.abspath(config_path['user'])
	if ctx.options.directory_system:
		ctx.env['BLENDER_CONFIG_DIR'] = config_path['system']
	ctx.env['BLENDER_ADDONS_DIR'] = os.path.join(
		ctx.env['BLENDER_CONFIG_DIR'], 'scripts/addons'
	)
	# make sure the addons directory exists before installation
	Utils.check_dir(ctx.env['BLENDER_ADDONS_DIR'])
def configure(ctx):
	"""Detect blender and derive the installation paths from its version."""
	ctx.find_blender()
	ctx.configure_paths()
# NOTE(review): the module docstring example uses feature='blender' but the
# method is registered for 'blender_list' - confirm which name is intended
@feature('blender_list')
def blender(self):
	"""Install the extension files into scripts/addons[/<name>]."""
	# Two ways to install a blender extension: as a module or just .py files
	dest_dir = os.path.join(self.env.BLENDER_ADDONS_DIR, self.get_name())
	Utils.check_dir(dest_dir)
	self.bld.install_files(
		dest_dir,
		getattr(self, 'files', '.')
	)
@@ -0,0 +1,81 @@ | |||||
#! /usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Yannick LM 2011 | |||||
""" | |||||
Support for the boo programming language, for example:: | |||||
bld(features = "boo", # necessary feature | |||||
source = "src.boo", # list of boo files | |||||
gen = "world.dll", # target | |||||
type = "library", # library/exe ("-target:xyz" flag) | |||||
name = "world" # necessary if the target is referenced by 'use' | |||||
) | |||||
""" | |||||
from waflib import Task | |||||
from waflib.Configure import conf | |||||
from waflib.TaskGen import feature, after_method, before_method, extension | |||||
@extension('.boo')
def boo_hook(self, node):
	# Placeholder so .boo files are accepted by process_source; the actual
	# task creation happens in apply_booc for the whole source list.
	# TODO filter the non-boo source files in 'apply_booc' and remove this method
	pass
@feature('boo')
@before_method('process_source')
def apply_booc(self):
	"""
	Create a booc task compiling all sources of this generator into the
	target given by *gen*; stores the task as self.boo_task for use_boo().
	"""
	src_nodes = self.to_nodes(self.source)
	out_node = self.path.find_or_declare(self.gen)
	self.boo_task = self.create_task('booc', src_nodes, [out_node])
	# Set variables used by the 'booc' task
	self.boo_task.env.OUT = '-o:%s' % out_node.abspath()
	# target kind is "exe" by default; renamed the local so it does not
	# shadow the builtin type()
	target_type = getattr(self, "type", "exe")
	self.boo_task.env.BOO_TARGET_TYPE = "-target:%s" % target_type
# Fixed: the decorator referenced the non-existent method 'apply_boo', so no
# ordering was enforced and self.boo_task could be missing when use_boo ran.
@feature('boo')
@after_method('apply_booc')
def use_boo(self):
	"""
	boo applications honor the **use** keyword::

		bld(features='boo', source='app.boo', gen='app.exe', use='world')

	For each listed name, falls back from boo_task to cs_task to link_task
	to find the dependency output to reference.
	"""
	dep_names = self.to_list(getattr(self, 'use', []))
	for dep_name in dep_names:
		dep_task_gen = self.bld.get_tgen_by_name(dep_name)
		if not dep_task_gen:
			continue
		dep_task_gen.post()
		dep_task = getattr(dep_task_gen, 'boo_task', None)
		if not dep_task:
			# Try a cs task:
			dep_task = getattr(dep_task_gen, 'cs_task', None)
			if not dep_task:
				# Try a link task (fixed: this read from dep_task, which is
				# always None in this branch, so the fallback never worked):
				dep_task = getattr(dep_task_gen, 'link_task', None)
				if not dep_task:
					# Abort ...
					continue
		self.boo_task.set_run_after(dep_task) # order
		self.boo_task.dep_nodes.extend(dep_task.outputs) # dependency
		self.boo_task.env.append_value('BOO_FLAGS', '-reference:%s' % dep_task.outputs[0].abspath())
class booc(Task.Task):
	"""Compiles .boo files """
	color = 'YELLOW'
	# OUT and BOO_TARGET_TYPE are set per-task in apply_booc
	run_str = '${BOOC} ${BOO_FLAGS} ${BOO_TARGET_TYPE} ${OUT} ${SRC}'
@conf
def check_booc(self):
	"""Find the booc compiler and set the default flags."""
	self.find_program('booc', 'BOOC')
	self.env.BOO_FLAGS = ['-nologo']
def configure(self):
	"""Check that booc is available """
	self.check_booc()
@@ -0,0 +1,411 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# | |||||
# partially based on boost.py written by Gernot Vormayr | |||||
# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008 | |||||
# modified by Bjoern Michaelsen, 2008 | |||||
# modified by Luca Fossati, 2008 | |||||
# rewritten for waf 1.5.1, Thomas Nagy, 2008 | |||||
# rewritten for waf 1.6.2, Sylvain Rouquette, 2011 | |||||
''' | |||||
This is an extra tool, not bundled with the default waf binary. | |||||
To add the boost tool to the waf file: | |||||
$ ./waf-light --tools=compat15,boost | |||||
or, if you have waf >= 1.6.2 | |||||
$ ./waf update --files=boost | |||||
When using this tool, the wscript will look like: | |||||
def options(opt): | |||||
opt.load('compiler_cxx boost') | |||||
def configure(conf): | |||||
conf.load('compiler_cxx boost') | |||||
conf.check_boost(lib='system filesystem') | |||||
def build(bld): | |||||
bld(source='main.cpp', target='app', use='BOOST') | |||||
Options are generated, in order to specify the location of boost includes/libraries. | |||||
The `check_boost` configuration function allows to specify the used boost libraries. | |||||
It can also provide default arguments to the --boost-mt command-line arguments. | |||||
Everything will be packaged together in a BOOST component that you can use. | |||||
When using MSVC, a lot of compilation flags need to match your BOOST build configuration: | |||||
- you may have to add /EHsc to your CXXFLAGS or define boost::throw_exception if BOOST_NO_EXCEPTIONS is defined. | |||||
Errors: C4530 | |||||
- boost libraries will try to be smart and use the (pretty but often not useful) auto-linking feature of MSVC | |||||
So before calling `conf.check_boost` you might want to disabling by adding | |||||
conf.env.DEFINES_BOOST += ['BOOST_ALL_NO_LIB'] | |||||
Errors: | |||||
- boost might also be compiled with /MT, which links the runtime statically. | |||||
If you have problems with redefined symbols, | |||||
self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB'] | |||||
self.env['CXXFLAGS_%s' % var] += ['/MD', '/EHsc'] | |||||
Passing `--boost-linkage_autodetect` might help ensuring having a correct linkage in some basic cases. | |||||
''' | |||||
import sys | |||||
import re | |||||
from waflib import Utils, Logs, Errors | |||||
from waflib.Configure import conf | |||||
from waflib.TaskGen import feature, after_method | |||||
# default search paths for boost libraries and headers
BOOST_LIBS = ['/usr/lib/x86_64-linux-gnu', '/usr/lib/i386-linux-gnu',
	      '/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib']
BOOST_INCLUDES = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include']
# header used both to validate an include dir and to read the version
BOOST_VERSION_FILE = 'boost/version.hpp'
# test programs compiled during configuration
BOOST_VERSION_CODE = '''
#include <iostream>
#include <boost/version.hpp>
int main() { std::cout << BOOST_LIB_VERSION << std::endl; }
'''
BOOST_ERROR_CODE = '''
#include <boost/system/error_code.hpp>
int main() { boost::system::error_code c; }
'''
BOOST_THREAD_CODE = '''
#include <boost/thread.hpp>
int main() { boost::thread t; }
'''
# toolsets from {boost_dir}/tools/build/v2/tools/common.jam
PLATFORM = Utils.unversioned_sys_platform()
# toolset tags that depend on the host platform or compiler binary
detect_intel = lambda env: (PLATFORM == 'win32') and 'iw' or 'il'
detect_clang = lambda env: (PLATFORM == 'darwin') and 'clang-darwin' or 'clang'
detect_mingw = lambda env: (re.search('MinGW', env.CXX[0])) and 'mgw' or 'gcc'
# compiler name -> boost library-name toolset tag (string or detector callable)
BOOST_TOOLSETS = {
	'borland':  'bcb',
	'clang':	detect_clang,
	'como':	 'como',
	'cw':	   'cw',
	'darwin':   'xgcc',
	'edg':	  'edg',
	'g++':	  detect_mingw,
	'gcc':	  detect_mingw,
	'icpc':	 detect_intel,
	'intel':	detect_intel,
	'kcc':	  'kcc',
	'kylix':	'bck',
	'mipspro':  'mp',
	'mingw':	'mgw',
	'msvc':	 'vc',
	'qcc':	  'qcc',
	'sun':	  'sw',
	'sunc++':   'sw',
	'tru64cxx': 'tru',
	'vacpp':	'xlc'
}
def options(opt):
    """Add the --boost-* command-line options (waf 'options' hook)."""
    opt.add_option('--boost-includes', type='string',
        default='', dest='boost_includes',
        help='''path to the boost includes root (~boost root)
e.g. /path/to/boost_1_47_0''')
    opt.add_option('--boost-libs', type='string',
        default='', dest='boost_libs',
        help='''path to the directory where the boost libs are
e.g. /path/to/boost_1_47_0/stage/lib''')
    opt.add_option('--boost-mt', action='store_true',
        default=False, dest='boost_mt',
        help='select multi-threaded libraries')
    opt.add_option('--boost-abi', type='string', default='', dest='boost_abi',
        help='''select libraries with tags (gd for debug, static is automatically added),
see doc Boost, Getting Started, chapter 6.1''')
    opt.add_option('--boost-linkage_autodetect', action="store_true", dest='boost_linkage_autodetect',
        help="auto-detect boost linkage options (don't get used to it / might break other stuff)")
    opt.add_option('--boost-toolset', type='string',
        default='', dest='boost_toolset',
        help='force a toolset e.g. msvc, vc90, \
gcc, mingw, mgw45 (default: auto)')
    # default the boost_python version to the running interpreter, e.g. '27'
    py_version = '%d%d' % (sys.version_info[0], sys.version_info[1])
    opt.add_option('--boost-python', type='string',
        default=py_version, dest='boost_python',
        help='select the lib python with this version \
(default: %s)' % py_version)
@conf
def __boost_get_version_file(self, d):
    """Return the node for boost/version.hpp under directory *d*, or None."""
    if not d:
        return None
    dnode = self.root.find_dir(d)
    if not dnode:
        return None
    return dnode.find_node(BOOST_VERSION_FILE)
@conf
def boost_get_version(self, d):
    """silently retrieve the boost version number"""
    # Prefer parsing boost/version.hpp directly; fall back to compiling
    # and running a small program only when the header cannot be read/parsed.
    node = self.__boost_get_version_file(d)
    if node:
        try:
            txt = node.read()
        except EnvironmentError:
            Logs.error("Could not read the file %r" % node.abspath())
        else:
            re_but = re.compile('^#define\\s+BOOST_LIB_VERSION\\s+"(.*)"', re.M)
            m = re_but.search(txt)
            if m:
                return m.group(1)
    return self.check_cxx(fragment=BOOST_VERSION_CODE, includes=[d], execute=True, define_ret=True)
@conf
def boost_get_includes(self, *k, **kw):
    """Return the first directory containing boost/version.hpp, or abort the configuration."""
    includes = k and k[0] or kw.get('includes', None)
    # user-supplied path first
    if includes and self.__boost_get_version_file(includes):
        return includes
    # then the INCLUDE environment variable (Windows style, ';'-separated)
    # and finally the built-in default locations
    for d in self.environ.get('INCLUDE', '').split(';') + BOOST_INCLUDES:
        if self.__boost_get_version_file(d):
            return d
    if includes:
        self.end_msg('headers not found in %s' % includes)
        self.fatal('The configuration failed')
    else:
        self.end_msg('headers not found, please provide a --boost-includes argument (see help)')
        self.fatal('The configuration failed')
@conf
def boost_get_toolset(self, cc):
    """Map a compiler/toolset name to the boost library name tag.

    :param cc: toolset name forced by the user ('' to auto-detect)
    :return: the tag string, e.g. 'vc' or 'mgw'
    """
    toolset = cc
    if not cc:
        build_platform = Utils.unversioned_sys_platform()
        if build_platform in BOOST_TOOLSETS:
            cc = build_platform
        else:
            cc = self.env.CXX_NAME
    if cc in BOOST_TOOLSETS:
        toolset = BOOST_TOOLSETS[cc]
    # bugfix: the former `isinstance(...) and toolset or toolset(self.env)`
    # idiom attempted to call ''(self.env) when toolset was an empty string
    # (unknown compiler name), raising TypeError instead of returning ''
    if isinstance(toolset, str):
        return toolset
    return toolset(self.env)
@conf
def __boost_get_libs_path(self, *k, **kw):
    ''' return the lib path and all the files in it '''
    if 'files' in kw:
        # explicit file list supplied by the caller (mostly for testing)
        return self.root.find_dir('.'), Utils.to_list(kw['files'])
    libs = k and k[0] or kw.get('libs', None)
    path = None
    files = []
    if libs:
        path = self.root.find_dir(libs)
        # bugfix: find_dir returns None for a non-existent directory;
        # the original dereferenced it unconditionally and crashed with
        # AttributeError instead of reporting a configuration error
        if path:
            files = path.ant_glob('*boost_*')
    if not libs or not files:
        # search the LIB environment variable and the default locations,
        # also trying the '64' suffixed variants (e.g. /usr/lib64)
        for d in self.environ.get('LIB', '').split(';') + BOOST_LIBS:
            if not d:
                continue
            path = self.root.find_dir(d)
            if path:
                files = path.ant_glob('*boost_*')
                if files:
                    break
            path = self.root.find_dir(d + '64')
            if path:
                files = path.ant_glob('*boost_*')
                if files:
                    break
    if not path:
        if libs:
            self.end_msg('libs not found in %s' % libs)
            self.fatal('The configuration failed')
        else:
            self.end_msg('libs not found, please provide a --boost-libs argument (see help)')
            self.fatal('The configuration failed')
    self.to_log('Found the boost path in %r with the libraries:' % path)
    for x in files:
        self.to_log('    %r' % x)
    return path, files
@conf
def boost_get_libs(self, *k, **kw):
    '''
    return the lib path and the required libs
    according to the parameters
    '''
    path, files = self.__boost_get_libs_path(**kw)
    # prefer longer (more specific) file names when several candidates match
    files = sorted(files, key=lambda f: (len(f.name), f.name), reverse=True)
    toolset = self.boost_get_toolset(kw.get('toolset', ''))
    toolset_pat = '(-%s[0-9]{0,3})' % toolset
    version = '-%s' % self.env.BOOST_VERSION
    def find_lib(re_lib, files):
        # first file whose name matches the compiled pattern, or None
        for file in files:
            if re_lib.search(file.name):
                self.to_log('Found boost lib %s' % file)
                return file
        return None
    def format_lib_name(name):
        # strip the 'lib' prefix (except for msvc) and the extension,
        # producing the name usable with -l / LIB
        if name.startswith('lib') and self.env.CC_NAME != 'msvc':
            name = name[3:]
        return name[:name.rfind('.')]
    def match_libs(lib_names, is_static):
        libs = []
        lib_names = Utils.to_list(lib_names)
        if not lib_names:
            return libs
        # build the tag suffix (-mt, abi tags, -s for static)
        t = []
        if kw.get('mt', False):
            t.append('-mt')
        if kw.get('abi', None):
            t.append('%s%s' % (is_static and '-s' or '-', kw['abi']))
        elif is_static:
            t.append('-s')
        tags_pat = t and ''.join(t) or ''
        ext = is_static and self.env.cxxstlib_PATTERN or self.env.cxxshlib_PATTERN
        ext = ext.partition('%s')[2] # remove '%s' or 'lib%s' from PATTERN
        for lib in lib_names:
            if lib == 'python':
                # for instance, with python='27',
                # accepts '-py27', '-py2', '27' and '2'
                # but will reject '-py3', '-py26', '26' and '3'
                tags = '({0})?((-py{2})|(-py{1}(?=[^0-9]))|({2})|({1}(?=[^0-9]))|(?=[^0-9])(?!-py))'.format(tags_pat, kw['python'][0], kw['python'])
            else:
                tags = tags_pat
            # Trying libraries, from most strict match to least one
            for pattern in ['boost_%s%s%s%s%s$' % (lib, toolset_pat, tags, version, ext),
                'boost_%s%s%s%s$' % (lib, tags, version, ext),
                # Give up trying to find the right version
                'boost_%s%s%s%s$' % (lib, toolset_pat, tags, ext),
                'boost_%s%s%s$' % (lib, tags, ext),
                'boost_%s%s$' % (lib, ext),
                'boost_%s' % lib]:
                self.to_log('Trying pattern %s' % pattern)
                file = find_lib(re.compile(pattern), files)
                if file:
                    libs.append(format_lib_name(file.name))
                    break
            else:
                # for-else: no pattern matched this library name
                self.end_msg('lib %s not found in %s' % (lib, path.abspath()))
                self.fatal('The configuration failed')
        return libs
    return path.abspath(), match_libs(kw.get('lib', None), False), match_libs(kw.get('stlib', None), True)
@conf
def check_boost(self, *k, **kw):
    """
    Initialize boost libraries to be used.
    Keywords: you can pass the same parameters as with the command line (without "--boost-").
    Note that the command line has the priority, and should preferably be used.
    """
    if not self.env['CXX']:
        self.fatal('load a c++ compiler first, conf.load("compiler_cxx")')
    params = {
        'lib': k and k[0] or kw.get('lib', None),
        'stlib': kw.get('stlib', None)
    }
    # --boost-* command-line options override the keyword arguments
    for key, value in self.options.__dict__.items():
        if not key.startswith('boost_'):
            continue
        key = key[len('boost_'):]
        params[key] = value and value or kw.get(key, '')
    var = kw.get('uselib_store', 'BOOST')
    self.start_msg('Checking boost includes')
    self.env['INCLUDES_%s' % var] = inc = self.boost_get_includes(**params)
    self.env.BOOST_VERSION = self.boost_get_version(inc)
    self.end_msg(self.env.BOOST_VERSION)
    if Logs.verbose:
        Logs.pprint('CYAN', '	path : %s' % self.env['INCLUDES_%s' % var])
    # headers-only usage: stop before looking for compiled libraries
    if not params['lib'] and not params['stlib']:
        return
    if 'static' in kw or 'static' in params:
        Logs.warn('boost: static parameter is deprecated, use stlib instead.')
    self.start_msg('Checking boost libs')
    path, libs, stlibs = self.boost_get_libs(**params)
    self.env['LIBPATH_%s' % var] = [path]
    self.env['STLIBPATH_%s' % var] = [path]
    self.env['LIB_%s' % var] = libs
    self.env['STLIB_%s' % var] = stlibs
    self.end_msg('ok')
    if Logs.verbose:
        Logs.pprint('CYAN', '	path : %s' % path)
        Logs.pprint('CYAN', '	shared libs : %s' % libs)
        Logs.pprint('CYAN', '	static libs : %s' % stlibs)
    def try_link():
        # link small test programs to validate the selected libraries
        if (params['lib'] and 'system' in params['lib']) or \
            params['stlib'] and 'system' in params['stlib']:
            self.check_cxx(fragment=BOOST_ERROR_CODE, use=var, execute=False)
        if (params['lib'] and 'thread' in params['lib']) or \
            params['stlib'] and 'thread' in params['stlib']:
            self.check_cxx(fragment=BOOST_THREAD_CODE, use=var, execute=False)
    if params.get('linkage_autodetect', False):
        self.start_msg("Attempting to detect boost linkage flags")
        toolset = self.boost_get_toolset(kw.get('toolset', ''))
        if toolset in ('vc',):
            # disable auto-linking feature, causing error LNK1181
            # because the code wants to be linked against
            self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
            # if no dlls are present, we guess the .lib files are not stubs
            has_dlls = False
            for x in Utils.listdir(path):
                if x.endswith(self.env.cxxshlib_PATTERN % ''):
                    has_dlls = True
                    break
            if not has_dlls:
                # static-only installation: move everything to STLIB*
                self.env['STLIBPATH_%s' % var] = [path]
                self.env['STLIB_%s' % var] = libs
                del self.env['LIB_%s' % var]
                del self.env['LIBPATH_%s' % var]
            # we attempt to play with some known-to-work CXXFLAGS combinations
            for cxxflags in (['/MD', '/EHsc'], []):
                self.env.stash()
                self.env["CXXFLAGS_%s" % var] += cxxflags
                try:
                    try_link()
                    self.end_msg("ok: winning cxxflags combination: %s" % (self.env["CXXFLAGS_%s" % var]))
                    exc = None
                    break
                except Errors.ConfigurationError as e:
                    self.env.revert()
                    exc = e
            if exc is not None:
                self.end_msg("Could not auto-detect boost linking flags combination, you may report it to boost.py author", ex=exc)
                self.fatal('The configuration failed')
        else:
            self.end_msg("Boost linkage flags auto-detection not implemented (needed ?) for this toolchain")
            self.fatal('The configuration failed')
    else:
        self.start_msg('Checking for boost linkage')
        try:
            try_link()
        except Errors.ConfigurationError as e:
            self.end_msg("Could not link against boost libraries using supplied options")
            self.fatal('The configuration failed')
        self.end_msg('ok')
@feature('cxx')
@after_method('apply_link')
def install_boost(self):
    """On win32 'install' builds, copy the boost shared libraries next to the binaries.

    The function-attribute ``done`` guard ensures the copy happens at most once
    per build, no matter how many task generators carry the 'cxx' feature.
    """
    if install_boost.done or not Utils.is_win32 or not self.bld.cmd.startswith('install'):
        return
    install_boost.done = True
    inst_to = getattr(self, 'install_path', '${BINDIR}')
    for lib in self.env.LIB_BOOST:
        try:
            file = self.bld.find_file(self.env.cxxshlib_PATTERN % lib, self.env.LIBPATH_BOOST)
            self.bld.install_files(inst_to, self.bld.root.find_node(file))
        except Exception:
            # best-effort: a boost lib without a matching dll is simply skipped.
            # bugfix: the former bare `except:` also swallowed
            # KeyboardInterrupt/SystemExit
            continue
install_boost.done = False
@@ -0,0 +1,32 @@ | |||||
#! /usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2015 | |||||
""" | |||||
Force files to depend on the timestamps of those located in the build directory. You may | |||||
want to use this to force partial rebuilds, see playground/track_output_files/ for a working example. | |||||
Note that there is a variety of ways to implement this, one may want use timestamps on source files too for example, | |||||
or one may want to hash the files in the source directory only under certain conditions (md5_tstamp tool) | |||||
or to hash the file in the build directory with its timestamp (similar to 'update_outputs') | |||||
""" | |||||
import os | |||||
from waflib import Node, Utils | |||||
def get_bld_sig(self):
    """Return this node's signature, mixing in the build-file timestamp.

    For files in the build directory the stored signature is combined with
    st_mtime, so touching an output forces dependent tasks to rebuild.
    """
    try:
        # memoized result from a previous call
        return self.cache_sig
    except AttributeError:
        pass
    if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
        # source file (or no separate build directory): hash the contents
        self.sig = Utils.h_file(self.abspath())
        self.cache_sig = ret = self.sig
    else:
        # build file: append the modification time to the signature
        # NOTE(review): assumes self.sig was already set by whatever
        # produced this node -- confirm before relying on it
        self.cache_sig = ret = self.sig + str(os.stat(self.abspath()).st_mtime)
    return ret
# monkey-patch the method onto all waf Node instances
Node.Node.get_bld_sig = get_bld_sig
@@ -0,0 +1,107 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2013 (ita) | |||||
""" | |||||
A system for recording all outputs to a log file. Just add the following to your wscript file:: | |||||
def init(ctx): | |||||
ctx.load('build_logs') | |||||
""" | |||||
import atexit, sys, time, os, shutil, threading | |||||
from waflib import Logs, Context | |||||
# adding the logs under the build/ directory will clash with the clean/ command | |||||
try: | |||||
up = os.path.dirname(Context.g_module.__file__) | |||||
except AttributeError: | |||||
up = '.' | |||||
LOGFILE = os.path.join(up, 'logs', '%s.log' % time.strftime('%Y_%m_%d_%H_%M')) | |||||
wlock = threading.Lock() | |||||
class log_to_file(object):
    """File-like wrapper that duplicates a stream's output into a log file."""
    def __init__(self, stream, fileobj, filename):
        self.stream = stream
        self.encoding = self.stream.encoding
        self.fileobj = fileobj
        self.filename = filename
        # flipped to False once the log file has been closed at exit
        self.is_valid = True
    def replace_colors(self, data):
        """Strip the ANSI color sequences known to the Logs module."""
        for seq in Logs.colors_lst.values():
            if isinstance(seq, str):
                data = data.replace(seq, '')
        return data
    def write(self, data):
        # serialize concurrent writers onto both the stream and the file
        with wlock:
            self.stream.write(data)
            self.stream.flush()
            if self.is_valid:
                self.fileobj.write(self.replace_colors(data))
    def fileno(self):
        return self.stream.fileno()
    def flush(self):
        self.stream.flush()
        if self.is_valid:
            self.fileobj.flush()
    def isatty(self):
        return self.stream.isatty()
def init(ctx):
    """Redirect stdout/stderr (and waf's logger) into a timestamped log file."""
    global LOGFILE
    filename = os.path.abspath(LOGFILE)
    try:
        os.makedirs(os.path.dirname(os.path.abspath(filename)))
    except OSError:
        # the logs/ directory may already exist
        pass
    if hasattr(os, 'O_NOINHERIT'):
        # Windows: keep child processes from inheriting the log handle
        fd = os.open(LOGFILE, os.O_CREAT | os.O_TRUNC | os.O_WRONLY | os.O_NOINHERIT)
        fileobj = os.fdopen(fd, 'w')
    else:
        fileobj = open(LOGFILE, 'w')
    old_stderr = sys.stderr
    # sys.stdout has already been replaced, so __stdout__ will be faster
    #sys.stdout = log_to_file(sys.stdout, fileobj, filename)
    #sys.stderr = log_to_file(sys.stderr, fileobj, filename)
    sys.stdout = log_to_file(sys.__stdout__, fileobj, filename)
    sys.stderr = log_to_file(sys.__stderr__, fileobj, filename)
    # now mess with the logging module...
    for x in Logs.log.handlers:
        try:
            stream = x.stream
        except AttributeError:
            pass
        else:
            # re-point handlers that wrote to the old stderr at the wrapper
            if id(stream) == id(old_stderr):
                x.stream = sys.stderr
def exit_cleanup():
    """atexit hook: stop logging, close the file and keep a 'latest.log' copy."""
    try:
        fileobj = sys.stdout.fileobj
    except AttributeError:
        # stdout was never wrapped by init(); nothing to clean up
        pass
    else:
        sys.stdout.is_valid = False
        sys.stderr.is_valid = False
        fileobj.close()
        filename = sys.stdout.filename
        Logs.info('Output logged to %r' % filename)
        # then copy the log file to "latest.log" if possible
        up = os.path.dirname(os.path.abspath(filename))
        try:
            shutil.copy(filename, os.path.join(up, 'latest.log'))
        except OSError:
            # this may fail on windows due to processes spawned
            pass
atexit.register(exit_cleanup)
@@ -0,0 +1,33 @@ | |||||
#! /usr/bin/env python | |||||
# encoding: utf-8 | |||||
# harald at klimachs.de | |||||
""" | |||||
IBM XL Compiler for Blue Gene | |||||
""" | |||||
import os | |||||
from waflib.Tools import ccroot,ar | |||||
from waflib.Configure import conf | |||||
from waflib.Tools import xlc # method xlc_common_flags | |||||
from waflib.Tools.compiler_c import c_compiler | |||||
c_compiler['linux'].append('c_bgxlc') | |||||
@conf
def find_bgxlc(conf):
    """Find the IBM Blue Gene XL C compiler, preferring the thread-safe bgxlc_r."""
    cc = conf.find_program(['bgxlc_r', 'bgxlc'], var='CC')
    conf.get_xlc_version(cc)
    conf.env.CC = cc
    conf.env.CC_NAME = 'bgxlc'
def configure(conf):
    """Configure the IBM Blue Gene XL C toolchain (waf 'configure' hook)."""
    conf.find_bgxlc()
    conf.find_ar()
    conf.xlc_common_flags()
    # Blue Gene specific link flags: export all symbols from shared libs
    conf.env.LINKFLAGS_cshlib = ['-G', '-Wl,-bexpfull']
    conf.env.LINKFLAGS_cprogram = []
    conf.cc_load_tools()
    conf.cc_add_flags()
    conf.link_add_flags()
@@ -0,0 +1,73 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2006-2010 (ita) | |||||
""" | |||||
Dumb C/C++ preprocessor for finding dependencies | |||||
It will look at all include files it can find after removing the comments, so the following | |||||
will always add the dependency on both "a.h" and "b.h":: | |||||
#include "a.h" | |||||
#ifdef B | |||||
#include "b.h" | |||||
#endif | |||||
int main() { | |||||
return 0; | |||||
} | |||||
To use:: | |||||
def configure(conf): | |||||
conf.load('compiler_c') | |||||
conf.load('c_dumbpreproc') | |||||
""" | |||||
import re, sys, os, string, traceback | |||||
from waflib import Logs, Build, Utils, Errors | |||||
from waflib.Logs import debug, error | |||||
from waflib.Tools import c_preproc | |||||
re_inc = re.compile( | |||||
'^[ \t]*(#|%:)[ \t]*(include)[ \t]*[<"](.*)[>"]\r*$', | |||||
re.IGNORECASE | re.MULTILINE) | |||||
def lines_includes(node):
    """Return [(directive, path)] for every #include found in *node*'s text.

    Comments and line continuations are removed first so that includes
    hidden behind preprocessor conditionals are still reported.
    """
    code = node.read()
    if c_preproc.use_trigraphs:
        for (a, b) in c_preproc.trig_def:
            # bugfix: str.split() returns a list, which has no join();
            # the original `code = code.split(a).join(b)` raised
            # AttributeError whenever trigraph support was enabled.
            # The intended semantic is a plain substitution.
            code = code.replace(a, b)
    code = c_preproc.re_nl.sub('', code)
    code = c_preproc.re_cpp.sub(c_preproc.repl, code)
    return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)]
# keep a reference to the original parser class before replacing it below
parser = c_preproc.c_parser
class dumb_parser(parser):
    """Preprocessor that records every #include regardless of #ifdef conditions."""
    def addlines(self, node):
        # cycle guard: skip nodes already on the inclusion stack
        if node in self.nodes[:-1]:
            return
        self.currentnode_stack.append(node.parent)
        # Avoid reading the same files again
        try:
            lines = self.parse_cache[node]
        except KeyError:
            lines = self.parse_cache[node] = lines_includes(node)
        self.lines = lines + [(c_preproc.POPFILE, '')] + self.lines
    def start(self, node, env):
        """Walk the include graph starting at *node*, resolving each path."""
        try:
            self.parse_cache = node.ctx.parse_cache
        except AttributeError:
            # first run for this build context: create the shared cache
            self.parse_cache = node.ctx.parse_cache = {}
        self.addlines(node)
        while self.lines:
            (x, y) = self.lines.pop(0)
            if x == c_preproc.POPFILE:
                self.currentnode_stack.pop()
                continue
            self.tryfind(y)
# install the dumb parser in place of the default one
c_preproc.c_parser = dumb_parser
@@ -0,0 +1,72 @@ | |||||
#! /usr/bin/env python | |||||
# encoding: utf-8 | |||||
# harald at klimachs.de | |||||
""" | |||||
NEC SX Compiler for SX vector systems | |||||
""" | |||||
import os | |||||
import re | |||||
from waflib import Utils | |||||
from waflib.Tools import ccroot,ar | |||||
from waflib.Configure import conf | |||||
from waflib.Tools import xlc # method xlc_common_flags | |||||
from waflib.Tools.compiler_c import c_compiler | |||||
c_compiler['linux'].append('c_nec') | |||||
@conf
def find_sxc(conf):
    """Find the NEC SX C compiler and record it in the environment."""
    cc = conf.find_program(['sxcc'], var='CC')
    conf.get_sxc_version(cc)
    conf.env.CC = cc
    conf.env.CC_NAME = 'sxcc'
@conf
def get_sxc_version(conf, fc):
    """Detect the NEC SX C compiler version by running ``fc -V``.

    Stores (major, minor) in conf.env['C_VERSION']; aborts the
    configuration when the version cannot be parsed.
    """
    version_re = re.compile(r"C\+\+/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
    cmd = fc + ['-V']
    p = Utils.subprocess.Popen(cmd, stdin=False, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=None)
    out, err = p.communicate()
    # bugfix: on Python 3 communicate() returns bytes; matching them with a
    # str regex raises TypeError, so decode first (tolerating odd bytes)
    if not isinstance(out, str):
        out = out.decode('utf-8', 'replace')
    if not isinstance(err, str):
        err = err.decode('utf-8', 'replace')
    # the version banner may land on stdout or stderr depending on the tool
    if out:
        match = version_re(out)
    else:
        match = version_re(err)
    if not match:
        conf.fatal('Could not determine the NEC C compiler version.')
    k = match.groupdict()
    conf.env['C_VERSION'] = (k['major'], k['minor'])
@conf
def sxc_common_flags(conf):
    """Populate conf.env with the default flag patterns for the NEC SX C compiler."""
    v = conf.env
    # compile / link command fragments
    v['CC_SRC_F'] = []
    v['CC_TGT_F'] = ['-c', '-o']
    if not v['LINK_CC']:
        v['LINK_CC'] = v['CC']
    v['CCLNK_SRC_F'] = []
    v['CCLNK_TGT_F'] = ['-o']
    # flag formatting patterns
    for key, val in (
            ('CPPPATH_ST', '-I%s'),
            ('DEFINES_ST', '-D%s'),
            ('LIB_ST', '-l%s'),
            ('LIBPATH_ST', '-L%s'),
            ('STLIB_ST', '-l%s'),
            ('STLIBPATH_ST', '-L%s'),
            ('RPATH_ST', '')):
        v[key] = val
    v['SONAME_ST'] = []
    v['SHLIB_MARKER'] = []
    v['STLIB_MARKER'] = []
    # program / shared-lib / static-lib patterns
    v['LINKFLAGS_cprogram'] = ['']
    v['cprogram_PATTERN'] = '%s'
    v['CFLAGS_cshlib'] = ['-fPIC']
    v['LINKFLAGS_cshlib'] = ['']
    v['cshlib_PATTERN'] = 'lib%s.so'
    v['LINKFLAGS_cstlib'] = []
    v['cstlib_PATTERN'] = 'lib%s.a'
def configure(conf):
    """Configure the NEC SX C toolchain (waf 'configure' hook)."""
    conf.find_sxc()
    # bugfix/consistency: find_program reads the lowercase 'var' keyword
    # (see find_sxc above and the other tools in this file); the former
    # VAR='AR' was silently ignored, so AR was never stored
    conf.find_program('sxar', var='AR')
    conf.sxc_common_flags()
    conf.cc_load_tools()
    conf.cc_add_flags()
    conf.link_add_flags()
@@ -0,0 +1,156 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Anton Feldmann, 2012 | |||||
# "Base for cabal" | |||||
import re | |||||
import time | |||||
from waflib import TaskGen, Task, Utils | |||||
from waflib.Configure import conf | |||||
from waflib.Task import always_run | |||||
from waflib.TaskGen import extension, feature, after, before, before_method | |||||
from waflib.Utils import threading | |||||
from shutil import rmtree | |||||
lock = threading.Lock() | |||||
registering = False | |||||
def configure(self):
    """Find cabal/ghc-pkg and (re)create the local ghc package database."""
    self.find_program('cabal', var='CABAL')
    self.find_program('ghc-pkg', var='GHCPKG')
    pkgconfd = self.bldnode.abspath() + '/package.conf.d'
    self.env.PREFIX = self.bldnode.abspath() + '/dist'
    self.env.PKGCONFD = pkgconfd
    if self.root.find_node(pkgconfd + '/package.cache'):
        self.msg('Using existing package database', pkgconfd, color='CYAN')
    else:
        pkgdir = self.root.find_dir(pkgconfd)
        if pkgdir:
            # a database directory without package.cache is unusable
            self.msg('Deleting corrupt package database', pkgdir.abspath(), color='RED')
            rmtree(pkgdir.abspath())
            pkgdir = None
        self.cmd_and_log(self.env.GHCPKG + ['init', pkgconfd])
        self.msg('Created package database', pkgconfd, color='YELLOW' if pkgdir else 'GREEN')
@extension('.cabal')
def process_cabal(self, node):
    """Create the task chain configure -> build -> copy [-> register] -> touch for a .cabal file."""
    out_dir_node = self.bld.root.find_dir(self.bld.out_dir)
    # the .package stamp marks completion; other generators depend on it
    package_node = node.change_ext('.package')
    package_node = out_dir_node.find_or_declare(package_node.name)
    build_node = node.parent.get_bld()
    build_path = build_node.abspath()
    config_node = build_node.find_or_declare('setup-config')
    inplace_node = build_node.find_or_declare('package.conf.inplace')
    config_task = self.create_task('cabal_configure', node)
    config_task.cwd = node.parent.abspath()
    config_task.depends_on = getattr(self, 'depends_on', '')
    config_task.build_path = build_path
    config_task.set_outputs(config_node)
    build_task = self.create_task('cabal_build', config_node)
    build_task.cwd = node.parent.abspath()
    build_task.build_path = build_path
    build_task.set_outputs(inplace_node)
    copy_task = self.create_task('cabal_copy', inplace_node)
    copy_task.cwd = node.parent.abspath()
    copy_task.depends_on = getattr(self, 'depends_on', '')
    copy_task.build_path = build_path
    last_task = copy_task
    task_list = [config_task, build_task, copy_task]
    if (getattr(self, 'register', False)):
        # optional registration with the ghc package database
        register_task = self.create_task('cabal_register', inplace_node)
        register_task.cwd = node.parent.abspath()
        register_task.set_run_after(copy_task)
        register_task.build_path = build_path
        pkgreg_task = self.create_task('ghcpkg_register', inplace_node)
        pkgreg_task.cwd = node.parent.abspath()
        pkgreg_task.set_run_after(register_task)
        pkgreg_task.build_path = build_path
        last_task = pkgreg_task
        task_list += [register_task, pkgreg_task]
    # touch the stamp only after the whole chain has finished
    touch_task = self.create_task('cabal_touch', inplace_node)
    touch_task.set_run_after(last_task)
    touch_task.set_outputs(package_node)
    touch_task.build_path = build_path
    task_list += [touch_task]
    return task_list
def get_all_src_deps(node):
    """Collect every Haskell/C/C++/protobuf source file found under *node*."""
    patterns = ('**/*.hs', '**/*.hsc', '**/*.lhs', '**/*.c', '**/*.cpp', '**/*.proto')
    deps = []
    for pattern in patterns:
        deps.extend(node.ant_glob(pattern))
    return deps
class Cabal(Task.Task):
    """Base class for cabal tasks: rerun whenever any source file changes."""
    def scan(self):
        # implicit deps: every source file under the task generator's directory
        return (get_all_src_deps(self.generator.path), ())
class cabal_configure(Cabal):
    """Run 'cabal configure' against the local package database."""
    run_str = '${CABAL} configure -v0 --prefix=${PREFIX} --global --user --package-db=${PKGCONFD} --builddir=${tsk.build_path}'
    shell = True
    def scan(self):
        # depend on the .package stamps of the generators named in depends_on
        out_node = self.generator.bld.root.find_dir(self.generator.bld.out_dir)
        deps = [out_node.find_or_declare(dep).change_ext('.package') for dep in Utils.to_list(self.depends_on)]
        return (deps, ())
class cabal_build(Cabal):
    """Run 'cabal build' in the dedicated build directory."""
    run_str = '${CABAL} build -v1 --builddir=${tsk.build_path}/'
    shell = True
class cabal_copy(Cabal):
    """Run 'cabal copy' to install the build products under PREFIX."""
    run_str = '${CABAL} copy -v0 --builddir=${tsk.build_path}'
    shell = True
class cabal_register(Cabal):
    """Run 'cabal register' to generate the pkg.config registration file."""
    run_str = '${CABAL} register -v0 --gen-pkg-config=${tsk.build_path}/pkg.config --builddir=${tsk.build_path}'
    shell = True
class ghcpkg_register(Cabal):
    """Register pkg.config with ghc-pkg; registrations are serialized globally."""
    run_str = '${GHCPKG} update -v0 --global --user --package-conf=${PKGCONFD} ${tsk.build_path}/pkg.config'
    shell = True
    def runnable_status(self):
        # only one registration may run at a time: defer while another is active
        global registering
        with lock:
            busy = registering
        if busy:
            return Task.ASK_LATER
        status = Task.Task.runnable_status(self)
        if status == Task.RUN_ME:
            # claim the global registration slot before running
            with lock:
                registering = True
        return status
    def post_run(self):
        # release the registration slot for the next waiting task
        global registering
        with lock:
            registering = False
        return Task.Task.post_run(self)
class cabal_touch(Cabal):
    """Touch the .package stamp once the whole task chain has completed."""
    run_str = 'touch ${TGT}'
@@ -0,0 +1,110 @@ | |||||
#!/usr/bin/python | |||||
# -*- coding: utf-8 -*- | |||||
# Tool to extend c_config.check_cfg() | |||||
__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>" | |||||
__copyright__ = "Jérôme Carretero, 2014" | |||||
""" | |||||
This tool allows to work around the absence of ``*-config`` programs | |||||
on systems, by keeping the same clean configuration syntax but inferring | |||||
values or permitting their modification via the options interface. | |||||
Note that pkg-config can also support setting ``PKG_CONFIG_PATH``, | |||||
so you can put custom files in a folder containing new .pc files. | |||||
This tool could also be implemented by taking advantage of this fact. | |||||
Usage:: | |||||
def options(opt): | |||||
opt.load('c_config_alt') | |||||
opt.add_package_option('package') | |||||
def configure(conf):
conf.load('c_config_alt')
conf.check_cfg(...)
Known issues: | |||||
- Behavior with different build contexts... | |||||
""" | |||||
import os | |||||
import functools | |||||
from waflib import Task, Utils, TaskGen, Configure, Options, Errors | |||||
def name_to_dest(x):
    """Turn a package name into an option 'dest' (lowercase, '-' -> '_')."""
    return '_'.join(x.lower().split('-'))
def options(opt):
    """Attach an ``add_package_option`` helper to the option context."""
    def add_package_option(param):
        # register one --<pkg>-root option in the configure group
        dest = name_to_dest(param)
        group = opt.get_option_group("configure options")
        group.add_option('--%s-root' % dest,
            help="path containing include and lib subfolders for %s" \
            % param,
            )
    opt.add_package_option = add_package_option
# keep the original implementation so we can delegate to it
check_cfg_old = getattr(Configure.ConfigurationContext, 'check_cfg')
@Configure.conf
def check_cfg(conf, *k, **kw):
    """Wrapper over the standard check_cfg honouring --<pkg>-root overrides."""
    if k:
        # positional form: "pkg --cflags --libs" -> package + args keywords
        lst = k[0].split()
        kw['package'] = lst[0]
        kw['args'] = ' '.join(lst[1:])
    if not 'package' in kw:
        return check_cfg_old(conf, **kw)
    package = kw['package']
    package_lo = name_to_dest(package)
    package_hi = package.upper().replace('-', '_') # TODO FIXME
    package_hi = kw.get('uselib_store', package_hi)
    def check_folder(path, name):
        # abort the configuration if *path* is not an existing directory
        try:
            assert os.path.isdir(path)
        except AssertionError:
            raise Errors.ConfigurationError(
                "%s_%s (%s) is not a folder!" \
                % (package_lo, name, path))
        return path
    root = getattr(Options.options, '%s_root' % package_lo, None)
    if root is None:
        # no override given: defer to the regular *-config/pkg-config path
        return check_cfg_old(conf, **kw)
    else:
        def add_manual_var(k, v):
            conf.start_msg('Adding for %s a manual var' % (package))
            conf.env["%s_%s" % (k, package_hi)] = v
            conf.end_msg("%s = %s" % (k, v))
        check_folder(root, 'root')
        pkg_inc = check_folder(os.path.join(root, "include"), 'inc')
        add_manual_var('INCLUDES', [pkg_inc])
        pkg_lib = check_folder(os.path.join(root, "lib"), 'libpath')
        add_manual_var('LIBPATH', [pkg_lib])
        add_manual_var('LIB', [package])
        for x in kw.get('manual_deps', []):
            # propagate the flags of manually-specified dependencies
            for k, v in sorted(conf.env.get_merged_dict().items()):
                if k.endswith('_%s' % x):
                    k = k.replace('_%s' % x, '')
                    conf.start_msg('Adding for %s a manual dep' \
                        %(package))
                    conf.env["%s_%s" % (k, package_hi)] += v
                    conf.end_msg('%s += %s' % (k, v))
        return True
@@ -0,0 +1,147 @@ | |||||
#!/usr/bin/python | |||||
# -*- coding: utf-8 -*- | |||||
# Tool to provide dedicated variables for cross-compilation | |||||
__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>" | |||||
__copyright__ = "Jérôme Carretero, 2014" | |||||
""" | |||||
This tool allows to use environment variables to define cross-compilation things, | |||||
mostly used when you use build variants. | |||||
Usage: | |||||
- In your build script:: | |||||
def configure(conf):
... | |||||
conf.load('c_cross_gnu') | |||||
for variant in x_variants: | |||||
conf.xcheck_host() | |||||
conf.xcheck_host_var('POUET') | |||||
... | |||||
... | |||||
- Then:: | |||||
CHOST=arm-hardfloat-linux-gnueabi waf configure | |||||
env arm-hardfloat-linux-gnueabi-CC="clang -..." waf configure | |||||
CFLAGS=... CHOST=arm-hardfloat-linux-gnueabi HOST_CFLAGS=-g waf configure | |||||
HOST_CC="clang -..." waf configure | |||||
""" | |||||
import os | |||||
from waflib import Utils, Configure | |||||
try: | |||||
from shlex import quote | |||||
except ImportError: | |||||
from pipes import quote | |||||
@Configure.conf
def xcheck_prog(conf, var, tool, cross=False):
    """Honour an environment override *var* for a program, if present."""
    words = Utils.to_list(os.environ.get(var, ''))
    if not words:
        return
    conf.env[var] = words
    label = 'cross-compilation %s' % var if cross else var
    conf.msg('Will use %s' % label,
        " ".join(quote(w) for w in words))
@Configure.conf
def xcheck_envar(conf, name, wafname=None, cross=False):
	"""
	Append the content of the environment variable *name* to ``conf.env[wafname]``.

	:param wafname: env entry to extend (defaults to *name*)
	:param cross: label the console message as cross-compilation related
	"""
	wafname = wafname or name
	words = Utils.to_list(os.environ.get(name, None))
	if not words:
		return
	conf.env[wafname] += words
	label = ('cross-compilation %s' % wafname) if cross else wafname
	conf.msg('Will use %s' % label,
	 " ".join(quote(w) for w in words))
@Configure.conf
def xcheck_host_prog(conf, name, tool, wafname=None):
	"""
	Set ``conf.env[wafname]`` for cross-compilation, in priority order:

	1. the ``<CHOST>-<name>`` environment variable
	2. the ``HOST_<name>`` environment variable
	3. ``<CHOST>-<tool>`` built from the CHOST triplet
	"""
	wafname = wafname or name
	host = conf.env.CHOST
	triplet_specific = os.environ.get('%s-%s' % (host[0], name), None) if host else None
	if triplet_specific:
		words = Utils.to_list(triplet_specific)
		conf.env[wafname] += words
		conf.msg('Will use cross-compilation %s' % name,
		 " ".join(quote(w) for w in words))
		return
	conf.xcheck_prog('HOST_%s' % name, tool, cross=True)
	if conf.env[wafname]:
		return
	if host:
		# last resort: derive the program name from the triplet
		fallback = '%s-%s' % (host[0], tool)
		conf.env[wafname] = fallback
		conf.msg('Will use cross-compilation %s' % wafname, fallback)
@Configure.conf
def xcheck_host_envar(conf, name, wafname=None):
	"""
	Append environment content for cross-compilation: prefer the
	``<CHOST>-<name>`` variable, fall back on ``HOST_<name>``.
	"""
	wafname = wafname or name
	host = conf.env.CHOST
	triplet_specific = os.environ.get('%s-%s' % (host[0], name), None) if host else None
	if triplet_specific:
		words = Utils.to_list(triplet_specific)
		conf.env[wafname] += words
		conf.msg('Will use cross-compilation %s' % name,
		 " ".join(quote(w) for w in words))
		return
	conf.xcheck_envar('HOST_%s' % name, wafname, cross=True)
@Configure.conf
def xcheck_host(conf):
	"""
	Load the usual cross-compilation variables (CHOST, compilers, flags,
	PKG_CONFIG_PATH) from the process environment.
	"""
	conf.xcheck_envar('CHOST', cross=True)
	for prog, default_tool in (
		('CC', 'gcc'),
		('CXX', 'g++'),
		('LINK_CC', 'gcc'),
		('LINK_CXX', 'g++'),
		('AR', 'ar'),
		('AS', 'as'),
		('LD', 'ld'),
	):
		conf.xcheck_host_prog(prog, default_tool)
	for envar, wafname in (
		('CFLAGS', None),
		('CXXFLAGS', None),
		('LDFLAGS', 'LINKFLAGS'),
		('LIB', None),
		('PKG_CONFIG_PATH', None),
	):
		conf.xcheck_host_envar(envar, wafname)
	# TODO find a better solution than this ugliness
	if conf.env.PKG_CONFIG_PATH:
		conf.find_program('pkg-config', var='PKGCONFIG')
		conf.env.PKGCONFIG = [
			'env', 'PKG_CONFIG_PATH=%s' % (conf.env.PKG_CONFIG_PATH[0])
		] + conf.env.PKGCONFIG
@@ -0,0 +1,66 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Christoph Koke, 2013 | |||||
""" | |||||
Writes the c and cpp compile commands into build/compile_commands.json | |||||
see http://clang.llvm.org/docs/JSONCompilationDatabase.html | |||||
Usage: | |||||
def configure(conf): | |||||
conf.load('compiler_cxx') | |||||
... | |||||
conf.load('clang_compilation_database') | |||||
""" | |||||
import sys, os, json, shlex, pipes | |||||
from waflib import Logs, TaskGen | |||||
from waflib.Tools import c, cxx | |||||
if sys.hexversion >= 0x3030000: | |||||
quote = shlex.quote | |||||
else: | |||||
quote = pipes.quote | |||||
@TaskGen.feature('*')
@TaskGen.after_method('process_use')
def collect_compilation_db_tasks(self):
	"""
	Add a compilation database entry for compiled tasks.

	Collects the C/C++ compile tasks of this task generator into
	``bld.clang_compilation_database_tasks`` so that the database can be
	written once the build is over.
	"""
	try:
		clang_db = self.bld.clang_compilation_database_tasks
	except AttributeError:
		clang_db = self.bld.clang_compilation_database_tasks = []
		# register the post-build hook only once, not once per task generator
		self.bld.add_post_fun(write_compilation_database)
	for task in getattr(self, 'compiled_tasks', []):
		if isinstance(task, (c.c, cxx.cxx)):
			clang_db.append(task)
def write_compilation_database(ctx):
	"""
	Write the clang compilation database as JSON (``compile_commands.json``).

	Merges the commands recorded in ``ctx.clang_compilation_database_tasks``
	into the database left by a previous build, keyed by source file name.
	"""
	database_file = ctx.bldnode.make_node('compile_commands.json')
	Logs.info("Build commands will be stored in %s" % database_file.path_from(ctx.path))
	try:
		root = json.load(database_file)
	except (IOError, ValueError):
		# missing file, or corrupt JSON from an interrupted write: start afresh
		root = []
	clang_db = dict((x["file"], x) for x in root)
	for task in getattr(ctx, 'clang_compilation_database_tasks', []):
		try:
			cmd = task.last_cmd
		except AttributeError:
			# the task was not executed during this build
			continue
		directory = getattr(task, 'cwd', ctx.variant_dir)
		f_node = task.inputs[0]
		filename = os.path.relpath(f_node.abspath(), directory)
		cmd = " ".join(map(quote, cmd))
		entry = {
			"directory": directory,
			"command": cmd,
			"file": filename,
		}
		clang_db[filename] = entry
	root = list(clang_db.values())
	database_file.write(json.dumps(root, indent=2))
@@ -0,0 +1,881 @@ | |||||
#! /usr/bin/env python | |||||
# encoding: utf-8 | |||||
# CodeLite Project | |||||
# Christian Klein (chrikle@berlios.de) | |||||
# Created: Jan 2012 | |||||
# As templete for this file I used the msvs.py | |||||
# I hope this template will work proper | |||||
""" | |||||
Redistribution and use in source and binary forms, with or without | |||||
modification, are permitted provided that the following conditions | |||||
are met: | |||||
1. Redistributions of source code must retain the above copyright | |||||
notice, this list of conditions and the following disclaimer. | |||||
2. Redistributions in binary form must reproduce the above copyright | |||||
notice, this list of conditions and the following disclaimer in the | |||||
documentation and/or other materials provided with the distribution. | |||||
3. The name of the author may not be used to endorse or promote products | |||||
derived from this software without specific prior written permission. | |||||
THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR | |||||
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED | |||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE | |||||
DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, | |||||
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES | |||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR | |||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) | |||||
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, | |||||
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING | |||||
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE | |||||
POSSIBILITY OF SUCH DAMAGE. | |||||
""" | |||||
""" | |||||
To add this tool to your project: | |||||
def options(conf): | |||||
opt.load('codelite') | |||||
It can be a good idea to add the sync_exec tool too. | |||||
To generate solution files: | |||||
$ waf configure codelite | |||||
To customize the outputs, provide subclasses in your wscript files: | |||||
from waflib.extras import codelite | |||||
class vsnode_target(codelite.vsnode_target): | |||||
def get_build_command(self, props): | |||||
# likely to be required | |||||
return "waf.bat build" | |||||
def collect_source(self): | |||||
# likely to be required | |||||
... | |||||
class codelite_bar(codelite.codelite_generator): | |||||
def init(self): | |||||
codelite.codelite_generator.init(self) | |||||
self.vsnode_target = vsnode_target | |||||
The codelite class re-uses the same build() function for reading the targets (task generators), | |||||
you may therefore specify codelite settings on the context object: | |||||
def build(bld): | |||||
bld.codelite_solution_name = 'foo.workspace' | |||||
bld.waf_command = 'waf.bat' | |||||
bld.projects_dir = bld.srcnode.make_node('') | |||||
bld.projects_dir.mkdir() | |||||
ASSUMPTIONS: | |||||
* a project can be either a directory or a target, project files are written only for targets that have source files | |||||
* each project is a vcxproj file, therefore the project uuid needs only to be a hash of the absolute path | |||||
""" | |||||
import os, re, sys | |||||
import uuid # requires python 2.5 | |||||
from waflib.Build import BuildContext | |||||
from waflib import Utils, TaskGen, Logs, Task, Context, Node, Options | |||||
# ant_glob pattern used to pick up header files below 'codelite_includes' directories
HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'
PROJECT_TEMPLATE = r'''<?xml version="1.0" encoding="utf-8"?> | |||||
<CodeLite_Project Name="${project.name}" InternalType="Library"> | |||||
<Plugins> | |||||
<Plugin Name="qmake"> | |||||
<![CDATA[00010001N0005Release000000000000]]> | |||||
</Plugin> | |||||
</Plugins> | |||||
<Description/> | |||||
<Dependencies/> | |||||
<VirtualDirectory Name="src"> | |||||
${for x in project.source} | |||||
${if (project.get_key(x)=="sourcefile")} | |||||
<File Name="${x.abspath()}"/> | |||||
${endif} | |||||
${endfor} | |||||
</VirtualDirectory> | |||||
<VirtualDirectory Name="include"> | |||||
${for x in project.source} | |||||
${if (project.get_key(x)=="headerfile")} | |||||
<File Name="${x.abspath()}"/> | |||||
${endif} | |||||
${endfor} | |||||
</VirtualDirectory> | |||||
<Settings Type="Dynamic Library"> | |||||
<GlobalSettings> | |||||
<Compiler Options="" C_Options=""> | |||||
<IncludePath Value="."/> | |||||
</Compiler> | |||||
<Linker Options=""> | |||||
<LibraryPath Value="."/> | |||||
</Linker> | |||||
<ResourceCompiler Options=""/> | |||||
</GlobalSettings> | |||||
<Configuration Name="Release" CompilerType="gnu gcc" ReleasegerType="GNU gdb Releaseger" Type="Dynamic Library" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append"> | |||||
<Compiler Options="" C_Options="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" UseDifferentPCHFlags="no" PCHFlags=""> | |||||
<IncludePath Value="."/> | |||||
<IncludePath Value="."/> | |||||
</Compiler> | |||||
<Linker Options="" Required="yes"> | |||||
<LibraryPath Value=""/> | |||||
</Linker> | |||||
<ResourceCompiler Options="" Required="no"/> | |||||
<General OutputFile="${xml:project.build_properties[0].output_file}" IntermediateDirectory="" Command="" CommandArguments="" PauseExecWhenProcTerminates="yes"/> | |||||
<Environment EnvVarSetName="<Use Defaults>" DbgSetName="<Use Defaults>"> | |||||
<![CDATA[]]> | |||||
</Environment> | |||||
<Releaseger IsRemote="no" RemoteHostName="" RemoteHostPort="" ReleasegerPath=""> | |||||
<PostConnectCommands/> | |||||
<StartupCommands/> | |||||
</Releaseger> | |||||
<PreBuild/> | |||||
<PostBuild/> | |||||
<CustomBuild Enabled="yes"> | |||||
$b = project.build_properties[0]} | |||||
<RebuildCommand>${xml:project.get_rebuild_command(project.build_properties[0])}</RebuildCommand> | |||||
<CleanCommand>${xml:project.get_clean_command(project.build_properties[0])}</CleanCommand> | |||||
<BuildCommand>${xml:project.get_build_command(project.build_properties[0])}</BuildCommand> | |||||
<Target Name="Install">${xml:project.get_install_command(project.build_properties[0])}</Target> | |||||
<Target Name="Build and Install">${xml:project.get_build_and_install_command(project.build_properties[0])}</Target> | |||||
<Target Name="Build All">${xml:project.get_build_all_command(project.build_properties[0])}</Target> | |||||
<Target Name="Rebuild All">${xml:project.get_rebuild_all_command(project.build_properties[0])}</Target> | |||||
<Target Name="Clean All">${xml:project.get_clean_all_command(project.build_properties[0])}</Target> | |||||
<Target Name="Build and Install All">${xml:project.get_build_and_install_all_command(project.build_properties[0])}</Target> | |||||
<PreprocessFileCommand/> | |||||
<SingleFileCommand/> | |||||
<MakefileGenerationCommand/> | |||||
<ThirdPartyToolName>None</ThirdPartyToolName> | |||||
<WorkingDirectory/> | |||||
</CustomBuild> | |||||
<AdditionalRules> | |||||
<CustomPostBuild/> | |||||
<CustomPreBuild/> | |||||
</AdditionalRules> | |||||
<Completion> | |||||
<ClangCmpFlags/> | |||||
<ClangPP/> | |||||
<SearchPaths/> | |||||
</Completion> | |||||
</Configuration> | |||||
<Configuration Name="Release" CompilerType="gnu gcc" ReleasegerType="GNU gdb Releaseger" Type="" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append"> | |||||
<Compiler Options="" C_Options="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" UseDifferentPCHFlags="no" PCHFlags=""> | |||||
<IncludePath Value="."/> | |||||
</Compiler> | |||||
<Linker Options="" Required="yes"/> | |||||
<ResourceCompiler Options="" Required="no"/> | |||||
<General OutputFile="" IntermediateDirectory="./Release" Command="" CommandArguments="" UseSeparateReleaseArgs="no" ReleaseArguments="" WorkingDirectory="$(IntermediateDirectory)" PauseExecWhenProcTerminates="yes"/> | |||||
<Environment EnvVarSetName="<Use Defaults>" DbgSetName="<Use Defaults>"> | |||||
<![CDATA[ | |||||
]]> | |||||
</Environment> | |||||
<Releaseger IsRemote="no" RemoteHostName="" RemoteHostPort="" ReleasegerPath=""> | |||||
<PostConnectCommands/> | |||||
<StartupCommands/> | |||||
</Releaseger> | |||||
<PreBuild/> | |||||
<PostBuild/> | |||||
<CustomBuild Enabled="no"> | |||||
<RebuildCommand/> | |||||
<CleanCommand/> | |||||
<BuildCommand/> | |||||
<PreprocessFileCommand/> | |||||
<SingleFileCommand/> | |||||
<MakefileGenerationCommand/> | |||||
<ThirdPartyToolName/> | |||||
<WorkingDirectory/> | |||||
</CustomBuild> | |||||
<AdditionalRules> | |||||
<CustomPostBuild/> | |||||
<CustomPreBuild/> | |||||
</AdditionalRules> | |||||
<Completion> | |||||
<ClangCmpFlags/> | |||||
<ClangPP/> | |||||
<SearchPaths/> | |||||
</Completion> | |||||
</Configuration> | |||||
</Settings> | |||||
</CodeLite_Project>''' | |||||
# Template for the .workspace file; ${...} fragments are expanded by
# compile_template(). [:-10] strips the '.workspace' suffix from the name.
SOLUTION_TEMPLATE = '''<?xml version="1.0" encoding="utf-8"?>
<CodeLite_Workspace Name="${getattr(project, 'codelite_solution_name', None)[:-10]}" Database="./${getattr(project, 'codelite_solution_name', None)[:-10]}.tags">
  ${for p in project.all_projects}
    <Project Name = "${p.name}" Path = "${p.title}" Active="No"/>
  ${endfor}
  <BuildMatrix>
    <WorkspaceConfiguration Name="Release" Selected="yes">
      ${for p in project.all_projects}
        <Project Name="${p.name}" ConfigName="Release"/>
      ${endfor}
    </WorkspaceConfiguration>
  </BuildMatrix>
</CodeLite_Workspace>'''
# Scaffold for the function produced by compile_template(): the generated
# statements are substituted at %s and append text fragments to 'lst'.
COMPILE_TEMPLATE = '''def f(project):
	lst = []
	def xml_escape(value):
		return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
	%s
	#f = open('cmd.txt', 'w')
	#f.write(str(lst))
	#f.close()
	return ''.join(lst)
'''
# literal backslash | escaped dollar | ${...} substitution (code captured in 'code')
reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)
def compile_template(line):
	"""
	Compile a template expression into a python function (like jsps, but way shorter).

	``${...}`` fragments become python statements/expressions (``if``/``for``
	open an indented block, ``endif``/``endfor`` close it, ``xml:`` escapes,
	``py:`` is verbatim python); everything else is emitted literally.
	Returns the function built by :py:func:`waflib.Task.funex`.
	"""
	extr = []
	def repl(match):
		# record embedded code and leave a placeholder in the literal text
		g = match.group
		if g('dollar'): return "$"
		elif g('backslash'):
			return "\\"
		elif g('subst'):
			extr.append(g('code'))
			return "<<|@|>>"
		return None
	line2 = reg_act.sub(repl, line)
	# params: the literal fragments between the extracted code fragments
	params = line2.split('<<|@|>>')
	assert(extr)
	indent = 0
	buf = []
	# NOTE: the original also had a dead 'app = buf.append' here, immediately
	# shadowed by this def; removed
	def app(txt):
		buf.append(indent * '\t' + txt)
	for x in range(len(extr)):
		if params[x]:
			app("lst.append(%r)" % params[x])
		f = extr[x]
		if f.startswith('if') or f.startswith('for'):
			app(f + ':')
			indent += 1
		elif f.startswith('py:'):
			# verbatim python statement
			app(f[3:])
		elif f.startswith('endif') or f.startswith('endfor'):
			indent -= 1
		elif f.startswith('else') or f.startswith('elif'):
			indent -= 1
			app(f + ':')
			indent += 1
		elif f.startswith('xml:'):
			app('lst.append(xml_escape(%s))' % f[4:])
		else:
			app('lst.append(%s)' % f)
	if extr:
		if params[-1]:
			app("lst.append(%r)" % params[-1])
	fun = COMPILE_TEMPLATE % "\n\t".join(buf)
	return Task.funex(fun)
re_blank = re.compile('(\n|\r|\\s)*\n', re.M)
def rm_blank_lines(txt):
	"""Collapse whitespace runs ending in a newline into single CRLF breaks"""
	return re_blank.sub('\r\n', txt)
# UTF-8 byte-order mark, prepended to the generated .project files
BOM = '\xef\xbb\xbf'
try:
	BOM = bytes(BOM, 'iso8859-1') # python 3
except NameError:
	pass
def stealth_write(self, data, flags='wb'):
	"""
	Write *data* to this node only when the on-disk content differs, so that
	unchanged project files keep their timestamps (CodeLite reloads otherwise).
	"""
	try:
		x = unicode # probe: only python 2 defines 'unicode'
	except NameError:
		data = data.encode('utf-8') # python 3
	else:
		data = data.decode(sys.getfilesystemencoding(), 'replace')
		data = data.encode('utf-8')
	# original tested endswith('.project') twice; a single test is equivalent
	if self.name.endswith('.project'):
		data = BOM + data
	try:
		txt = self.read(flags='rb')
		if txt != data:
			raise ValueError('must write')
	except (IOError, ValueError):
		self.write(data, flags=flags)
	else:
		Logs.debug('codelite: skipping %s' % self.abspath())
Node.Node.stealth_write = stealth_write
# any character that is not alphanumeric or '-' gets mapped to '_'
re_quote = re.compile("[^a-zA-Z0-9-]")
def quote(s):
	"""Sanitize *s* into an identifier usable in project file names"""
	return re_quote.sub("_", s)
def xml_escape(value):
	"""Escape the five XML special characters ('&' first so entities stay intact)"""
	for ch, ent in (("&", "&amp;"), ('"', "&quot;"), ("'", "&apos;"), ("<", "&lt;"), (">", "&gt;")):
		value = value.replace(ch, ent)
	return value
def make_uuid(v, prefix = None):
	"""
	Return a stable uuid string derived from *v*; dicts are hashed over their
	sorted items so that key order does not matter. *prefix*, when given,
	replaces the first hex digits of the digest.
	"""
	if isinstance(v, dict):
		tmp = str(sorted((k, v[k]) for k in v.keys()))
	else:
		tmp = str(v)
	digest = Utils.md5(tmp.encode()).hexdigest().upper()
	if prefix:
		digest = '%s%s' % (prefix, digest[8:])
	return str(uuid.UUID(digest, version = 4)).upper()
def diff(node, fromnode):
	"""Relative path between two nodes, with '(..)' standing for '..'"""
	up = 0
	parts = []
	a, b = node, fromnode
	ah, bh = a.height(), b.height()
	# climb the deeper side until both are at the same depth
	while ah > bh:
		parts.append(a.name)
		a = a.parent
		ah -= 1
	while bh > ah:
		up += 1
		b = b.parent
		bh -= 1
	# now climb both until the common ancestor is reached
	while id(a) != id(b):
		parts.append(a.name)
		up += 1
		a = a.parent
		b = b.parent
	parts.extend(['(..)'] * up)
	parts.reverse()
	return tuple(parts)
class build_property(object):
	"""Plain attribute holder for per-configuration build settings (outdir, platform, ...)"""
	pass
class vsnode(object):
	"""
	Abstract base for the elements written to the workspace; every node
	carries a uuid and may have a parent used for nesting
	"""
	def __init__(self, ctx):
		self.ctx = ctx # codelite context
		self.name = '' # string, mandatory
		self.vspath = '' # path in visual studio (name for dirs, absolute path for projects)
		self.uuid = '' # string, mandatory
		self.parent = None # parent node for visual studio nesting
	def get_waf(self):
		"""Full command used to invoke waf (subclasses may override)"""
		return '%s/%s' % (self.ctx.srcnode.abspath(), getattr(self.ctx, 'waf_command', 'waf'))
	def ptype(self):
		"""Special uuid identifying the project kind in the solution file"""
		pass
	def write(self):
		"""Write the project file; nothing to do by default"""
		pass
	def make_uuid(self, val):
		"""Alias so the templates (which cannot see globals) can build uuids"""
		return make_uuid(val)
class vsnode_vsdir(vsnode):
	"""
	Workspace folder nodes (these do not match the filesystem tree!)
	"""
	VS_GUID_SOLUTIONFOLDER = "2150E333-8FDC-42A3-9474-1A3956D46DE8"
	def __init__(self, ctx, uuid, name, vspath=''):
		vsnode.__init__(self, ctx)
		self.uuid = uuid
		self.title = self.name = name
		self.vspath = vspath or name
	def ptype(self):
		return self.VS_GUID_SOLUTIONFOLDER
class vsnode_project(vsnode):
	"""
	Abstract class representing visual studio project elements
	A project is assumed to be writable, and has a node representing the file to write to
	"""
	# uuid identifying vcproj-style projects in the solution file
	VS_GUID_VCPROJ = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942"
	def ptype(self):
		return self.VS_GUID_VCPROJ
	def __init__(self, ctx, node):
		vsnode.__init__(self, ctx)
		self.path = node
		self.uuid = make_uuid(node.abspath())
		self.name = node.name
		self.title = self.path.abspath()
		self.source = [] # list of node objects
		self.build_properties = [] # list of properties (nmake commands, output dir, etc)
	def dirs(self):
		"""
		Get the list of parent folders of the source files (header files included)
		for writing the filters
		"""
		lst = []
		def add(x):
			# recurse upwards, stopping at the task generator directory
			if x.height() > self.tg.path.height() and x not in lst:
				lst.append(x)
				add(x.parent)
		for x in self.source:
			add(x.parent)
		return lst
	def write(self):
		"""Expand PROJECT_TEMPLATE with this project and write it (if changed)"""
		Logs.debug('codelite: creating %r' % self.path)
		#print "self.name:",self.name
		# first write the project file
		template1 = compile_template(PROJECT_TEMPLATE)
		proj_str = template1(self)
		proj_str = rm_blank_lines(proj_str)
		self.path.stealth_write(proj_str)
		# then write the filter
		#template2 = compile_template(FILTER_TEMPLATE)
		#filter_str = template2(self)
		#filter_str = rm_blank_lines(filter_str)
		#tmp = self.path.parent.make_node(self.path.name + '.filters')
		#tmp.stealth_write(filter_str)
	def get_key(self, node):
		"""
		required for writing the source files
		"""
		# only .c/.cpp are treated as compilation units; everything else is a header
		name = node.name
		if name.endswith('.cpp') or name.endswith('.c'):
			return 'sourcefile'
		return 'headerfile'
	def collect_properties(self):
		"""
		Returns a list of triplet (configuration, platform, output_directory)
		"""
		# note: stores the list on self.build_properties rather than returning it
		ret = []
		for c in self.ctx.configurations:
			for p in self.ctx.platforms:
				x = build_property()
				x.outdir = ''
				x.configuration = c
				x.platform = p
				x.preprocessor_definitions = ''
				x.includes_search_path = ''
				# can specify "deploy_dir" too
				ret.append(x)
		self.build_properties = ret
	def get_build_params(self, props):
		"""Return the (waf command, options) pair used by the command getters below"""
		opt = ''
		return (self.get_waf(), opt)
	def get_build_command(self, props):
		return "%s build %s" % self.get_build_params(props)
	def get_clean_command(self, props):
		return "%s clean %s" % self.get_build_params(props)
	def get_rebuild_command(self, props):
		return "%s clean build %s" % self.get_build_params(props)
	def get_install_command(self, props):
		return "%s install %s" % self.get_build_params(props)
	def get_build_and_install_command(self, props):
		return "%s build install %s" % self.get_build_params(props)
	def get_build_and_install_all_command(self, props):
		return "%s build install" % self.get_build_params(props)[0]
	def get_clean_all_command(self, props):
		return "%s clean" % self.get_build_params(props)[0]
	def get_build_all_command(self, props):
		return "%s build" % self.get_build_params(props)[0]
	def get_rebuild_all_command(self, props):
		return "%s clean build" % self.get_build_params(props)[0]
	def get_filter_name(self, node):
		# backslash-joined relative path, as expected by the IDE
		lst = diff(node, self.tg.path)
		return '\\'.join(lst) or '.'
class vsnode_alias(vsnode_project):
	"""
	Project node that does not correspond to a real target
	(base class for build_all, install_all, project_view)
	"""
	def __init__(self, ctx, node, name):
		vsnode_project.__init__(self, ctx, node)
		self.output_file = ''
		self.name = name
class vsnode_build_all(vsnode_alias):
	"""
	Fake target emulating "make all" (starting one process per target is slow);
	this is the only alias enabled by default
	"""
	def __init__(self, ctx, node, name='build_all_projects'):
		vsnode_alias.__init__(self, ctx, node, name)
		self.is_active = True
class vsnode_install_all(vsnode_alias):
	"""
	Fake target used to emulate the behaviour of "make install"
	"""
	def __init__(self, ctx, node, name='install_all_projects'):
		vsnode_alias.__init__(self, ctx, node, name)
	def get_clean_command(self, props):
		"""waf invocation that cleans everything"""
		return "%s clean %s" % self.get_build_params(props)
	def get_build_command(self, props):
		"""waf invocation that builds and installs"""
		return "%s build install %s" % self.get_build_params(props)
	def get_rebuild_command(self, props):
		"""waf invocation that cleans, then builds and installs"""
		return "%s clean build install %s" % self.get_build_params(props)
class vsnode_project_view(vsnode_alias):
	"""
	Fake target used to emulate a file system view
	"""
	def __init__(self, ctx, node, name='project_view'):
		vsnode_alias.__init__(self, ctx, node, name)
		self.tg = self.ctx() # fake one, cannot remove
		# waf bookkeeping files and IDE caches must not appear in the view
		self.exclude_files = Node.exclude_regs + '''
waf-1.8.*
waf3-1.8.*/**
.waf-1.8.*
.waf3-1.8.*/**
**/*.sdf
**/*.suo
**/*.ncb
**/%s
''' % Options.lockfile
	def collect_source(self):
		# this is likely to be slow
		self.source = self.ctx.srcnode.ant_glob('**', excl=self.exclude_files)
	def get_build_command(self, props):
		# the command used for the build, e.g. "waf codelite"
		params = self.get_build_params(props) + (self.ctx.cmd,)
		return "%s %s %s" % params
	def get_clean_command(self, props):
		# nothing to clean for a plain file-system view
		return ""
	def get_rebuild_command(self, props):
		return self.get_build_command(props)
class vsnode_target(vsnode_project):
	"""
	CodeLite project representing a targets (programs, libraries, etc) and bound
	to a task generator
	"""
	def __init__(self, ctx, tg):
		"""
		A project is more or less equivalent to a file/folder
		"""
		base = getattr(ctx, 'projects_dir', None) or tg.path
		node = base.make_node(quote(tg.name) + ctx.project_extension) # the project file as a Node
		vsnode_project.__init__(self, ctx, node)
		self.name = quote(tg.name)
		self.tg = tg # task generator
	def get_build_params(self, props):
		"""
		Same as the base class, plus a '--targets' option limiting the build
		"""
		opt = ''
		if getattr(self, 'tg', None):
			opt += " --targets=%s" % self.tg.name
		return (self.get_waf(), opt)
	def collect_source(self):
		"""
		Gather the target's sources and the headers below codelite_includes
		"""
		tg = self.tg
		srcs = tg.to_nodes(getattr(tg, 'source', []))
		hdrs = []
		for inc in Utils.to_list(getattr(tg, 'codelite_includes', [])):
			if isinstance(inc, str):
				inc = tg.path.find_node(inc)
			if inc:
				hdrs.extend(y for y in inc.ant_glob(HEADERS_GLOB, flat=False))
		# remove duplicates, then sort for a stable project file
		self.source.extend(set(srcs + hdrs))
		self.source.sort(key=lambda x: x.abspath())
	def collect_properties(self):
		"""
		CodeLite projects are associated with platforms and configurations (for building especially)
		"""
		super(vsnode_target, self).collect_properties()
		for prop in self.build_properties:
			prop.outdir = self.path.parent.abspath()
			prop.preprocessor_definitions = ''
			prop.includes_search_path = ''
			try:
				tsk = self.tg.link_task
			except AttributeError:
				# target does not produce a linked binary: keep the defaults
				pass
			else:
				prop.output_file = tsk.outputs[0].abspath()
				prop.preprocessor_definitions = ';'.join(tsk.env.DEFINES)
				prop.includes_search_path = ';'.join(self.tg.env.INCPATHS)
class codelite_generator(BuildContext): | |||||
'''generates a CodeLite workspace''' | |||||
cmd = 'codelite' | |||||
fun = 'build' | |||||
	def init(self):
		"""
		Some data that needs to be present
		"""
		if not getattr(self, 'configurations', None):
			self.configurations = ['Release'] # LocalRelease, RemoteDebug, etc
		if not getattr(self, 'platforms', None):
			self.platforms = ['Win32']
		if not getattr(self, 'all_projects', None):
			self.all_projects = []
		if not getattr(self, 'project_extension', None):
			self.project_extension = '.project'
		if not getattr(self, 'projects_dir', None):
			# by default the project files go next to the sources
			self.projects_dir = self.srcnode.make_node('')
			self.projects_dir.mkdir()
		# bind the classes to the object, so that subclass can provide custom generators
		if not getattr(self, 'vsnode_vsdir', None):
			self.vsnode_vsdir = vsnode_vsdir
		if not getattr(self, 'vsnode_target', None):
			self.vsnode_target = vsnode_target
		if not getattr(self, 'vsnode_build_all', None):
			self.vsnode_build_all = vsnode_build_all
		if not getattr(self, 'vsnode_install_all', None):
			self.vsnode_install_all = vsnode_install_all
		if not getattr(self, 'vsnode_project_view', None):
			self.vsnode_project_view = vsnode_project_view
		# version markers inherited from the msvs template this file is based on
		self.numver = '11.00'
		self.vsver = '2010'
	def execute(self):
		"""
		Entry point
		"""
		# restore the build state and the configuration environments first
		self.restore()
		if not self.all_envs:
			self.load_envs()
		self.recurse([self.run_dir])
		# user initialization
		self.init()
		# two phases for creating the solution
		self.collect_projects() # add project objects into "self.all_projects"
		self.write_files() # write the corresponding project and solution files
def collect_projects(self): | |||||
""" | |||||
Fill the list self.all_projects with project objects | |||||
Fill the list of build targets | |||||
""" | |||||
self.collect_targets() | |||||
#self.add_aliases() | |||||
#self.collect_dirs() | |||||
default_project = getattr(self, 'default_project', None) | |||||
def sortfun(x): | |||||
if x.name == default_project: | |||||
return '' | |||||
return getattr(x, 'path', None) and x.path.abspath() or x.name | |||||
self.all_projects.sort(key=sortfun) | |||||
def write_files(self): | |||||
""" | |||||
Write the project and solution files from the data collected | |||||
so far. It is unlikely that you will want to change this | |||||
""" | |||||
for p in self.all_projects: | |||||
p.write() | |||||
# and finally write the solution file | |||||
node = self.get_solution_node() | |||||
node.parent.mkdir() | |||||
Logs.warn('Creating %r' % node) | |||||
#a = dir(self.root) | |||||
#for b in a: | |||||
# print b | |||||
#print self.group_names | |||||
#print "Hallo2: ",self.root.listdir() | |||||
#print getattr(self, 'codelite_solution_name', None) | |||||
template1 = compile_template(SOLUTION_TEMPLATE) | |||||
sln_str = template1(self) | |||||
sln_str = rm_blank_lines(sln_str) | |||||
node.stealth_write(sln_str) | |||||
def get_solution_node(self): | |||||
""" | |||||
The solution filename is required when writing the .vcproj files | |||||
return self.solution_node and if it does not exist, make one | |||||
""" | |||||
try: | |||||
return self.solution_node | |||||
except: | |||||
pass | |||||
codelite_solution_name = getattr(self, 'codelite_solution_name', None) | |||||
if not codelite_solution_name: | |||||
codelite_solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '.workspace' | |||||
setattr(self, 'codelite_solution_name', codelite_solution_name) | |||||
if os.path.isabs(codelite_solution_name): | |||||
self.solution_node = self.root.make_node(codelite_solution_name) | |||||
else: | |||||
self.solution_node = self.srcnode.make_node(codelite_solution_name) | |||||
return self.solution_node | |||||
def project_configurations(self):
    """Return every (configuration, platform) pair as a list of tuples."""
    return [(cfg, plat) for cfg in self.configurations for plat in self.platforms]
def collect_targets(self):
    """
    Process the list of task generators: create one project per task
    generator that produces a link task (program/library).
    """
    for g in self.groups:
        for tg in g:
            if not isinstance(tg, TaskGen.task_gen):
                continue
            if not hasattr(tg, 'codelite_includes'):
                # include paths shown in the IDE: own includes + exported ones
                tg.codelite_includes = tg.to_list(getattr(tg, 'includes', [])) + tg.to_list(getattr(tg, 'export_includes', []))
            tg.post()
            if not getattr(tg, 'link_task', None):
                # only targets that link something become projects
                continue
            p = self.vsnode_target(self, tg)
            p.collect_source() # delegate this processing
            p.collect_properties()
            self.all_projects.append(p)
def add_aliases(self):
    """
    Add a specific target that emulates the "make all" necessary for Visual studio when pressing F7
    We also add an alias for "make install" (disabled by default)
    """
    base = getattr(self, 'projects_dir', None) or self.tg.path
    node_project = base.make_node('build_all_projects' + self.project_extension) # Node
    p_build = self.vsnode_build_all(self, node_project)
    p_build.collect_properties()
    self.all_projects.append(p_build)
    node_project = base.make_node('install_all_projects' + self.project_extension) # Node
    p_install = self.vsnode_install_all(self, node_project)
    p_install.collect_properties()
    self.all_projects.append(p_install)
    node_project = base.make_node('project_view' + self.project_extension) # Node
    p_view = self.vsnode_project_view(self, node_project)
    p_view.collect_source()
    p_view.collect_properties()
    self.all_projects.append(p_view)
    # group the three alias projects under one virtual folder in the IDE
    n = self.vsnode_vsdir(self, make_uuid(self.srcnode.abspath() + 'build_aliases'), "build_aliases")
    p_build.parent = p_install.parent = p_view.parent = n
    self.all_projects.append(n)
def collect_dirs(self):
    """
    Create the folder structure in the CodeLite project view,
    mirroring the source directory layout of the task generators.
    """
    seen = {}
    def make_parents(proj):
        # look at a project, try to make a parent
        if getattr(proj, 'parent', None):
            # aliases already have parents
            return
        x = proj.iter_path
        if x in seen:
            proj.parent = seen[x]
            return
        # There is not vsnode_vsdir for x.
        # So create a project representing the folder "x"
        n = proj.parent = seen[x] = self.vsnode_vsdir(self, make_uuid(x.abspath()), x.name)
        n.iter_path = x.parent
        self.all_projects.append(n)
        # recurse up to the project directory
        if x.height() > self.srcnode.height() + 1:
            make_parents(n)
    for p in self.all_projects[:]: # iterate over a copy of all projects
        if not getattr(p, 'tg', None):
            # but only projects that have a task generator
            continue
        # make a folder for each task generator
        p.iter_path = p.tg.path
        make_parents(p)
def options(ctx):
    """Command-line options hook; this generator adds none."""
    return None
@@ -0,0 +1,39 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Replaces the default formatter by one which understands GCC output and colorizes it. | |||||
__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>" | |||||
__copyright__ = "Jérôme Carretero, 2012" | |||||
import sys | |||||
from waflib import Logs | |||||
class ColorGCCFormatter(Logs.formatter):
    """Log formatter that colorizes GCC diagnostics (warning/error/note lines)."""
    def __init__(self, colors):
        # colors: object exposing YELLOW/RED/CYAN escape sequences
        self.colors = colors
        Logs.formatter.__init__(self)
    def format(self, rec):
        # walk up the call stack looking for exec_command() to find out
        # which command produced this log record
        frame = sys._getframe()
        while frame:
            func = frame.f_code.co_name
            if func == 'exec_command':
                cmd = frame.f_locals['cmd']
                # only colorize output of gcc/g++ invocations
                if isinstance(cmd, list) and ('gcc' in cmd[0] or 'g++' in cmd[0]):
                    lines = []
                    for line in rec.msg.splitlines():
                        if 'warning: ' in line:
                            lines.append(self.colors.YELLOW + line)
                        elif 'error: ' in line:
                            lines.append(self.colors.RED + line)
                        elif 'note: ' in line:
                            lines.append(self.colors.CYAN + line)
                        else:
                            lines.append(line)
                    rec.msg = "\n".join(lines)
            frame = frame.f_back
        return Logs.formatter.format(self, rec)
def options(opt):
    """Install the GCC colorizing formatter on the root waf logger."""
    handler = Logs.log.handlers[0]
    handler.setFormatter(ColorGCCFormatter(Logs.colors))
@@ -0,0 +1,51 @@ | |||||
#!/usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Replaces the default formatter by one which understands RVCT output and colorizes it. | |||||
__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>" | |||||
__copyright__ = "Jérôme Carretero, 2012" | |||||
import sys | |||||
import atexit | |||||
from waflib import Logs | |||||
# error lines collected while formatting, summarized when the process exits
errors = []

def show_errors():
    """Print the first six recorded error lines at exit."""
    for err in errors[:6]:
        print("Error: %s" % err)

atexit.register(show_errors)
class RcvtFormatter(Logs.formatter):
    """Log formatter that colorizes ARM RVCT diagnostics and records error lines."""
    def __init__(self, colors):
        Logs.formatter.__init__(self)
        # colors: object exposing YELLOW/RED/CYAN escape sequences
        self.colors = colors
    def format(self, rec):
        # walk up the call stack looking for exec_command() to find out
        # which command produced this log record
        frame = sys._getframe()
        while frame:
            func = frame.f_code.co_name
            if func == 'exec_command':
                cmd = frame.f_locals['cmd']
                # only colorize output of armcc/armld invocations
                if isinstance(cmd, list) and ('armcc' in cmd[0] or 'armld' in cmd[0]):
                    lines = []
                    for line in rec.msg.splitlines():
                        if 'Warning: ' in line:
                            lines.append(self.colors.YELLOW + line)
                        elif 'Error: ' in line:
                            lines.append(self.colors.RED + line)
                            # remember errors for the atexit summary
                            errors.append(line)
                        elif 'note: ' in line:
                            lines.append(self.colors.CYAN + line)
                        else:
                            lines.append(line)
                    rec.msg = "\n".join(lines)
            frame = frame.f_back
        return Logs.formatter.format(self, rec)
def options(opt):
    """Install the RVCT colorizing formatter on the root waf logger."""
    handler = Logs.log.handlers[0]
    handler.setFormatter(RcvtFormatter(Logs.colors))
@@ -0,0 +1,370 @@ | |||||
#! /usr/bin/env python | |||||
# encoding: utf-8 | |||||
# Thomas Nagy, 2010 (ita) | |||||
""" | |||||
This file is provided to enable compatibility with waf 1.5 | |||||
It was enabled by default in waf 1.6, but it is not used in waf 1.7 | |||||
""" | |||||
import sys | |||||
from waflib import ConfigSet, Logs, Options, Scripting, Task, Build, Configure, Node, Runner, TaskGen, Utils, Errors, Context | |||||
# the following is to bring some compatibility with waf 1.5 "import waflib.Configure → import Configure" | |||||
# Expose the waf 1.6+ modules under their waf 1.5 top-level names so that
# legacy "import Environment" style statements keep working.
sys.modules['Environment'] = ConfigSet
ConfigSet.Environment = ConfigSet.ConfigSet
sys.modules['Logs'] = Logs
sys.modules['Options'] = Options
sys.modules['Scripting'] = Scripting
sys.modules['Task'] = Task
sys.modules['Build'] = Build
sys.modules['Configure'] = Configure
sys.modules['Node'] = Node
sys.modules['Runner'] = Runner
sys.modules['TaskGen'] = TaskGen
sys.modules['Utils'] = Utils
from waflib.Tools import c_preproc
sys.modules['preproc'] = c_preproc
from waflib.Tools import c_config
sys.modules['config_c'] = c_config
# waf 1.5 method/attribute aliases
ConfigSet.ConfigSet.copy = ConfigSet.ConfigSet.derive
ConfigSet.ConfigSet.set_variant = Utils.nada
Build.BuildContext.add_subdirs = Build.BuildContext.recurse
Build.BuildContext.new_task_gen = Build.BuildContext.__call__
Build.BuildContext.is_install = 0
Node.Node.relpath_gen = Node.Node.path_from
Utils.pproc = Utils.subprocess
Utils.get_term_cols = Logs.get_term_cols
def cmd_output(cmd, **kw):
    """waf 1.5 helper: run *cmd* and return its standard output.

    Keyword arguments: ``silent`` suppresses the non-zero-exit exception
    and also captures stderr; ``e`` is mapped to subprocess' ``env``.
    Raises ValueError when the command cannot be started, or exits with a
    non-zero status while not silent.
    """
    silent = kw.pop('silent', False)
    if 'e' in kw:
        kw['env'] = kw.pop('e')
    kw['shell'] = isinstance(cmd, str)
    kw['stdout'] = Utils.subprocess.PIPE
    if silent:
        kw['stderr'] = Utils.subprocess.PIPE
    try:
        proc = Utils.subprocess.Popen(cmd, **kw)
        output = proc.communicate()[0]
    except OSError as exc:
        raise ValueError(str(exc))
    if proc.returncode:
        if not silent:
            raise ValueError("command execution failed: %s -> %r" % (cmd, str(output)))
        output = ''
    return output
Utils.cmd_output = cmd_output
def name_to_obj(self, s, env=None):
    """waf 1.5 wrapper around get_tgen_by_name(); *env* is accepted but ignored."""
    if Logs.verbose:
        Logs.warn('compat: change "name_to_obj(name, env)" by "get_tgen_by_name(name)"')
    return self.get_tgen_by_name(s)
Build.BuildContext.name_to_obj = name_to_obj
def env_of_name(self, name):
    """Return the environment stored under *name*, or None (after logging an error)."""
    try:
        return self.all_envs[name]
    except KeyError:
        Logs.error('no such environment: '+name)
        return None
Build.BuildContext.env_of_name = env_of_name
def set_env_name(self, name, env):
    """Register *env* under *name* in the configuration and return it."""
    self.all_envs[name] = env
    return env
# attach the waf 1.5 helper to the configuration context
Configure.ConfigurationContext.set_env_name = set_env_name
def retrieve(self, name, fromenv=None):
    """Return the environment *name*, creating and registering a fresh one
    if it does not exist yet (waf 1.5 behaviour)."""
    try:
        env = self.all_envs[name]
    except KeyError:
        env = ConfigSet.ConfigSet()
        self.prepare_env(env)
        self.all_envs[name] = env
    else:
        if fromenv:
            # a source env was supplied but a configured one already exists
            Logs.warn("The environment %s may have been configured already" % name)
    return env
Configure.ConfigurationContext.retrieve = retrieve
# more waf 1.5 -> 1.7 renames (configuration, options and task factories)
Configure.ConfigurationContext.sub_config = Configure.ConfigurationContext.recurse
Configure.ConfigurationContext.check_tool = Configure.ConfigurationContext.load
Configure.conftest = Configure.conf
Configure.ConfigurationError = Errors.ConfigurationError
Utils.WafError = Errors.WafError
Options.OptionsContext.sub_options = Options.OptionsContext.recurse
Options.OptionsContext.tool_options = Context.Context.load
Options.Handler = Options.OptionsContext
Task.simple_task_type = Task.task_type_from_func = Task.task_factory
Task.TaskBase.classes = Task.classes
def setitem(self, key, value):
    """ConfigSet.__setitem__ replacement mapping legacy CCFLAGS* keys to CFLAGS*."""
    if key.startswith('CCFLAGS'):
        # 'CCFLAGS' / 'CCFLAGS_FOO' become 'CFLAGS' / 'CFLAGS_FOO'
        key = key.replace('CCFLAGS', 'CFLAGS', 1)
    self.table[key] = value
# install the compatibility __setitem__ on ConfigSet
ConfigSet.ConfigSet.__setitem__ = setitem
@TaskGen.feature('d')
@TaskGen.before('apply_incpaths')
def old_importpaths(self):
    """Map the waf 1.5 'importpaths' attribute of D task generators to 'includes'."""
    if getattr(self, 'importpaths', []):
        self.includes = self.importpaths
from waflib import Context
eld = Context.load_tool
def load_tool(*k, **kw):
    """Wrap Context.load_tool to rename the waf 1.5 tool hooks
    (set_options -> options, detect -> configure)."""
    ret = eld(*k, **kw)
    if 'set_options' in ret.__dict__:
        if Logs.verbose:
            Logs.warn('compat: rename "set_options" to options')
        ret.options = ret.set_options
    if 'detect' in ret.__dict__:
        if Logs.verbose:
            Logs.warn('compat: rename "detect" to "configure"')
        ret.configure = ret.detect
    return ret
Context.load_tool = load_tool
def get_curdir(self):
    """waf 1.5 'curdir' attribute: absolute path of the current directory."""
    return self.path.abspath()
# read-only property; the setter is a no-op (Utils.nada)
Context.Context.curdir = property(get_curdir, Utils.nada)
rev = Context.load_module
def load_module(path, encoding=None):
    """Wrap Context.load_module to rename waf 1.5 wscript globals
    (set_options -> options, srcdir -> top, blddir -> out)."""
    ret = rev(path, encoding)
    if 'set_options' in ret.__dict__:
        if Logs.verbose:
            Logs.warn('compat: rename "set_options" to "options" (%r)' % path)
        ret.options = ret.set_options
    if 'srcdir' in ret.__dict__:
        if Logs.verbose:
            Logs.warn('compat: rename "srcdir" to "top" (%r)' % path)
        ret.top = ret.srcdir
    if 'blddir' in ret.__dict__:
        if Logs.verbose:
            Logs.warn('compat: rename "blddir" to "out" (%r)' % path)
        ret.out = ret.blddir
    return ret
Context.load_module = load_module
old_post = TaskGen.task_gen.post
def post(self):
    """Wrap task_gen.post to translate waf 1.5 feature names and attributes
    (cc -> c, cstaticlib -> cstlib/cxxstlib, ccflags -> cflags)."""
    self.features = self.to_list(self.features)
    if 'cc' in self.features:
        if Logs.verbose:
            Logs.warn('compat: the feature cc does not exist anymore (use "c")')
        self.features.remove('cc')
        self.features.append('c')
    if 'cstaticlib' in self.features:
        if Logs.verbose:
            Logs.warn('compat: the feature cstaticlib does not exist anymore (use "cstlib" or "cxxstlib")')
        self.features.remove('cstaticlib')
        # pick the C++ static lib feature when 'cxx' is also present
        self.features.append(('cxx' in self.features) and 'cxxstlib' or 'cstlib')
    if getattr(self, 'ccflags', None):
        if Logs.verbose:
            Logs.warn('compat: "ccflags" was renamed to "cflags"')
        self.cflags = self.ccflags
    return old_post(self)
TaskGen.task_gen.post = post
def waf_version(*k, **kw):
    """waf 1.5 stub: the version check was removed in waf 1.6."""
    Logs.warn('wrong version (waf_version was removed in waf 1.6)')
Utils.waf_version = waf_version
import os | |||||
@TaskGen.feature('c', 'cxx', 'd')
@TaskGen.before('apply_incpaths', 'propagate_uselib_vars')
@TaskGen.after('apply_link', 'process_source')
def apply_uselib_local(self):
    """
    process the uselib_local attribute
    execute after apply_link because of the execution order set on 'link_task'
    """
    env = self.env
    from waflib.Tools.ccroot import stlink_task
    # 1. the case of the libs defined in the project (visit ancestors first)
    # the ancestors external libraries (uselib) will be prepended
    self.uselib = self.to_list(getattr(self, 'uselib', []))
    self.includes = self.to_list(getattr(self, 'includes', []))
    names = self.to_list(getattr(self, 'uselib_local', []))
    get = self.bld.get_tgen_by_name
    seen = set([])
    seen_uselib = set([])
    tmp = Utils.deque(names) # consume a copy of the list of names
    if tmp:
        if Logs.verbose:
            Logs.warn('compat: "uselib_local" is deprecated, replace by "use"')
    while tmp:
        lib_name = tmp.popleft()
        # visit dependencies only once
        if lib_name in seen:
            continue
        y = get(lib_name)
        y.post()
        seen.add(lib_name)
        # object has ancestors to process (shared libraries): add them to the end of the list
        if getattr(y, 'uselib_local', None):
            for x in self.to_list(getattr(y, 'uselib_local', [])):
                obj = get(x)
                obj.post()
                if getattr(obj, 'link_task', None):
                    if not isinstance(obj.link_task, stlink_task):
                        tmp.append(x)
        # link task and flags
        if getattr(y, 'link_task', None):
            # library name without its directory prefix
            link_name = y.target[y.target.rfind(os.sep) + 1:]
            if isinstance(y.link_task, stlink_task):
                env.append_value('STLIB', [link_name])
            else:
                # some linkers can link against programs
                env.append_value('LIB', [link_name])
            # the order
            self.link_task.set_run_after(y.link_task)
            # for the recompilation
            self.link_task.dep_nodes += y.link_task.outputs
            # add the link path too
            tmp_path = y.link_task.outputs[0].parent.bldpath()
            if not tmp_path in env['LIBPATH']:
                env.prepend_value('LIBPATH', [tmp_path])
        # add ancestors uselib too - but only propagate those that have no staticlib defined
        for v in self.to_list(getattr(y, 'uselib', [])):
            if v not in seen_uselib:
                seen_uselib.add(v)
                if not env['STLIB_' + v]:
                    if not v in self.uselib:
                        self.uselib.insert(0, v)
        # if the library task generator provides 'export_includes', add to the include path
        # the export_includes must be a list of paths relative to the other library
        if getattr(y, 'export_includes', None):
            self.includes.extend(y.to_incnodes(y.export_includes))
@TaskGen.feature('cprogram', 'cxxprogram', 'cstlib', 'cxxstlib', 'cshlib', 'cxxshlib', 'dprogram', 'dstlib', 'dshlib')
@TaskGen.after('apply_link')
def apply_objdeps(self):
    "add the .o files produced by some other object files in the same manner as uselib_local"
    names = getattr(self, 'add_objects', [])
    if not names:
        return
    names = self.to_list(names)
    get = self.bld.get_tgen_by_name
    seen = []
    # depth-first traversal: ancestors are prepended so their objects
    # are processed before the task generator that depends on them
    while names:
        x = names[0]
        # visit dependencies only once
        if x in seen:
            names = names[1:]
            continue
        # object does not exist ?
        y = get(x)
        # object has ancestors to process first ? update the list of names
        if getattr(y, 'add_objects', None):
            added = 0
            lst = y.to_list(y.add_objects)
            lst.reverse()
            for u in lst:
                if u in seen: continue
                added = 1
                names = [u]+names
            if added: continue # list of names modified, loop
        # safe to process the current object
        y.post()
        seen.append(x)
        for t in getattr(y, 'compiled_tasks', []):
            self.link_task.inputs.extend(t.outputs)
@TaskGen.after('apply_link')
def process_obj_files(self):
    """Add the nodes listed in 'obj_files' as inputs of the link task (waf 1.5)."""
    if not hasattr(self, 'obj_files'):
        return
    for x in self.obj_files:
        node = self.path.find_resource(x)
        self.link_task.inputs.append(node)
@TaskGen.taskgen_method
def add_obj_file(self, file):
    """Small example on how to link object files as if they were source
    obj = bld.create_obj('cc')
    obj.add_obj_file('foo.o')"""
    if not hasattr(self, 'obj_files'): self.obj_files = []
    # make sure process_obj_files runs for this task generator
    if not 'process_obj_files' in self.meths: self.meths.append('process_obj_files')
    self.obj_files.append(file)
old_define = Configure.ConfigurationContext.__dict__['define']
@Configure.conf
def define(self, key, val, quote=True):
    """Wrap ConfigurationContext.define: also mirror HAVE_* keys into env (waf 1.5)."""
    old_define(self, key, val, quote)
    if key.startswith('HAVE_'):
        self.env[key] = 1
old_undefine = Configure.ConfigurationContext.__dict__['undefine']
@Configure.conf
def undefine(self, key):
    """Wrap ConfigurationContext.undefine: also clear HAVE_* keys in env (waf 1.5)."""
    old_undefine(self, key)
    if key.startswith('HAVE_'):
        self.env[key] = 0
# some people might want to use export_incdirs, but it was renamed
def set_incdirs(self, val):
    """Setter mapping the waf 1.5 'export_incdirs' attribute to 'export_includes'."""
    Logs.warn('compat: change "export_incdirs" by "export_includes"')
    self.export_includes = val
# write-only property: reading export_incdirs is not supported
TaskGen.task_gen.export_incdirs = property(None, set_incdirs)
def install_dir(self, path):
    """waf 1.5 helper: create (or remove during uninstall) the directory *path*
    after substituting env variables; returns [] for an empty path."""
    if not path:
        return []
    destpath = Utils.subst_vars(path, self.env)
    if self.is_install > 0:
        Logs.info('* creating %s' % destpath)
        Utils.check_dir(destpath)
    elif self.is_install < 0:
        Logs.info('* removing %s' % destpath)
        try:
            os.remove(destpath)
        except OSError:
            # best-effort removal: ignore missing/non-empty directories
            pass
Build.BuildContext.install_dir = install_dir
@@ -0,0 +1,547 @@ | |||||
#! /usr/bin/env python | |||||
# -*- encoding: utf-8 -*- | |||||
# Michel Mooij, michel.mooij7@gmail.com | |||||
""" | |||||
Tool Description | |||||
================ | |||||
This module provides a waf wrapper (i.e. waftool) around the C/C++ source code | |||||
checking tool 'cppcheck'. | |||||
See http://cppcheck.sourceforge.net/ for more information on the cppcheck tool | |||||
itself. | |||||
Note that many linux distributions already provide a ready to install version | |||||
of cppcheck. On fedora, for instance, it can be installed using yum: | |||||
'sudo yum install cppcheck' | |||||
Usage | |||||
===== | |||||
In order to use this waftool simply add it to the 'options' and 'configure' | |||||
functions of your main waf script as shown in the example below: | |||||
def options(opt): | |||||
opt.load('cppcheck', tooldir='./waftools') | |||||
def configure(conf): | |||||
conf.load('cppcheck') | |||||
Note that example shown above assumes that the cppcheck waftool is located in | |||||
the sub directory named 'waftools'. | |||||
When configured as shown in the example above, cppcheck will automatically | |||||
perform a source code analysis on all C/C++ build tasks that have been | |||||
defined in your waf build system. | |||||
The example shown below for a C program will be used as input for cppcheck when | |||||
building the task. | |||||
def build(bld): | |||||
bld.program(name='foo', src='foobar.c') | |||||
The result of the source code analysis will be stored both as xml and html | |||||
files in the build location for the task. Should any error be detected by | |||||
cppcheck the build will be aborted and a link to the html report will be shown. | |||||
When needed source code checking by cppcheck can be disabled per task, per | |||||
detected error or warning for a particular task. It can be also be disabled for | |||||
all tasks. | |||||
In order to exclude a task from source code checking add the skip option to the | |||||
task as shown below: | |||||
def build(bld): | |||||
bld.program( | |||||
name='foo', | |||||
src='foobar.c' | |||||
cppcheck_skip=True | |||||
) | |||||
When needed problems detected by cppcheck may be suppressed using a file | |||||
containing a list of suppression rules. The relative or absolute path to this | |||||
file can be added to the build task as shown in the example below: | |||||
bld.program( | |||||
name='bar', | |||||
src='foobar.c', | |||||
cppcheck_suppress='bar.suppress' | |||||
) | |||||
A cppcheck suppress file should contain one suppress rule per line. Each of | |||||
these rules will be passed as an '--suppress=<rule>' argument to cppcheck. | |||||
Dependencies | |||||
================ | |||||
This waftool depends on the python pygments module, it is used for source code | |||||
syntax highlighting when creating the html reports. see http://pygments.org/ for | |||||
more information on this package. | |||||
Remarks | |||||
================ | |||||
The generation of the html report is originally based on the cppcheck-htmlreport.py | |||||
script that comes shipped with the cppcheck tool. | |||||
""" | |||||
import os, sys | |||||
import xml.etree.ElementTree as ElementTree | |||||
from waflib import Task, TaskGen, Logs, Context | |||||
PYGMENTS_EXC_MSG= ''' | |||||
The required module 'pygments' could not be found. Please install it using your | |||||
platform package manager (e.g. apt-get or yum), using 'pip' or 'easy_install', | |||||
see 'http://pygments.org/download/' for installation instructions. | |||||
''' | |||||
try: | |||||
import pygments | |||||
from pygments import formatters, lexers | |||||
except ImportError as e: | |||||
Logs.warn(PYGMENTS_EXC_MSG) | |||||
raise e | |||||
def options(opt):
    """Add the cppcheck command-line options to the option parser."""
    opt.add_option('--cppcheck-skip', dest='cppcheck_skip',
        default=False, action='store_true',
        help='do not check C/C++ sources (default=False)')
    opt.add_option('--cppcheck-err-resume', dest='cppcheck_err_resume',
        default=False, action='store_true',
        help='continue in case of errors (default=False)')
    opt.add_option('--cppcheck-bin-enable', dest='cppcheck_bin_enable',
        default='warning,performance,portability,style,unusedFunction', action='store',
        help="cppcheck option '--enable=' for binaries (default=warning,performance,portability,style,unusedFunction)")
    opt.add_option('--cppcheck-lib-enable', dest='cppcheck_lib_enable',
        default='warning,performance,portability,style', action='store',
        help="cppcheck option '--enable=' for libraries (default=warning,performance,portability,style)")
    opt.add_option('--cppcheck-std-c', dest='cppcheck_std_c',
        default='c99', action='store',
        help='cppcheck standard to use when checking C (default=c99)')
    opt.add_option('--cppcheck-std-cxx', dest='cppcheck_std_cxx',
        default='c++03', action='store',
        help='cppcheck standard to use when checking C++ (default=c++03)')
    opt.add_option('--cppcheck-check-config', dest='cppcheck_check_config',
        default=False, action='store_true',
        help='forced check for missing buildin include files, e.g. stdio.h (default=False)')
    opt.add_option('--cppcheck-max-configs', dest='cppcheck_max_configs',
        default='20', action='store',
        help='maximum preprocessor (--max-configs) define iterations (default=20)')
def configure(conf):
    """Store the cppcheck settings in the environment and locate the tool."""
    if conf.options.cppcheck_skip:
        conf.env.CPPCHECK_SKIP = [True]
    conf.env.CPPCHECK_STD_C = conf.options.cppcheck_std_c
    conf.env.CPPCHECK_STD_CXX = conf.options.cppcheck_std_cxx
    conf.env.CPPCHECK_MAX_CONFIGS = conf.options.cppcheck_max_configs
    conf.env.CPPCHECK_BIN_ENABLE = conf.options.cppcheck_bin_enable
    conf.env.CPPCHECK_LIB_ENABLE = conf.options.cppcheck_lib_enable
    # fails the configuration when the cppcheck executable is not found
    conf.find_program('cppcheck', var='CPPCHECK')
@TaskGen.feature('c')
@TaskGen.feature('cxx')
def cppcheck_execute(self):
    """Create a cppcheck task for each C/C++ task generator unless skipped."""
    # skipping can be requested at configure time, on the command line,
    # or per task generator via the 'cppcheck_skip' attribute
    if len(self.env.CPPCHECK_SKIP) or self.bld.options.cppcheck_skip:
        return
    if getattr(self, 'cppcheck_skip', False):
        return
    task = self.create_task('cppcheck')
    task.cmd = _tgen_create_cmd(self)
    task.fatal = []
    if not self.bld.options.cppcheck_err_resume:
        # severities listed in task.fatal abort the build when detected
        task.fatal.append('error')
def _tgen_create_cmd(self):
    """Build the cppcheck command line string for the given task generator."""
    features = getattr(self, 'features', [])
    std_c = self.env.CPPCHECK_STD_C
    std_cxx = self.env.CPPCHECK_STD_CXX
    max_configs = self.env.CPPCHECK_MAX_CONFIGS
    bin_enable = self.env.CPPCHECK_BIN_ENABLE
    lib_enable = self.env.CPPCHECK_LIB_ENABLE
    cmd = '%s' % self.env.CPPCHECK
    args = ['--inconclusive','--report-progress','--verbose','--xml','--xml-version=2']
    args.append('--max-configs=%s' % max_configs)
    if 'cxx' in features:
        args.append('--language=c++')
        args.append('--std=%s' % std_cxx)
    else:
        args.append('--language=c')
        args.append('--std=%s' % std_c)
    if self.bld.options.cppcheck_check_config:
        args.append('--check-config')
    # binaries get the more extensive check set (e.g. unusedFunction)
    if set(['cprogram','cxxprogram']) & set(features):
        args.append('--enable=%s' % bin_enable)
    else:
        args.append('--enable=%s' % lib_enable)
    for src in self.to_list(getattr(self, 'source', [])):
        args.append('%r' % src)
    for inc in self.to_incnodes(self.to_list(getattr(self, 'includes', []))):
        args.append('-I%r' % inc)
    for inc in self.to_incnodes(self.to_list(self.env.INCLUDES)):
        args.append('-I%r' % inc)
    return '%s %s' % (cmd, ' '.join(args))
class cppcheck(Task.Task): | |||||
quiet = True | |||||
def run(self): | |||||
stderr = self.generator.bld.cmd_and_log(self.cmd, quiet=Context.STDERR, output=Context.STDERR) | |||||
self._save_xml_report(stderr) | |||||
defects = self._get_defects(stderr) | |||||
index = self._create_html_report(defects) | |||||
self._errors_evaluate(defects, index) | |||||
return 0 | |||||
def _save_xml_report(self, s): | |||||
'''use cppcheck xml result string, add the command string used to invoke cppcheck | |||||
and save as xml file. | |||||
''' | |||||
header = '%s\n' % s.splitlines()[0] | |||||
root = ElementTree.fromstring(s) | |||||
cmd = ElementTree.SubElement(root.find('cppcheck'), 'cmd') | |||||
cmd.text = str(self.cmd) | |||||
body = ElementTree.tostring(root) | |||||
node = self.generator.path.get_bld().find_or_declare('cppcheck.xml') | |||||
node.write(header + body) | |||||
def _get_defects(self, xml_string): | |||||
'''evaluate the xml string returned by cppcheck (on sdterr) and use it to create | |||||
a list of defects. | |||||
''' | |||||
defects = [] | |||||
for error in ElementTree.fromstring(xml_string).iter('error'): | |||||
defect = {} | |||||
defect['id'] = error.get('id') | |||||
defect['severity'] = error.get('severity') | |||||
defect['msg'] = str(error.get('msg')).replace('<','<') | |||||
defect['verbose'] = error.get('verbose') | |||||
for location in error.findall('location'): | |||||
defect['file'] = location.get('file') | |||||
defect['line'] = str(int(location.get('line')) - 1) | |||||
defects.append(defect) | |||||
return defects | |||||
def _create_html_report(self, defects): | |||||
files, css_style_defs = self._create_html_files(defects) | |||||
index = self._create_html_index(files) | |||||
self._create_css_file(css_style_defs) | |||||
return index | |||||
def _create_html_files(self, defects): | |||||
sources = {} | |||||
defects = [defect for defect in defects if defect.has_key('file')] | |||||
for defect in defects: | |||||
name = defect['file'] | |||||
if not sources.has_key(name): | |||||
sources[name] = [defect] | |||||
else: | |||||
sources[name].append(defect) | |||||
files = {} | |||||
css_style_defs = None | |||||
bpath = self.generator.path.get_bld().abspath() | |||||
names = sources.keys() | |||||
for i in range(0,len(names)): | |||||
name = names[i] | |||||
htmlfile = 'cppcheck/%i.html' % (i) | |||||
errors = sources[name] | |||||
files[name] = { 'htmlfile': '%s/%s' % (bpath, htmlfile), 'errors': errors } | |||||
css_style_defs = self._create_html_file(name, htmlfile, errors) | |||||
return files, css_style_defs | |||||
def _create_html_file(self, sourcefile, htmlfile, errors): | |||||
name = self.generator.get_name() | |||||
root = ElementTree.fromstring(CPPCHECK_HTML_FILE) | |||||
title = root.find('head/title') | |||||
title.text = 'cppcheck - report - %s' % name | |||||
body = root.find('body') | |||||
for div in body.findall('div'): | |||||
if div.get('id') == 'page': | |||||
page = div | |||||
break | |||||
for div in page.findall('div'): | |||||
if div.get('id') == 'header': | |||||
h1 = div.find('h1') | |||||
h1.text = 'cppcheck report - %s' % name | |||||
if div.get('id') == 'content': | |||||
content = div | |||||
srcnode = self.generator.bld.root.find_node(sourcefile) | |||||
hl_lines = [e['line'] for e in errors if e.has_key('line')] | |||||
formatter = CppcheckHtmlFormatter(linenos=True, style='colorful', hl_lines=hl_lines, lineanchors='line') | |||||
formatter.errors = [e for e in errors if e.has_key('line')] | |||||
css_style_defs = formatter.get_style_defs('.highlight') | |||||
lexer = pygments.lexers.guess_lexer_for_filename(sourcefile, "") | |||||
s = pygments.highlight(srcnode.read(), lexer, formatter) | |||||
table = ElementTree.fromstring(s) | |||||
content.append(table) | |||||
s = ElementTree.tostring(root, method='html') | |||||
s = CCPCHECK_HTML_TYPE + s | |||||
node = self.generator.path.get_bld().find_or_declare(htmlfile) | |||||
node.write(s) | |||||
return css_style_defs | |||||
def _create_html_index(self, files): | |||||
name = self.generator.get_name() | |||||
root = ElementTree.fromstring(CPPCHECK_HTML_FILE) | |||||
title = root.find('head/title') | |||||
title.text = 'cppcheck - report - %s' % name | |||||
body = root.find('body') | |||||
for div in body.findall('div'): | |||||
if div.get('id') == 'page': | |||||
page = div | |||||
break | |||||
for div in page.findall('div'): | |||||
if div.get('id') == 'header': | |||||
h1 = div.find('h1') | |||||
h1.text = 'cppcheck report - %s' % name | |||||
if div.get('id') == 'content': | |||||
content = div | |||||
self._create_html_table(content, files) | |||||
s = ElementTree.tostring(root, method='html') | |||||
s = CCPCHECK_HTML_TYPE + s | |||||
node = self.generator.path.get_bld().find_or_declare('cppcheck/index.html') | |||||
node.write(s) | |||||
return node | |||||
def _create_html_table(self, content, files): | |||||
table = ElementTree.fromstring(CPPCHECK_HTML_TABLE) | |||||
for name, val in files.items(): | |||||
f = val['htmlfile'] | |||||
s = '<tr><td colspan="4"><a href="%s">%s</a></td></tr>\n' % (f,name) | |||||
row = ElementTree.fromstring(s) | |||||
table.append(row) | |||||
errors = sorted(val['errors'], key=lambda e: int(e['line']) if e.has_key('line') else sys.maxint) | |||||
for e in errors: | |||||
if not e.has_key('line'): | |||||
s = '<tr><td></td><td>%s</td><td>%s</td><td>%s</td></tr>\n' % (e['id'], e['severity'], e['msg']) | |||||
else: | |||||
attr = '' | |||||
if e['severity'] == 'error': | |||||
attr = 'class="error"' | |||||
s = '<tr><td><a href="%s#line-%s">%s</a></td>' % (f, e['line'], e['line']) | |||||
s+= '<td>%s</td><td>%s</td><td %s>%s</td></tr>\n' % (e['id'], e['severity'], attr, e['msg']) | |||||
row = ElementTree.fromstring(s) | |||||
table.append(row) | |||||
content.append(table) | |||||
def _create_css_file(self, css_style_defs):
	"""Write 'cppcheck/style.css' into the build directory.

	Emits the static CPPCHECK_CSS_FILE content, optionally extended with
	the pygments style definitions generated for the source pages.

	:param css_style_defs: extra CSS rules (string), may be empty/None
	"""
	stylesheet = str(CPPCHECK_CSS_FILE)
	if css_style_defs:
		stylesheet = "%s\n%s\n" % (stylesheet, css_style_defs)
	out = self.generator.path.get_bld().find_or_declare('cppcheck/style.css')
	out.write(stylesheet)
def _errors_evaluate(self, errors, http_index):
	"""Evaluate the collected defects and report them to the user.

	Aborts the build when at least one defect has a severity listed in
	``self.fatal``; otherwise logs an error when any non-informational
	problem was found.

	:param errors: list of defect dicts, each with a 'severity' key
	:param http_index: path of the generated index.html report
	:raises: via ``bld.fatal()`` when a fatal severity is present
	"""
	name = self.generator.get_name()
	fatal = self.fatal
	severity = [err['severity'] for err in errors]
	problems = [err for err in errors if err['severity'] != 'information']
	if set(fatal) & set(severity):
		exc = "\n"
		# fixed: message misspelled 'cppcheck' as 'ccpcheck'; %r wrapped the
		# path in quotes, producing a broken file:// URL -- use %s
		exc += "\ncppcheck detected fatal error(s) in task '%s', see report for details:" % name
		exc += "\n file://%s" % (http_index)
		exc += "\n"
		self.generator.bld.fatal(exc)
	elif len(problems):
		msg = "\ncppcheck detected (possible) problem(s) in task '%s', see report for details:" % name
		msg += "\n file://%s" % http_index
		msg += "\n"
		Logs.error(msg)
class CppcheckHtmlFormatter(pygments.formatters.HtmlFormatter):
	"""Pygments HTML formatter that appends cppcheck defect annotations.

	``errors`` is expected to be assigned (per instance) before formatting;
	each entry needs 'line' and 'msg' keys.
	"""
	errors = []

	def wrap(self, source, outfile):
		"""Yield the wrapped tokens, annotating flagged source lines."""
		current_line = 1
		for kind, text in super(CppcheckHtmlFormatter, self).wrap(source, outfile):
			# kind == 1 marks an actual source code line; splice the error
			# annotation in before the trailing newline
			if kind == 1:
				for err in self.errors:
					if int(err['line']) == current_line:
						text = text.replace('\n', CPPCHECK_HTML_ERROR % err['msg'])
				current_line += 1
			yield kind, text
# Doctype line prepended to every generated HTML report page.
# NOTE(review): 'CCP' instead of 'CPP' is a typo in this name; it is kept
# as-is because the report methods above reference it by this spelling.
CCPCHECK_HTML_TYPE = \
'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">\n'
# Page template for the report pages; the 'XXX' placeholders in the title
# and header are replaced with the task generator name at generation time.
CPPCHECK_HTML_FILE = """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd" [<!ENTITY nbsp " ">]>
<html>
<head>
<title>cppcheck - report - XXX</title>
<link href="style.css" rel="stylesheet" type="text/css" />
<style type="text/css">
</style>
</head>
<body class="body">
<div id="page-header">&nbsp;</div>
<div id="page">
<div id="header">
<h1>cppcheck report - XXX</h1>
</div>
<div id="menu">
<a href="index.html">Defect list</a>
</div>
<div id="content">
</div>
<div id="footer">
<div>cppcheck - a tool for static C/C++ code analysis</div>
<div>
Internet: <a href="http://cppcheck.sourceforge.net">http://cppcheck.sourceforge.net</a><br/>
Forum: <a href="http://apps.sourceforge.net/phpbb/cppcheck/">http://apps.sourceforge.net/phpbb/cppcheck/</a><br/>
IRC: #cppcheck at irc.freenode.net
</div>
&nbsp;
</div>
&nbsp;
</div>
<div id="page-footer">&nbsp;</div>
</body>
</html>
"""
# Empty defect table appended to the 'content' div of index.html; one row
# group is added per checked file plus one row per defect.
CPPCHECK_HTML_TABLE = """
<table>
<tr>
<th>Line</th>
<th>Id</th>
<th>Severity</th>
<th>Message</th>
</tr>
</table>
"""
# Inline annotation spliced into a highlighted source line that triggered a
# defect; %s receives the defect message.
CPPCHECK_HTML_ERROR = \
'<span style="background: #ffaaaa;padding: 3px;">&lt;--- %s</span>\n'
# Static stylesheet written to 'cppcheck/style.css'; pygments style
# definitions may be appended to it (see _create_css_file).
CPPCHECK_CSS_FILE = """
body.body {
font-family: Arial;
font-size: 13px;
background-color: black;
padding: 0px;
margin: 0px;
}
.error {
font-family: Arial;
font-size: 13px;
background-color: #ffb7b7;
padding: 0px;
margin: 0px;
}
th, td {
min-width: 100px;
text-align: left;
}
#page-header {
clear: both;
width: 1200px;
margin: 20px auto 0px auto;
height: 10px;
border-bottom-width: 2px;
border-bottom-style: solid;
border-bottom-color: #aaaaaa;
}
#page {
width: 1160px;
margin: auto;
border-left-width: 2px;
border-left-style: solid;
border-left-color: #aaaaaa;
border-right-width: 2px;
border-right-style: solid;
border-right-color: #aaaaaa;
background-color: White;
padding: 20px;
}
#page-footer {
clear: both;
width: 1200px;
margin: auto;
height: 10px;
border-top-width: 2px;
border-top-style: solid;
border-top-color: #aaaaaa;
}
#header {
width: 100%;
height: 70px;
background-image: url(logo.png);
background-repeat: no-repeat;
background-position: left top;
border-bottom-style: solid;
border-bottom-width: thin;
border-bottom-color: #aaaaaa;
}
#menu {
margin-top: 5px;
text-align: left;
float: left;
width: 100px;
height: 300px;
}
#menu > a {
margin-left: 10px;
display: block;
}
#content {
float: left;
width: 1020px;
margin: 5px;
padding: 0px 10px 10px 10px;
border-left-style: solid;
border-left-width: thin;
border-left-color: #aaaaaa;
}
#footer {
padding-bottom: 5px;
padding-top: 5px;
border-top-style: solid;
border-top-width: thin;
border-top-color: #aaaaaa;
clear: both;
font-size: 10px;
}
#footer > div {
float: left;
width: 33%;
}
"""
# @@ -0,0 +1,224 @@ -- stray diff hunk header left over from concatenating
# the files; the cpplint tool (originally a separate file) starts below.
#! /usr/bin/env python | |||||
# encoding: utf-8 | |||||
# | |||||
# written by Sylvain Rouquette, 2014 | |||||
''' | |||||
This is an extra tool, not bundled with the default waf binary. | |||||
To add the cpplint tool to the waf file: | |||||
$ ./waf-light --tools=compat15,cpplint | |||||
or, if you have waf >= 1.6.2 | |||||
$ ./waf update --files=cpplint | |||||
this tool also requires cpplint for python. | |||||
If you have PIP, you can install it like this: pip install cpplint | |||||
But I'd recommend getting the latest version from the SVN, | |||||
the PIP version is outdated. | |||||
https://code.google.com/p/google-styleguide/source/browse/trunk/cpplint/cpplint.py | |||||
Apply this patch if you want to run it with Python 3: | |||||
https://code.google.com/p/google-styleguide/issues/detail?id=19 | |||||
When using this tool, the wscript will look like: | |||||
def options(opt): | |||||
opt.load('compiler_cxx cpplint') | |||||
def configure(conf): | |||||
conf.load('compiler_cxx cpplint') | |||||
# optional, you can also specify them on the command line | |||||
conf.env.CPPLINT_FILTERS = ','.join(( | |||||
'-whitespace/newline', # c++11 lambda | |||||
'-readability/braces', # c++11 constructor | |||||
'-whitespace/braces', # c++11 constructor | |||||
'-build/storage_class', # c++11 for-range | |||||
'-whitespace/blank_line', # user pref | |||||
'-whitespace/labels' # user pref | |||||
)) | |||||
def build(bld): | |||||
bld(features='cpplint', source='main.cpp', target='app') | |||||
# add include files, because they aren't usually built | |||||
bld(features='cpplint', source=bld.path.ant_glob('**/*.hpp')) | |||||
''' | |||||
import sys, re
import logging
import threading
from waflib import Task, Build, TaskGen, Logs, Utils
# cpplint is optional at import time: configure() below detects a missing
# module and sets CPPLINT_SKIP, so an ImportError here must not prevent
# this tool from loading.
try:
	from cpplint.cpplint import ProcessFile, _cpplint_state
except ImportError:
	pass
# Count of defects at or above the --cpplint-break confidence threshold;
# the cpplint task returns it, so a non-zero count fails the build.
critical_errors = 0
# logging format used when output format is 'waf'
CPPLINT_FORMAT = '[CPPLINT] %(filename)s:\nline %(linenum)s, severity %(confidence)s, category: %(category)s\n%(message)s\n'
# Patterns for parsing cpplint output in its supported emit formats.
# Raw strings are used so '\d' is a regex escape rather than a deprecated
# string escape sequence (a stray trailing semicolon was also removed).
RE_EMACS = re.compile(r'(?P<filename>.*):(?P<linenum>\d+): (?P<message>.*) \[(?P<category>.*)\] \[(?P<confidence>\d+)\]')
CPPLINT_RE = {
	'waf': RE_EMACS,
	'emacs': RE_EMACS,
	'vs7': re.compile(r'(?P<filename>.*)\((?P<linenum>\d+)\): (?P<message>.*) \[(?P<category>.*)\] \[(?P<confidence>\d+)\]'),
	'eclipse': re.compile(r'(?P<filename>.*):(?P<linenum>\d+): warning: (?P<message>.*) \[(?P<category>.*)\] \[(?P<confidence>\d+)\]'),
}
def init_env_from_options(env):
	"""Copy the CPPLINT_* command line options into the environment.

	Values already present in ``env`` (e.g. set during configure) take
	precedence over the command line. The chosen output format is also
	propagated to cpplint's global state when it is not 'waf'.

	:param env: the ConfigSet to populate
	"""
	from waflib.Options import options
	for name, val in options.__dict__.items():
		if name.startswith('CPPLINT_') and not env[name]:
			env[name] = val
	if env.CPPLINT_OUTPUT != 'waf':
		_cpplint_state.output_format = env.CPPLINT_OUTPUT
def options(opt):
	"""Register the cpplint command line options.

	:param opt: the option context
	"""
	opt.add_option('--cpplint-filters', type='string', default='',
	               dest='CPPLINT_FILTERS',
	               help='add filters to cpplint')
	opt.add_option('--cpplint-level', type='int', default=1,
	               dest='CPPLINT_LEVEL',
	               help='specify the log level (default: 1)')
	opt.add_option('--cpplint-break', type='int', default=5,
	               dest='CPPLINT_BREAK',
	               help='break the build if error >= level (default: 5)')
	opt.add_option('--cpplint-skip', action='store_true', default=False,
	               dest='CPPLINT_SKIP',
	               help='skip cpplint during build')
	opt.add_option('--cpplint-output', type='string', default='waf',
	               dest='CPPLINT_OUTPUT',
	               help='select output format (waf, emacs, vs7)')
def configure(conf):
	"""Check that the cpplint python module is importable.

	When it is missing, CPPLINT_SKIP is set so the build silently skips
	the lint tasks instead of failing.

	:param conf: the configuration context
	"""
	conf.start_msg('Checking cpplint')
	try:
		__import__('cpplint')
	except ImportError:
		conf.env.CPPLINT_SKIP = True
		conf.end_msg('not found, skipping it.')
	else:
		conf.end_msg('ok')
class cpplint_formatter(Logs.formatter):
	"""Log formatter that rewrites raw cpplint output into CPPLINT_FORMAT."""
	def __init__(self, fmt):
		# Deliberately call logging.Formatter.__init__ (not the waf
		# Logs.formatter one) so CPPLINT_FORMAT is always the format string;
		# 'fmt' only selects which input regex to parse records with.
		logging.Formatter.__init__(self, CPPLINT_FORMAT)
		self.fmt = fmt
	def format(self, rec):
		# Reformat waf-style records; others pass through unchanged.
		if self.fmt == 'waf':
			# NOTE(review): assumes rec.msg matches RE_EMACS -- a non-matching
			# message would raise AttributeError on None.groupdict(); verify
			# that cpplint_wrapper.write() only forwards matching lines.
			result = CPPLINT_RE[self.fmt].match(rec.msg).groupdict()
			rec.msg = CPPLINT_FORMAT % result
		# colorize informational (low-severity) records
		if rec.levelno <= logging.INFO:
			rec.c1 = Logs.colors.CYAN
		return super(cpplint_formatter, self).format(rec)
class cpplint_handler(Logs.log_handler):
	"""Log handler that emits every record on one fixed stream (stderr)."""
	def __init__(self, stream=sys.stderr, **kw):
		super(cpplint_handler, self).__init__(stream, **kw)
		# remember the stream so emit() can pin each record to it
		self.stream = stream
	def emit(self, rec):
		# Force the record onto our stream instead of letting waf's
		# log_handler choose stdout/stderr per record, then flush so lint
		# output appears immediately.
		rec.stream = self.stream
		self.emit_override(rec)
		self.flush()
class cpplint_wrapper(object):
	"""Context manager capturing sys.stderr while cpplint tasks run.

	cpplint prints its findings to stderr; while at least one task is
	inside the context, stderr is replaced by this object so every line
	can be parsed, classified by confidence level and routed to the
	logger. Class-level state is shared so parallel tasks hijack and
	restore stderr exactly once.
	"""
	stream = None        # the real sys.stderr, saved while capturing
	tasks_count = 0      # number of tasks currently inside the context
	lock = threading.RLock()

	def __init__(self, logger, threshold, fmt):
		self.logger = logger
		self.threshold = threshold
		self.error_count = 0
		self.fmt = fmt

	def __enter__(self):
		# first task in: hijack stderr
		with cpplint_wrapper.lock:
			cpplint_wrapper.tasks_count += 1
			if cpplint_wrapper.tasks_count == 1:
				sys.stderr.flush()
				cpplint_wrapper.stream = sys.stderr
				sys.stderr = self
		return self

	def __exit__(self, exc_type, exc_value, traceback):
		# last task out: restore stderr
		with cpplint_wrapper.lock:
			cpplint_wrapper.tasks_count -= 1
			if cpplint_wrapper.tasks_count == 0:
				sys.stderr = cpplint_wrapper.stream
				sys.stderr.flush()

	def isatty(self):
		return True

	def write(self, message):
		"""Parse one cpplint output line and forward it to the logger."""
		global critical_errors
		match = CPPLINT_RE[self.fmt].match(message)
		if not match:
			# drop cpplint chatter that is not a defect line
			return
		level = int(match.groupdict()['confidence'])
		if level >= self.threshold:
			critical_errors += 1
		if level <= 2:
			emit = self.logger.info
		elif level <= 4:
			emit = self.logger.warning
		else:
			emit = self.logger.error
		emit(message)
# lazily created singleton logger for cpplint output
cpplint_logger = None
def get_cpplint_logger(fmt):
	"""Return the shared cpplint logger, creating it on first use.

	:param fmt: output format key ('waf', 'emacs', ...), passed on to the
	            formatter so it knows how to parse records
	"""
	global cpplint_logger
	if cpplint_logger is None:
		logger = logging.getLogger('cpplint')
		handler = cpplint_handler()
		handler.setFormatter(cpplint_formatter(fmt))
		logger.addHandler(handler)
		logger.setLevel(logging.DEBUG)
		cpplint_logger = logger
	return cpplint_logger
class cpplint(Task.Task):
	"""Run cpplint on one source file.

	run() returns the global count of 'critical' findings (confidence >=
	CPPLINT_BREAK), so a non-zero result makes the build fail.
	"""
	# the redundant __init__ override (it only delegated to super) was removed
	color = 'PINK'
	def run(self):
		global critical_errors
		_cpplint_state.SetFilters(self.env.CPPLINT_FILTERS)
		break_level = self.env.CPPLINT_BREAK
		verbosity = self.env.CPPLINT_LEVEL
		# capture stderr while cpplint processes the file (see cpplint_wrapper)
		with cpplint_wrapper(get_cpplint_logger(self.env.CPPLINT_OUTPUT),
		                     break_level, self.env.CPPLINT_OUTPUT):
			ProcessFile(self.inputs[0].abspath(), verbosity)
		return critical_errors
@TaskGen.extension('.h', '.hh', '.hpp', '.hxx')
def cpplint_includes(self, node):
	"""No-op extension hook for header files.

	Registering these extensions lets headers be listed as 'source' (per the
	module docstring: "add include files, because they aren't usually built")
	without waf trying to compile them; the cpplint feature picks them up.
	"""
	pass
@TaskGen.feature('cpplint')
@TaskGen.before_method('process_source')
def run_cpplint(self):
	"""Create one cpplint task per source file of the task generator.

	On first use the environment is populated from the command line
	options. Nothing is done when cpplint is skipped (module missing or
	--cpplint-skip given) or when an unknown output format was requested.
	"""
	if not self.env.CPPLINT_INITIALIZED:
		self.env.CPPLINT_INITIALIZED = True
		init_env_from_options(self.env)
	if self.env.CPPLINT_SKIP:
		return
	# idiomatic 'not in' replaces 'not x in y'
	if self.env.CPPLINT_OUTPUT not in CPPLINT_RE:
		return
	for src in self.to_list(getattr(self, 'source', [])):
		if isinstance(src, str):
			self.create_task('cpplint', self.path.find_or_declare(src))
		else:
			self.create_task('cpplint', src)