
Strip unnecessary tools and extras from waflib

tags/v1.9.11-RC1
Karl Linden, 9 years ago
commit a3ad1ff94b
No known key found for this signature in database. GPG Key ID: 1961C090EF542D56
100 changed files with 136 additions and 18059 deletions
  1. .wafupdaterc (+136, -0)
  2. waflib/Tools/asm.py (+0, -75)
  3. waflib/Tools/bison.py (+0, -49)
  4. waflib/Tools/compiler_d.py (+0, -79)
  5. waflib/Tools/compiler_fc.py (+0, -67)
  6. waflib/Tools/cs.py (+0, -222)
  7. waflib/Tools/d.py (+0, -97)
  8. waflib/Tools/d_config.py (+0, -63)
  9. waflib/Tools/d_scan.py (+0, -209)
  10. waflib/Tools/dbus.py (+0, -70)
  11. waflib/Tools/dmd.py (+0, -88)
  12. waflib/Tools/fc.py (+0, -198)
  13. waflib/Tools/fc_config.py (+0, -470)
  14. waflib/Tools/fc_scan.py (+0, -121)
  15. waflib/Tools/flex.py (+0, -53)
  16. waflib/Tools/g95.py (+0, -66)
  17. waflib/Tools/gas.py (+0, -18)
  18. waflib/Tools/gdc.py (+0, -60)
  19. waflib/Tools/gfortran.py (+0, -90)
  20. waflib/Tools/glib2.py (+0, -488)
  21. waflib/Tools/gnu_dirs.py (+0, -131)
  22. waflib/Tools/ifort.py (+0, -60)
  23. waflib/Tools/intltool.py (+0, -220)
  24. waflib/Tools/javaw.py (+0, -477)
  25. waflib/Tools/kde4.py (+0, -90)
  26. waflib/Tools/ldc2.py (+0, -59)
  27. waflib/Tools/lua.py (+0, -38)
  28. waflib/Tools/nasm.py (+0, -26)
  29. waflib/Tools/perl.py (+0, -163)
  30. waflib/Tools/python.py (+0, -594)
  31. waflib/Tools/qt4.py (+0, -692)
  32. waflib/Tools/qt5.py (+0, -744)
  33. waflib/Tools/ruby.py (+0, -193)
  34. waflib/Tools/tex.py (+0, -515)
  35. waflib/Tools/vala.py (+0, -335)
  36. waflib/Tools/winres.py (+0, -114)
  37. waflib/extras/add_objects.py (+0, -7)
  38. waflib/extras/biber.py (+0, -58)
  39. waflib/extras/bjam.py (+0, -131)
  40. waflib/extras/blender.py (+0, -111)
  41. waflib/extras/boo.py (+0, -81)
  42. waflib/extras/boost.py (+0, -411)
  43. waflib/extras/c_dumbpreproc.py (+0, -73)
  44. waflib/extras/cabal.py (+0, -156)
  45. waflib/extras/cfg_altoptions.py (+0, -110)
  46. waflib/extras/cfg_cross_gnu.py (+0, -147)
  47. waflib/extras/clang_compilation_database.py (+0, -66)
  48. waflib/extras/codelite.py (+0, -881)
  49. waflib/extras/color_gcc.py (+0, -39)
  50. waflib/extras/color_rvct.py (+0, -51)
  51. waflib/extras/compat15.py (+0, -370)
  52. waflib/extras/cppcheck.py (+0, -547)
  53. waflib/extras/cpplint.py (+0, -224)
  54. waflib/extras/cython.py (+0, -149)
  55. waflib/extras/dcc.py (+0, -72)
  56. waflib/extras/distnet.py (+0, -432)
  57. waflib/extras/doxygen.py (+0, -219)
  58. waflib/extras/dpapi.py (+0, -87)
  59. waflib/extras/eclipse.py (+0, -317)
  60. waflib/extras/erlang.py (+0, -19)
  61. waflib/extras/fc_bgxlf.py (+0, -33)
  62. waflib/extras/fc_cray.py (+0, -50)
  63. waflib/extras/fc_nag.py (+0, -59)
  64. waflib/extras/fc_nec.py (+0, -57)
  65. waflib/extras/fc_open64.py (+0, -56)
  66. waflib/extras/fc_pgfortran.py (+0, -65)
  67. waflib/extras/fc_solstudio.py (+0, -60)
  68. waflib/extras/fc_xlf.py (+0, -63)
  69. waflib/extras/file_to_object.py (+0, -150)
  70. waflib/extras/fluid.py (+0, -30)
  71. waflib/extras/freeimage.py (+0, -74)
  72. waflib/extras/fsb.py (+0, -31)
  73. waflib/extras/fsc.py (+0, -65)
  74. waflib/extras/gccdeps.py (+0, -206)
  75. waflib/extras/go.py (+0, -257)
  76. waflib/extras/gob2.py (+0, -17)
  77. waflib/extras/halide.py (+0, -150)
  78. waflib/extras/local_rpath.py (+0, -19)
  79. waflib/extras/make.py (+0, -142)
  80. waflib/extras/md5_tstamp.py (+0, -70)
  81. waflib/extras/mem_reducer.py (+0, -111)
  82. waflib/extras/midl.py (+0, -85)
  83. waflib/extras/misc.py (+0, -413)
  84. waflib/extras/msvcdeps.py (+0, -262)
  85. waflib/extras/msvs.py (+0, -1025)
  86. waflib/extras/netcache_client.py (+0, -383)
  87. waflib/extras/nobuild.py (+0, -24)
  88. waflib/extras/objcopy.py (+0, -54)
  89. waflib/extras/ocaml.py (+0, -331)
  90. waflib/extras/package.py (+0, -76)
  91. waflib/extras/parallel_debug.py (+0, -442)
  92. waflib/extras/pch.py (+0, -148)
  93. waflib/extras/pep8.py (+0, -106)
  94. waflib/extras/pgicc.py (+0, -68)
  95. waflib/extras/pgicxx.py (+0, -22)
  96. waflib/extras/prefork.py (+0, -402)
  97. waflib/extras/preforkjava.py (+0, -241)
  98. waflib/extras/preforkunix.py (+0, -318)
  99. waflib/extras/print_commands.py (+0, -46)
  100. waflib/extras/proc.py (+0, -56)

.wafupdaterc (+136, -0)

@@ -0,0 +1,136 @@
# This file is sourced by wafupdate when waf is updated.
# wafupdate home page: https://gitlab.com/lilrc/wafupdate

WAFLIB_STRIP_TOOLS="
asm
bison
compiler_d
compiler_fc
cs
d
d_config
d_scan
dbus
dmd
fc
fc_config
fc_scan
flex
g95
gas
gdc
gfortran
glib2
gnu_dirs
ifort
intltool
javaw
kde4
ldc2
lua
nasm
perl
python
qt4
qt5
ruby
tex
vala
winres
"

WAFLIB_STRIP_EXTRAS="
add_objects
biber
bjam
blender
boo
boost
c_dumbpreproc
cabal
cfg_altoptions
cfg_cross_gnu
clang_compilation_database
codelite
compat15
color_gcc
color_rvct
cppcheck
cpplint
cython
dcc
distnet
doxygen
dpapi
eclipse
erlang
fc_bgxlf
fc_cray
fc_nag
fc_nec
fc_open64
fc_pgfortran
fc_solstudio
fc_xlf
file_to_object
fluid
freeimage
fsb
fsc
gccdeps
go
gob2
halide
local_rpath
make
md5_tstamp
mem_reducer
midl
misc
msvcdeps
msvs
netcache_client
nobuild
objcopy
ocaml
package
parallel_debug
pch
pep8
pgicc
pgicxx
prefork
preforkjava
preforkunix
print_commands
proc
protoc
qnxnto
relocation
remote
resx
review
rst
run_do_script
run_m_script
run_py_script
run_r_script
sas
scala
slow_qt4
smart_continue
softlink_libs
stale
stracedeps
swig
syms
sync_exec
ticgt
unc
unity
use_config
valadoc
why
win32_opts
xcode
"

waflib/Tools/asm.py (+0, -75)

@@ -1,75 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2008-2010 (ita)

"""
Assembly support, used by tools such as gas and nasm

To declare targets using assembly::

	def configure(conf):
		conf.load('gcc gas')

	def build(bld):
		bld(
			features='c cstlib asm',
			source = 'test.S',
			target = 'asmtest')

		bld(
			features='asm asmprogram',
			source = 'test.S',
			target = 'asmtest')

Support for pure asm programs and libraries should also work::

	def configure(conf):
		conf.load('nasm')
		conf.find_program('ld', 'ASLINK')

	def build(bld):
		bld(
			features='asm asmprogram',
			source = 'test.S',
			target = 'asmtest')
"""

import os, sys
from waflib import Task, Utils
import waflib.Task
from waflib.Tools.ccroot import link_task, stlink_task
from waflib.TaskGen import extension, feature

class asm(Task.Task):
	"""
	Compile asm files by gas/nasm/yasm/...
	"""
	color = 'BLUE'
	run_str = '${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}'

@extension('.s', '.S', '.asm', '.ASM', '.spp', '.SPP')
def asm_hook(self, node):
	"""
	Bind the asm extension to the asm task

	:param node: input file
	:type node: :py:class:`waflib.Node.Node`
	"""
	return self.create_compiled_task('asm', node)

class asmprogram(link_task):
	"Link object files into a c program"
	run_str = '${ASLINK} ${ASLINKFLAGS} ${ASLNK_TGT_F}${TGT} ${ASLNK_SRC_F}${SRC}'
	ext_out = ['.bin']
	inst_to = '${BINDIR}'

class asmshlib(asmprogram):
	"Link object files into a c shared library"
	inst_to = '${LIBDIR}'

class asmstlib(stlink_task):
	"Link object files into a c static library"
	pass # do not remove

def configure(conf):
	conf.env['ASMPATH_ST'] = '-I%s'

waflib/Tools/bison.py (+0, -49)

@@ -1,49 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# John O'Meara, 2006
# Thomas Nagy 2009-2010 (ita)

"""
The **bison** program is a code generator which creates C or C++ files.
The generated files are compiled into object files.
"""

from waflib import Task
from waflib.TaskGen import extension

class bison(Task.Task):
	"""Compile bison files"""
	color = 'BLUE'
	run_str = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
	ext_out = ['.h'] # just to make sure

@extension('.y', '.yc', '.yy')
def big_bison(self, node):
	"""
	Create a bison task, which must be executed from the directory of the output file.
	"""
	has_h = '-d' in self.env['BISONFLAGS']

	outs = []
	if node.name.endswith('.yc'):
		outs.append(node.change_ext('.tab.cc'))
		if has_h:
			outs.append(node.change_ext('.tab.hh'))
	else:
		outs.append(node.change_ext('.tab.c'))
		if has_h:
			outs.append(node.change_ext('.tab.h'))

	tsk = self.create_task('bison', node, outs)
	tsk.cwd = node.parent.get_bld().abspath()

	# and the c/cxx file must be compiled too
	self.source.append(outs[0])

def configure(conf):
	"""
	Detect the *bison* program
	"""
	conf.find_program('bison', var='BISON')
	conf.env.BISONFLAGS = ['-d']
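
A hypothetical wscript sketch (not part of this commit) showing how the tool above was typically driven: loading 'bison' during configure makes big_bison() pick up .y sources, and the generated .tab.c is compiled along with the other C sources.

	def options(opt):
		opt.load('compiler_c')

	def configure(conf):
		conf.load('compiler_c bison')

	def build(bld):
		# parser.y is turned into parser.tab.c by the bison task above
		bld.program(source='parser.y main.c', target='calc')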


waflib/Tools/compiler_d.py (+0, -79)

@@ -1,79 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)
# Thomas Nagy, 2010 (ita)

"""
Try to detect a D compiler from the list of supported compilers::

	def options(opt):
		opt.load('compiler_d')
	def configure(cnf):
		cnf.load('compiler_d')
	def build(bld):
		bld.program(source='main.d', target='app')

Only three D compilers are really present at the moment:

* gdc
* dmd, the ldc compiler having a very similar command-line interface
* ldc2
"""

import os, sys, imp, types, re
from waflib import Utils, Configure, Options, Logs

d_compiler = {
	'default' : ['gdc', 'dmd', 'ldc2']
}
"""
Dict mapping the platform names to lists of names of D compilers to try, in order of preference::

	from waflib.Tools.compiler_d import d_compiler
	d_compiler['default'] = ['gdc', 'dmd', 'ldc2']
"""

def default_compilers():
	build_platform = Utils.unversioned_sys_platform()
	possible_compiler_list = d_compiler.get(build_platform, d_compiler['default'])
	return ' '.join(possible_compiler_list)

def configure(conf):
	"""
	Try to find a suitable D compiler or raise a :py:class:`waflib.Errors.ConfigurationError`.
	"""
	try: test_for_compiler = conf.options.check_d_compiler or default_compilers()
	except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_d')")

	for compiler in re.split('[ ,]+', test_for_compiler):
		conf.env.stash()
		conf.start_msg('Checking for %r (D compiler)' % compiler)
		try:
			conf.load(compiler)
		except conf.errors.ConfigurationError as e:
			conf.env.revert()
			conf.end_msg(False)
			Logs.debug('compiler_d: %r' % e)
		else:
			if conf.env.D:
				conf.end_msg(conf.env.get_flat('D'))
				conf.env['COMPILER_D'] = compiler
				break
			conf.end_msg(False)
	else:
		conf.fatal('could not configure a D compiler!')

def options(opt):
	"""
	Restrict the compiler detection from the command-line::

		$ waf configure --check-d-compiler=dmd
	"""
	test_for_compiler = default_compilers()
	d_compiler_opts = opt.add_option_group('Configuration options')
	d_compiler_opts.add_option('--check-d-compiler', default=None,
		help='list of D compilers to try [%s]' % test_for_compiler, dest='check_d_compiler')

	for x in test_for_compiler.split():
		opt.load('%s' % x)


waflib/Tools/compiler_fc.py (+0, -67)

@@ -1,67 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8

import os, sys, imp, types, re
from waflib import Utils, Configure, Options, Logs, Errors
from waflib.Tools import fc

fc_compiler = {
	'win32'  : ['gfortran','ifort'],
	'darwin' : ['gfortran', 'g95', 'ifort'],
	'linux'  : ['gfortran', 'g95', 'ifort'],
	'java'   : ['gfortran', 'g95', 'ifort'],
	'default': ['gfortran'],
	'aix'    : ['gfortran']
}
"""
Dict mapping the platform names to lists of names of Fortran compilers to try, in order of preference::

	from waflib.Tools.compiler_fc import fc_compiler
	fc_compiler['linux'] = ['gfortran', 'g95', 'ifort']
"""

def default_compilers():
	build_platform = Utils.unversioned_sys_platform()
	possible_compiler_list = fc_compiler.get(build_platform, fc_compiler['default'])
	return ' '.join(possible_compiler_list)

def configure(conf):
	"""
	Try to find a suitable Fortran compiler or raise a :py:class:`waflib.Errors.ConfigurationError`.
	"""
	try: test_for_compiler = conf.options.check_fortran_compiler or default_compilers()
	except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_fc')")
	for compiler in re.split('[ ,]+', test_for_compiler):
		conf.env.stash()
		conf.start_msg('Checking for %r (Fortran compiler)' % compiler)
		try:
			conf.load(compiler)
		except conf.errors.ConfigurationError as e:
			conf.env.revert()
			conf.end_msg(False)
			Logs.debug('compiler_fortran: %r' % e)
		else:
			if conf.env['FC']:
				conf.end_msg(conf.env.get_flat('FC'))
				conf.env.COMPILER_FORTRAN = compiler
				break
			conf.end_msg(False)
	else:
		conf.fatal('could not configure a Fortran compiler!')

def options(opt):
	"""
	Restrict the compiler detection from the command-line::

		$ waf configure --check-fortran-compiler=ifort
	"""
	test_for_compiler = default_compilers()
	opt.load_special_tools('fc_*.py')
	fortran_compiler_opts = opt.add_option_group('Configuration options')
	fortran_compiler_opts.add_option('--check-fortran-compiler', default=None,
		help='list of Fortran compilers to try [%s]' % test_for_compiler,
		dest="check_fortran_compiler")

	for x in test_for_compiler.split():
		opt.load('%s' % x)


waflib/Tools/cs.py (+0, -222)

@@ -1,222 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)

"""
C# support. A simple example::

	def configure(conf):
		conf.load('cs')
	def build(bld):
		bld(features='cs', source='main.cs', gen='foo')

Note that the configuration may compile C# snippets::

	FRAG = '''
	namespace Moo {
		public class Test { public static int Main(string[] args) { return 0; } }
	}'''
	def configure(conf):
		conf.check(features='cs', fragment=FRAG, compile_filename='test.cs', gen='test.exe',
			bintype='exe', csflags=['-pkg:gtk-sharp-2.0'], msg='Checking for Gtksharp support')
"""

from waflib import Utils, Task, Options, Logs, Errors
from waflib.TaskGen import before_method, after_method, feature
from waflib.Tools import ccroot
from waflib.Configure import conf
import os, tempfile

ccroot.USELIB_VARS['cs'] = set(['CSFLAGS', 'ASSEMBLIES', 'RESOURCES'])
ccroot.lib_patterns['csshlib'] = ['%s']

@feature('cs')
@before_method('process_source')
def apply_cs(self):
	"""
	Create a C# task bound to the attribute *cs_task*. There can be only one C# task per task generator.
	"""
	cs_nodes = []
	no_nodes = []
	for x in self.to_nodes(self.source):
		if x.name.endswith('.cs'):
			cs_nodes.append(x)
		else:
			no_nodes.append(x)
	self.source = no_nodes

	bintype = getattr(self, 'bintype', self.gen.endswith('.dll') and 'library' or 'exe')
	self.cs_task = tsk = self.create_task('mcs', cs_nodes, self.path.find_or_declare(self.gen))
	tsk.env.CSTYPE = '/target:%s' % bintype
	tsk.env.OUT = '/out:%s' % tsk.outputs[0].abspath()
	self.env.append_value('CSFLAGS', '/platform:%s' % getattr(self, 'platform', 'anycpu'))

	inst_to = getattr(self, 'install_path', bintype=='exe' and '${BINDIR}' or '${LIBDIR}')
	if inst_to:
		# note: we are making a copy, so the files added to cs_task.outputs won't be installed automatically
		mod = getattr(self, 'chmod', bintype=='exe' and Utils.O755 or Utils.O644)
		self.install_task = self.bld.install_files(inst_to, self.cs_task.outputs[:], env=self.env, chmod=mod)

@feature('cs')
@after_method('apply_cs')
def use_cs(self):
	"""
	C# applications honor the **use** keyword::

		def build(bld):
			bld(features='cs', source='My.cs', bintype='library', gen='my.dll', name='mylib')
			bld(features='cs', source='Hi.cs', includes='.', bintype='exe', gen='hi.exe', use='mylib', name='hi')
	"""
	names = self.to_list(getattr(self, 'use', []))
	get = self.bld.get_tgen_by_name
	for x in names:
		try:
			y = get(x)
		except Errors.WafError:
			self.env.append_value('CSFLAGS', '/reference:%s' % x)
			continue
		y.post()

		tsk = getattr(y, 'cs_task', None) or getattr(y, 'link_task', None)
		if not tsk:
			self.bld.fatal('cs task has no link task for use %r' % self)
		self.cs_task.dep_nodes.extend(tsk.outputs) # dependency
		self.cs_task.set_run_after(tsk) # order (redundant, the order is inferred from the nodes inputs/outputs)
		self.env.append_value('CSFLAGS', '/reference:%s' % tsk.outputs[0].abspath())

@feature('cs')
@after_method('apply_cs', 'use_cs')
def debug_cs(self):
	"""
	The C# targets may create .mdb or .pdb files::

		def build(bld):
			bld(features='cs', source='My.cs', bintype='library', gen='my.dll', csdebug='full')
			# csdebug is a value in (True, 'full', 'pdbonly')
	"""
	csdebug = getattr(self, 'csdebug', self.env.CSDEBUG)
	if not csdebug:
		return

	node = self.cs_task.outputs[0]
	if self.env.CS_NAME == 'mono':
		out = node.parent.find_or_declare(node.name + '.mdb')
	else:
		out = node.change_ext('.pdb')
	self.cs_task.outputs.append(out)
	try:
		self.install_task.source.append(out)
	except AttributeError:
		pass

	if csdebug == 'pdbonly':
		val = ['/debug+', '/debug:pdbonly']
	elif csdebug == 'full':
		val = ['/debug+', '/debug:full']
	else:
		val = ['/debug-']
	self.env.append_value('CSFLAGS', val)


class mcs(Task.Task):
	"""
	Compile C# files
	"""
	color = 'YELLOW'
	run_str = '${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}'

	def exec_command(self, cmd, **kw):
		bld = self.generator.bld

		try:
			if not kw.get('cwd', None):
				kw['cwd'] = bld.cwd
		except AttributeError:
			bld.cwd = kw['cwd'] = bld.variant_dir

		try:
			tmp = None
			if isinstance(cmd, list) and len(' '.join(cmd)) >= 8192:
				program = cmd[0] # unquoted program name, otherwise exec_command will fail
				cmd = [self.quote_response_command(x) for x in cmd]
				(fd, tmp) = tempfile.mkstemp()
				os.write(fd, '\r\n'.join(i.replace('\\', '\\\\') for i in cmd[1:]).encode())
				os.close(fd)
				cmd = [program, '@' + tmp]
			# no return here, that's on purpose
			ret = self.generator.bld.exec_command(cmd, **kw)
		finally:
			if tmp:
				try:
					os.remove(tmp)
				except OSError:
					pass # anti-virus and indexers can keep the files open -_-
		return ret

	def quote_response_command(self, flag):
		# /noconfig is not allowed when using response files
		if flag.lower() == '/noconfig':
			return ''

		if flag.find(' ') > -1:
			for x in ('/r:', '/reference:', '/resource:', '/lib:', '/out:'):
				if flag.startswith(x):
					flag = '%s"%s"' % (x, '","'.join(flag[len(x):].split(',')))
					break
			else:
				flag = '"%s"' % flag
		return flag

def configure(conf):
	"""
	Find a C# compiler, set the variable MCS for the compiler and CS_NAME (mono or csc)
	"""
	csc = getattr(Options.options, 'cscbinary', None)
	if csc:
		conf.env.MCS = csc
	conf.find_program(['csc', 'mcs', 'gmcs'], var='MCS')
	conf.env.ASS_ST = '/r:%s'
	conf.env.RES_ST = '/resource:%s'

	conf.env.CS_NAME = 'csc'
	if str(conf.env.MCS).lower().find('mcs') > -1:
		conf.env.CS_NAME = 'mono'

def options(opt):
	"""
	Add a command-line option for the configuration::

		$ waf configure --with-csc-binary=/foo/bar/mcs
	"""
	opt.add_option('--with-csc-binary', type='string', dest='cscbinary')

class fake_csshlib(Task.Task):
	"""
	Task used for reading a foreign .net assembly and adding the dependency on it
	"""
	color = 'YELLOW'
	inst_to = None

	def runnable_status(self):
		for x in self.outputs:
			x.sig = Utils.h_file(x.abspath())
		return Task.SKIP_ME

@conf
def read_csshlib(self, name, paths=[]):
	"""
	Read a foreign .net assembly for the *use* system::

		def build(bld):
			bld.read_csshlib('ManagedLibrary.dll', paths=[bld.env.mylibrarypath])
			bld(features='cs', source='Hi.cs', bintype='exe', gen='hi.exe', use='ManagedLibrary.dll')

	:param name: Name of the library
	:type name: string
	:param paths: Folders in which the library may be found
	:type paths: list of string
	:return: A task generator having the feature *fake_lib* which will call :py:func:`waflib.Tools.ccroot.process_lib`
	:rtype: :py:class:`waflib.TaskGen.task_gen`
	"""
	return self(name=name, features='fake_lib', lib_paths=paths, lib_type='csshlib')


waflib/Tools/d.py (+0, -97)

@@ -1,97 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)
# Thomas Nagy, 2007-2010 (ita)

from waflib import Utils, Task, Errors
from waflib.TaskGen import taskgen_method, feature, extension
from waflib.Tools import d_scan, d_config
from waflib.Tools.ccroot import link_task, stlink_task

class d(Task.Task):
	"Compile a d file into an object file"
	color = 'GREEN'
	run_str = '${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_SRC_F:SRC} ${D_TGT_F:TGT}'
	scan = d_scan.scan

class d_with_header(d):
	"Compile a d file and generate a header"
	run_str = '${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_HDR_F:tgt.outputs[1].bldpath()} ${D_SRC_F:SRC} ${D_TGT_F:tgt.outputs[0].bldpath()}'

class d_header(Task.Task):
	"Compile d headers"
	color = 'BLUE'
	run_str = '${D} ${D_HEADER} ${SRC}'

class dprogram(link_task):
	"Link object files into a d program"
	run_str = '${D_LINKER} ${LINKFLAGS} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F:TGT} ${RPATH_ST:RPATH} ${DSTLIB_MARKER} ${DSTLIBPATH_ST:STLIBPATH} ${DSTLIB_ST:STLIB} ${DSHLIB_MARKER} ${DLIBPATH_ST:LIBPATH} ${DSHLIB_ST:LIB}'
	inst_to = '${BINDIR}'

class dshlib(dprogram):
	"Link object files into a d shared library"
	inst_to = '${LIBDIR}'

class dstlib(stlink_task):
	"Link object files into a d static library"
	pass # do not remove

@extension('.d', '.di', '.D')
def d_hook(self, node):
	"""
	Compile *D* files. To get .di files as well as .o files, set the following::

		def build(bld):
			bld.program(source='foo.d', target='app', generate_headers=True)

	"""
	ext = Utils.destos_to_binfmt(self.env.DEST_OS) == 'pe' and 'obj' or 'o'
	out = '%s.%d.%s' % (node.name, self.idx, ext)
	def create_compiled_task(self, name, node):
		task = self.create_task(name, node, node.parent.find_or_declare(out))
		try:
			self.compiled_tasks.append(task)
		except AttributeError:
			self.compiled_tasks = [task]
		return task

	if getattr(self, 'generate_headers', None):
		tsk = create_compiled_task(self, 'd_with_header', node)
		tsk.outputs.append(node.change_ext(self.env['DHEADER_ext']))
	else:
		tsk = create_compiled_task(self, 'd', node)
	return tsk

@taskgen_method
def generate_header(self, filename):
	"""
	See feature request #104::

		def build(bld):
			tg = bld.program(source='foo.d', target='app')
			tg.generate_header('blah.d')
			# is equivalent to:
			#tg = bld.program(source='foo.d', target='app', header_lst='blah.d')

	:param filename: header to create
	:type filename: string
	"""
	try:
		self.header_lst.append([filename, self.install_path])
	except AttributeError:
		self.header_lst = [[filename, self.install_path]]

@feature('d')
def process_header(self):
	"""
	Process the attribute 'header_lst' to create the d header compilation tasks::

		def build(bld):
			bld.program(source='foo.d', target='app', header_lst='blah.d')
	"""
	for i in getattr(self, 'header_lst', []):
		node = self.path.find_resource(i[0])
		if not node:
			raise Errors.WafError('file %r not found on d obj' % i[0])
		self.create_task('d_header', node, node.change_ext('.di'))


waflib/Tools/d_config.py (+0, -63)

@@ -1,63 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2010 (ita)

from waflib import Utils
from waflib.Configure import conf

@conf
def d_platform_flags(self):
	"""
	Set the extensions dll/so for d programs and libraries
	"""
	v = self.env
	if not v.DEST_OS:
		v.DEST_OS = Utils.unversioned_sys_platform()
	binfmt = Utils.destos_to_binfmt(self.env.DEST_OS)
	if binfmt == 'pe':
		v['dprogram_PATTERN'] = '%s.exe'
		v['dshlib_PATTERN'] = 'lib%s.dll'
		v['dstlib_PATTERN'] = 'lib%s.a'
	elif binfmt == 'mac-o':
		v['dprogram_PATTERN'] = '%s'
		v['dshlib_PATTERN'] = 'lib%s.dylib'
		v['dstlib_PATTERN'] = 'lib%s.a'
	else:
		v['dprogram_PATTERN'] = '%s'
		v['dshlib_PATTERN'] = 'lib%s.so'
		v['dstlib_PATTERN'] = 'lib%s.a'

DLIB = '''
version(D_Version2) {
	import std.stdio;
	int main() {
		writefln("phobos2");
		return 0;
	}
} else {
	version(Tango) {
		import tango.stdc.stdio;
		int main() {
			printf("tango");
			return 0;
		}
	} else {
		import std.stdio;
		int main() {
			writefln("phobos1");
			return 0;
		}
	}
}
'''
"""Detection string for the D standard library"""

@conf
def check_dlibrary(self, execute=True):
	"""
	Detect the kind of standard library that comes with the compiler, will set conf.env.DLIBRARY to tango, phobos1 or phobos2.
	"""
	ret = self.check_cc(features='d dprogram', fragment=DLIB, compile_filename='test.d', execute=execute, define_ret=True)
	if execute:
		self.env.DLIBRARY = ret.strip()
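
Since check_dlibrary() is registered with @conf, a project's wscript could call it directly during configuration; a hypothetical sketch (not part of this commit):

	def configure(conf):
		conf.load('compiler_d')
		conf.check_dlibrary()  # sets conf.env.DLIBRARY to tango, phobos1 or phobos2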


waflib/Tools/d_scan.py (+0, -209)

@@ -1,209 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2010 (ita)

"""
Provide a scanner for finding dependencies on d files
"""

import re
from waflib import Utils, Logs

def filter_comments(filename):
	"""
	:param filename: d file name
	:type filename: string
	:rtype: list
	:return: a list of characters
	"""
	txt = Utils.readf(filename)
	i = 0
	buf = []
	max = len(txt)
	begin = 0
	while i < max:
		c = txt[i]
		if c == '"' or c == "'": # skip a string or character literal
			buf.append(txt[begin:i])
			delim = c
			i += 1
			while i < max:
				c = txt[i]
				if c == delim: break
				elif c == '\\': # skip the character following backslash
					i += 1
				i += 1
			i += 1
			begin = i
		elif c == '/': # try to replace a comment with whitespace
			buf.append(txt[begin:i])
			i += 1
			if i == max: break
			c = txt[i]
			if c == '+': # eat nesting /+ +/ comment
				i += 1
				nesting = 1
				c = None
				while i < max:
					prev = c
					c = txt[i]
					if prev == '/' and c == '+':
						nesting += 1
						c = None
					elif prev == '+' and c == '/':
						nesting -= 1
						if nesting == 0: break
						c = None
					i += 1
			elif c == '*': # eat /* */ comment
				i += 1
				c = None
				while i < max:
					prev = c
					c = txt[i]
					if prev == '*' and c == '/': break
					i += 1
			elif c == '/': # eat // comment
				i += 1
				while i < max and txt[i] != '\n':
					i += 1
			else: # no comment
				begin = i - 1
				continue
			i += 1
			begin = i
			buf.append(' ')
		else:
			i += 1
	buf.append(txt[begin:])
	return buf

class d_parser(object):
	"""
	Parser for d files
	"""
	def __init__(self, env, incpaths):
		#self.code = ''
		#self.module = ''
		#self.imports = []

		self.allnames = []

		self.re_module = re.compile("module\s+([^;]+)")
		self.re_import = re.compile("import\s+([^;]+)")
		self.re_import_bindings = re.compile("([^:]+):(.*)")
		self.re_import_alias = re.compile("[^=]+=(.+)")

		self.env = env

		self.nodes = []
		self.names = []

		self.incpaths = incpaths

	def tryfind(self, filename):
		"""
		Search for a file matching a module/import directive

		:param filename: file to read
		:type filename: string
		"""
		found = 0
		for n in self.incpaths:
			found = n.find_resource(filename.replace('.', '/') + '.d')
			if found:
				self.nodes.append(found)
				self.waiting.append(found)
				break
		if not found:
			if not filename in self.names:
				self.names.append(filename)

	def get_strings(self, code):
		"""
		:param code: d code to parse
		:type code: string
		:return: the modules that the code uses
		:rtype: a list of match objects
		"""
		#self.imports = []
		self.module = ''
		lst = []

		# get the module name (if present)

		mod_name = self.re_module.search(code)
		if mod_name:
			self.module = re.sub('\s+', '', mod_name.group(1)) # strip all whitespaces

		# go through the code, have a look at all import occurrences

		# first, lets look at anything beginning with "import" and ending with ";"
		import_iterator = self.re_import.finditer(code)
		if import_iterator:
			for import_match in import_iterator:
				import_match_str = re.sub('\s+', '', import_match.group(1)) # strip all whitespaces

				# does this end with an import bindings declaration?
				# (import bindings always terminate the list of imports)
				bindings_match = self.re_import_bindings.match(import_match_str)
				if bindings_match:
					import_match_str = bindings_match.group(1)
					# if so, extract the part before the ":" (since the module declaration(s) is/are located there)

				# split the matching string into a bunch of strings, separated by a comma
				matches = import_match_str.split(',')

				for match in matches:
					alias_match = self.re_import_alias.match(match)
					if alias_match:
						# is this an alias declaration? (alias = module name) if so, extract the module name
						match = alias_match.group(1)

					lst.append(match)
		return lst

	def start(self, node):
		"""
		The parsing starts here

		:param node: input file
		:type node: :py:class:`waflib.Node.Node`
		"""
		self.waiting = [node]
		# while the stack is not empty, add the dependencies
		while self.waiting:
			nd = self.waiting.pop(0)
			self.iter(nd)

	def iter(self, node):
		"""
		Find all the modules that a file depends on, uses :py:meth:`waflib.Tools.d_scan.d_parser.tryfind` to process dependent files

		:param node: input file
		:type node: :py:class:`waflib.Node.Node`
		"""
		path = node.abspath() # obtain the absolute path
		code = "".join(filter_comments(path)) # read the file and filter the comments
		names = self.get_strings(code) # obtain the import strings
		for x in names:
			# optimization
			if x in self.allnames: continue
			self.allnames.append(x)

			# for each name, see if it is like a node or not
			self.tryfind(x)

def scan(self):
	"look for .d/.di used by a d file"
	env = self.env
	gruik = d_parser(env, self.generator.includes_nodes)
	node = self.inputs[0]
	gruik.start(node)
	nodes = gruik.nodes
	names = gruik.names

	if Logs.verbose:
		Logs.debug('deps: deps for %s: %r; unresolved %r' % (str(node), nodes, names))
	return (nodes, names)


waflib/Tools/dbus.py (+0, -70)

@@ -1,70 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Ali Sabil, 2007

"""
Compile dbus files with **dbus-binding-tool**

Typical usage::

	def options(opt):
		opt.load('compiler_c dbus')
	def configure(conf):
		conf.load('compiler_c dbus')
	def build(bld):
		tg = bld.program(
			includes = '.',
			source = bld.path.ant_glob('*.c'),
			target = 'gnome-hello')
		tg.add_dbus_file('test.xml', 'test_prefix', 'glib-server')
"""

from waflib import Task, Errors
from waflib.TaskGen import taskgen_method, before_method

@taskgen_method
def add_dbus_file(self, filename, prefix, mode):
	"""
	Add a dbus file to the list of dbus files to process. Store them in the attribute *dbus_lst*.

	:param filename: xml file to compile
	:type filename: string
	:param prefix: dbus binding tool prefix (--prefix=prefix)
	:type prefix: string
	:param mode: dbus binding tool mode (--mode=mode)
	:type mode: string
	"""
	if not hasattr(self, 'dbus_lst'):
		self.dbus_lst = []
	if not 'process_dbus' in self.meths:
		self.meths.append('process_dbus')
	self.dbus_lst.append([filename, prefix, mode])

@before_method('apply_core')
def process_dbus(self):
	"""
	Process the dbus files stored in the attribute *dbus_lst* to create :py:class:`waflib.Tools.dbus.dbus_binding_tool` instances.
	"""
	for filename, prefix, mode in getattr(self, 'dbus_lst', []):
		node = self.path.find_resource(filename)
		if not node:
			raise Errors.WafError('file not found ' + filename)
		tsk = self.create_task('dbus_binding_tool', node, node.change_ext('.h'))
		tsk.env.DBUS_BINDING_TOOL_PREFIX = prefix
		tsk.env.DBUS_BINDING_TOOL_MODE = mode

class dbus_binding_tool(Task.Task):
	"""
	Compile a dbus file
	"""
	color = 'BLUE'
	ext_out = ['.h']
	run_str = '${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}'
	shell = True # temporary workaround for #795

def configure(conf):
	"""
	Detect the program dbus-binding-tool and set the *conf.env.DBUS_BINDING_TOOL*
	"""
	dbus_binding_tool = conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL')


waflib/Tools/dmd.py (+0, -88)

@@ -1,88 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)
# Thomas Nagy, 2008-2010 (ita)

import sys
from waflib.Tools import ar, d
from waflib.Configure import conf

@conf
def find_dmd(conf):
	"""
	Find the program *dmd*, *dmd2*, or *ldc* and set the variable *D*
	"""
	conf.find_program(['dmd', 'dmd2', 'ldc'], var='D')

	# make sure that we're dealing with dmd1, dmd2, or ldc(1)
	out = conf.cmd_and_log(conf.env.D + ['--help'])
	if out.find("D Compiler v") == -1:
		out = conf.cmd_and_log(conf.env.D + ['-version'])
		if out.find("based on DMD v1.") == -1:
			conf.fatal("detected compiler is not dmd/ldc")

@conf
def common_flags_ldc(conf):
	"""
	Set the D flags required by *ldc*
	"""
	v = conf.env
	v['DFLAGS'] = ['-d-version=Posix']
	v['LINKFLAGS'] = []
	v['DFLAGS_dshlib'] = ['-relocation-model=pic']

@conf
def common_flags_dmd(conf):
	"""
	Set the flags required by *dmd* or *dmd2*
	"""

	v = conf.env

	# _DFLAGS _DIMPORTFLAGS

	# Compiler is dmd so 'gdc' part will be ignored, just
	# ensure key is there, so wscript can append flags to it
	#v['DFLAGS'] = ['-version=Posix']

	v['D_SRC_F'] = ['-c']
	v['D_TGT_F'] = '-of%s'

	# linker
	v['D_LINKER'] = v['D']
	v['DLNK_SRC_F'] = ''
	v['DLNK_TGT_F'] = '-of%s'
	v['DINC_ST'] = '-I%s'

	v['DSHLIB_MARKER'] = v['DSTLIB_MARKER'] = ''
	v['DSTLIB_ST'] = v['DSHLIB_ST'] = '-L-l%s'
	v['DSTLIBPATH_ST'] = v['DLIBPATH_ST'] = '-L-L%s'

	v['LINKFLAGS_dprogram']= ['-quiet']

	v['DFLAGS_dshlib'] = ['-fPIC']
	v['LINKFLAGS_dshlib'] = ['-L-shared']

	v['DHEADER_ext'] = '.di'
	v.DFLAGS_d_with_header = ['-H', '-Hf']
	v['D_HDR_F'] = '%s'

def configure(conf):
	"""
	Configuration for *dmd*, *dmd2*, and *ldc*
	"""
	conf.find_dmd()

	if sys.platform == 'win32':
		out = conf.cmd_and_log(conf.env.D + ['--help'])
		if out.find("D Compiler v2.") > -1:
			conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead')

	conf.load('ar')
	conf.load('d')
	conf.common_flags_dmd()
	conf.d_platform_flags()

	if str(conf.env.D).find('ldc') > -1:
		conf.common_flags_ldc()


waflib/Tools/fc.py (+0, -198)

@@ -1,198 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# DC 2008
# Thomas Nagy 2010 (ita)

"""
fortran support
"""

from waflib import Utils, Task, Logs
from waflib.Tools import ccroot, fc_config, fc_scan
from waflib.TaskGen import feature, extension
from waflib.Configure import conf

ccroot.USELIB_VARS['fc'] = set(['FCFLAGS', 'DEFINES', 'INCLUDES'])
ccroot.USELIB_VARS['fcprogram_test'] = ccroot.USELIB_VARS['fcprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
ccroot.USELIB_VARS['fcshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
ccroot.USELIB_VARS['fcstlib'] = set(['ARFLAGS', 'LINKDEPS'])

@feature('fcprogram', 'fcshlib', 'fcstlib', 'fcprogram_test')
def dummy(self):
	"""
	Unused function that does nothing (TODO: remove in waf 1.9)
	"""
	pass

@extension('.f', '.f90', '.F', '.F90', '.for', '.FOR')
def fc_hook(self, node):
	"Bind the typical Fortran file extensions to the creation of a :py:class:`waflib.Tools.fc.fc` instance"
	return self.create_compiled_task('fc', node)

@conf
def modfile(conf, name):
	"""
	Turn a module name into the right module file name.
	Defaults to all lower case.
	"""
	return {'lower'     : name.lower() + '.mod',
		'lower.MOD' : name.upper() + '.MOD',
		'UPPER.mod' : name.upper() + '.mod',
		'UPPER'     : name.upper() + '.MOD'}[conf.env.FC_MOD_CAPITALIZATION or 'lower']

def get_fortran_tasks(tsk):
	"""
	Obtain all other fortran tasks from the same build group. Those tasks must not have
	the attribute 'nomod' or 'mod_fortran_done'
	"""
	bld = tsk.generator.bld
	tasks = bld.get_tasks_group(bld.get_group_idx(tsk.generator))
	return [x for x in tasks if isinstance(x, fc) and not getattr(x, 'nomod', None) and not getattr(x, 'mod_fortran_done', None)]

class fc(Task.Task):
	"""
	The fortran tasks can only run when all fortran tasks in the current group are ready to be executed.
	This may cause a deadlock if another fortran task is waiting for something that cannot happen (circular dependency);
	in this case, set 'nomod=True' on those task instances to break the loop.
	"""

	color = 'GREEN'
	run_str = '${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()}'
	vars = ["FORTRANMODPATHFLAG"]

	def scan(self):
		"""scanner for fortran dependencies"""
		tmp = fc_scan.fortran_parser(self.generator.includes_nodes)
		tmp.task = self
		tmp.start(self.inputs[0])
		if Logs.verbose:
			Logs.debug('deps: deps for %r: %r; unresolved %r' % (self.inputs, tmp.nodes, tmp.names))
		return (tmp.nodes, tmp.names)

	def runnable_status(self):
		"""
		Set the mod file outputs and the dependencies on the mod files over all the fortran tasks
		executed by the main thread so there are no concurrency issues
		"""
		if getattr(self, 'mod_fortran_done', None):
			return super(fc, self).runnable_status()

		# now, if we reach this part it is because this fortran task is the first in the list
		bld = self.generator.bld

		# obtain the fortran tasks
		lst = get_fortran_tasks(self)

		# disable this method for other tasks
		for tsk in lst:
			tsk.mod_fortran_done = True

		# wait for all the .f tasks to be ready for execution
		# and ensure that the scanners are called at least once
		for tsk in lst:
			ret = tsk.runnable_status()
			if ret == Task.ASK_LATER:
				# we have to wait for one of the other fortran tasks to be ready
				# this may deadlock if there are dependencies between the fortran tasks
				# but this should not happen (we are setting them here!)
				for x in lst:
					x.mod_fortran_done = None

				# TODO sort the list of tasks in bld.producer.outstanding to put all fortran tasks at the end
				return Task.ASK_LATER

		ins = Utils.defaultdict(set)
		outs = Utils.defaultdict(set)

		# the .mod files to create
		for tsk in lst:
			key = tsk.uid()
			for x in bld.raw_deps[key]:
				if x.startswith('MOD@'):
					name = bld.modfile(x.replace('MOD@', ''))
					node = bld.srcnode.find_or_declare(name)
					if not hasattr(node, 'sig'):
						node.sig = Utils.SIG_NIL
					tsk.set_outputs(node)
					outs[id(node)].add(tsk)

		# the .mod files to use
		for tsk in lst:
			key = tsk.uid()
			for x in bld.raw_deps[key]:
				if x.startswith('USE@'):
					name = bld.modfile(x.replace('USE@', ''))
					node = bld.srcnode.find_resource(name)
					if node and node not in tsk.outputs:
						if not node in bld.node_deps[key]:
							bld.node_deps[key].append(node)
						ins[id(node)].add(tsk)

		# if the intersection matches, set the order
		for k in ins.keys():
			for a in ins[k]:
				a.run_after.update(outs[k])

				# the scanner cannot output nodes, so we have to set them
				# ourselves as task.dep_nodes (additional input nodes)
				tmp = []
				for t in outs[k]:
					tmp.extend(t.outputs)
				a.dep_nodes.extend(tmp)
				a.dep_nodes.sort(key=lambda x: x.abspath())

		# the task objects have changed: clear the signature cache
		for tsk in lst:
			try:
				delattr(tsk, 'cache_sig')
			except AttributeError:
				pass

		return super(fc, self).runnable_status()

class fcprogram(ccroot.link_task):
	"""Link fortran programs"""
	color = 'YELLOW'
	run_str = '${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB} ${LDFLAGS}'
	inst_to = '${BINDIR}'

class fcshlib(fcprogram):
	"""Link fortran libraries"""
	inst_to = '${LIBDIR}'

class fcprogram_test(fcprogram):
	"""Custom link task to obtain the compiler outputs for fortran configuration tests"""

	def runnable_status(self):
		"""This task is always executed"""
		ret = super(fcprogram_test, self).runnable_status()
		if ret == Task.SKIP_ME:
			ret = Task.RUN_ME
		return ret

	def exec_command(self, cmd, **kw):
		"""Store the compiler stdout/stderr onto the build context, to bld.out + bld.err"""
		bld = self.generator.bld

		kw['shell'] = isinstance(cmd, str)
		kw['stdout'] = kw['stderr'] = Utils.subprocess.PIPE
		kw['cwd'] = bld.variant_dir
		bld.out = bld.err = ''

		bld.to_log('command: %s\n' % cmd)

		kw['output'] = 0
		try:
			(bld.out, bld.err) = bld.cmd_and_log(cmd, **kw)
		except Exception:
			return -1

		if bld.out:
			bld.to_log("out: %s\n" % bld.out)
		if bld.err:
			bld.to_log("err: %s\n" % bld.err)

class fcstlib(ccroot.stlink_task):
	"""Link fortran static libraries (uses ar by default)"""
	pass # do not remove the pass statement


waflib/Tools/fc_config.py (+0, -470)

@@ -1,470 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# DC 2008
# Thomas Nagy 2010 (ita)

"""
Fortran configuration helpers
"""

import re, os, sys, shlex
from waflib.Configure import conf
from waflib.TaskGen import feature, before_method
from waflib import Utils

FC_FRAGMENT = ' program main\n end program main\n'
FC_FRAGMENT2 = ' PROGRAM MAIN\n END\n' # what's the actual difference between these?

@conf
def fc_flags(conf):
	"""
	Define common fortran configuration flags and file extensions
	"""
	v = conf.env

	v['FC_SRC_F'] = []
	v['FC_TGT_F'] = ['-c', '-o']
	v['FCINCPATH_ST'] = '-I%s'
	v['FCDEFINES_ST'] = '-D%s'

	if not v['LINK_FC']: v['LINK_FC'] = v['FC']
	v['FCLNK_SRC_F'] = []
	v['FCLNK_TGT_F'] = ['-o']

	v['FCFLAGS_fcshlib'] = ['-fpic']
	v['LINKFLAGS_fcshlib'] = ['-shared']
	v['fcshlib_PATTERN'] = 'lib%s.so'

	v['fcstlib_PATTERN'] = 'lib%s.a'

	v['FCLIB_ST'] = '-l%s'
	v['FCLIBPATH_ST'] = '-L%s'
	v['FCSTLIB_ST'] = '-l%s'
	v['FCSTLIBPATH_ST'] = '-L%s'
	v['FCSTLIB_MARKER'] = '-Wl,-Bstatic'
	v['FCSHLIB_MARKER'] = '-Wl,-Bdynamic'

	v['SONAME_ST'] = '-Wl,-h,%s'

@conf
def fc_add_flags(conf):
	"""
	Add FCFLAGS / LDFLAGS / LINKFLAGS from os.environ to conf.env
	"""
	conf.add_os_flags('FCFLAGS')
	conf.add_os_flags('LINKFLAGS')
	conf.add_os_flags('LDFLAGS')

@conf
def check_fortran(self, *k, **kw):
	"""See if the fortran compiler works by compiling a simple fortran program"""
	self.check_cc(
		fragment = FC_FRAGMENT,
		compile_filename = 'test.f',
		features = 'fc fcprogram',
		msg = 'Compiling a simple fortran app')

@conf
def check_fc(self, *k, **kw):
	"""
	Same as :py:func:`waflib.Tools.c_config.check` but default to the *Fortran* programming language
	(Overriding the C defaults in :py:func:`waflib.Tools.c_config.validate_c` here)
	"""
	kw['compiler'] = 'fc'
	if not 'compile_mode' in kw:
		kw['compile_mode'] = 'fc'
	if not 'type' in kw:
		kw['type'] = 'fcprogram'
	if not 'compile_filename' in kw:
		kw['compile_filename'] = 'test.f90'
	if not 'code' in kw:
		kw['code'] = FC_FRAGMENT
	return self.check(*k, **kw)

# ------------------------------------------------------------------------
# --- These are the default platform modifiers, refactored here for
#     convenience. gfortran and g95 have much overlap.
# ------------------------------------------------------------------------

@conf
def fortran_modifier_darwin(conf):
	"""
	Define fortran flags and extensions for the OSX systems
	"""
	v = conf.env
	v['FCFLAGS_fcshlib'] = ['-fPIC']
	v['LINKFLAGS_fcshlib'] = ['-dynamiclib', '-Wl,-compatibility_version,1', '-Wl,-current_version,1']
	v['fcshlib_PATTERN'] = 'lib%s.dylib'
	v['FRAMEWORKPATH_ST'] = '-F%s'
	v['FRAMEWORK_ST'] = '-framework %s'

	v['LINKFLAGS_fcstlib'] = []

	v['FCSHLIB_MARKER'] = ''
	v['FCSTLIB_MARKER'] = ''
	v['SONAME_ST'] = ''


@conf
def fortran_modifier_win32(conf):
	"""Define fortran flags for the windows platforms"""
	v = conf.env
	v['fcprogram_PATTERN'] = v['fcprogram_test_PATTERN'] = '%s.exe'

	v['fcshlib_PATTERN'] = '%s.dll'
	v['implib_PATTERN'] = 'lib%s.dll.a'
	v['IMPLIB_ST'] = '-Wl,--out-implib,%s'

	v['FCFLAGS_fcshlib'] = []

	v.append_value('FCFLAGS_fcshlib', ['-DDLL_EXPORT']) # TODO adding nonstandard defines like this DLL_EXPORT is not a good idea

	# Auto-import is enabled by default even without this option,
	# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
	# that the linker emits otherwise.
	v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import'])

@conf
def fortran_modifier_cygwin(conf):
	"""Define fortran flags for use on cygwin"""
	fortran_modifier_win32(conf)
	v = conf.env
	v['fcshlib_PATTERN'] = 'cyg%s.dll'
	v.append_value('LINKFLAGS_fcshlib', ['-Wl,--enable-auto-image-base'])
	v['FCFLAGS_fcshlib'] = []
# ------------------------------------------------------------------------

@conf
def check_fortran_dummy_main(self, *k, **kw):
	"""
	Guess if a main function is needed by compiling a code snippet with
	the C compiler and link with the Fortran compiler

	TODO: (DC)
	- handling dialects (F77, F90, etc... -> needs core support first)
	- fix dummy main check (AC_FC_DUMMY_MAIN vs AC_FC_MAIN)

	TODO: what does the above mean? (ita)
	"""

	if not self.env.CC:
		self.fatal('A c compiler is required for check_fortran_dummy_main')

	lst = ['MAIN__', '__MAIN', '_MAIN', 'MAIN_', 'MAIN']
	lst.extend([m.lower() for m in lst])
	lst.append('')

	self.start_msg('Detecting whether we need a dummy main')
	for main in lst:
		kw['fortran_main'] = main
		try:
			self.check_cc(
				fragment = 'int %s() { return 0; }\n' % (main or 'test'),
				features = 'c fcprogram',
				mandatory = True
			)
			if not main:
				self.env.FC_MAIN = -1
				self.end_msg('no')
			else:
				self.env.FC_MAIN = main
				self.end_msg('yes %s' % main)
			break
		except self.errors.ConfigurationError:
			pass
	else:
		self.end_msg('not found')
		self.fatal('could not detect whether fortran requires a dummy main, see the config.log')

# ------------------------------------------------------------------------

GCC_DRIVER_LINE = re.compile('^Driving:')
POSIX_STATIC_EXT = re.compile('\S+\.a')
POSIX_LIB_FLAGS = re.compile('-l\S+')

@conf
def is_link_verbose(self, txt):
	"""Return True if 'useful' link options can be found in txt"""
	assert isinstance(txt, str)
	for line in txt.splitlines():
		if not GCC_DRIVER_LINE.search(line):
			if POSIX_STATIC_EXT.search(line) or POSIX_LIB_FLAGS.search(line):
				return True
	return False

@conf
def check_fortran_verbose_flag(self, *k, **kw):
	"""
	Check what kind of verbose (-v) flag works, then set it to env.FC_VERBOSE_FLAG
	"""
	self.start_msg('fortran link verbose flag')
	for x in ('-v', '--verbose', '-verbose', '-V'):
		try:
			self.check_cc(
				features = 'fc fcprogram_test',
				fragment = FC_FRAGMENT2,
				compile_filename = 'test.f',
				linkflags = [x],
				mandatory=True
			)
		except self.errors.ConfigurationError:
			pass
		else:
			# output is on stderr or stdout (for xlf)
			if self.is_link_verbose(self.test_bld.err) or self.is_link_verbose(self.test_bld.out):
				self.end_msg(x)
				break
	else:
		self.end_msg('failure')
		self.fatal('Could not obtain the fortran link verbose flag (see config.log)')

	self.env.FC_VERBOSE_FLAG = x
	return x

# ------------------------------------------------------------------------

# linkflags which match those are ignored
LINKFLAGS_IGNORED = [r'-lang*', r'-lcrt[a-zA-Z0-9\.]*\.o', r'-lc$', r'-lSystem', r'-libmil', r'-LIST:*', r'-LNO:*']
if os.name == 'nt':
	LINKFLAGS_IGNORED.extend([r'-lfrt*', r'-luser32', r'-lkernel32', r'-ladvapi32', r'-lmsvcrt', r'-lshell32', r'-lmingw', r'-lmoldname'])
else:
	LINKFLAGS_IGNORED.append(r'-lgcc*')
RLINKFLAGS_IGNORED = [re.compile(f) for f in LINKFLAGS_IGNORED]

def _match_ignore(line):
	"""Returns True if the line should be ignored (fortran test for verbosity)."""
	for i in RLINKFLAGS_IGNORED:
		if i.match(line):
			return True
	return False

def parse_fortran_link(lines):
	"""Given the output of verbose link of Fortran compiler, this returns a
	list of flags necessary for linking using the standard linker."""
	# TODO: On windows ?
	final_flags = []
	for line in lines:
		if not GCC_DRIVER_LINE.match(line):
			_parse_flink_line(line, final_flags)
	return final_flags

SPACE_OPTS = re.compile('^-[LRuYz]$')
NOSPACE_OPTS = re.compile('^-[RL]')

def _parse_flink_token(lexer, token, tmp_flags):
	# Here we go (convention for wildcard is shell, not regex !)
	#   1 TODO: we first get some root .a libraries
	#   2 TODO: take everything starting by -bI:*
	#   3 Ignore the following flags: -lang* | -lcrt*.o | -lc |
	#     -lgcc* | -lSystem | -libmil | -LANG:=* | -LIST:* | -LNO:*)
	#   4 take into account -lkernel32
	#   5 For options of the kind -[[LRuYz]], as they take one argument
	#     after, the actual option is the next token
	#   6 For -YP,*: take and replace by -Larg where arg is the old
	#     argument
	#   7 For -[lLR]*: take

	# step 3
	if _match_ignore(token):
		pass
	# step 4
	elif token.startswith('-lkernel32') and sys.platform == 'cygwin':
		tmp_flags.append(token)
	# step 5
	elif SPACE_OPTS.match(token):
		t = lexer.get_token()
		if t.startswith('P,'):
			t = t[2:]
		for opt in t.split(os.pathsep):
			tmp_flags.append('-L%s' % opt)
	# step 6
	elif NOSPACE_OPTS.match(token):
		tmp_flags.append(token)
	# step 7
	elif POSIX_LIB_FLAGS.match(token):
		tmp_flags.append(token)
	else:
		# ignore anything not explicitly taken into account
		pass

	t = lexer.get_token()
	return t

def _parse_flink_line(line, final_flags):
	"""private"""
	lexer = shlex.shlex(line, posix = True)
	lexer.whitespace_split = True

	t = lexer.get_token()
	tmp_flags = []
	while t:
		t = _parse_flink_token(lexer, t, tmp_flags)

	final_flags.extend(tmp_flags)
	return final_flags

@conf
def check_fortran_clib(self, autoadd=True, *k, **kw):
	"""
	Obtain the flags for linking with the C library
	if this check works, add uselib='CLIB' to your task generators
	"""
	if not self.env.FC_VERBOSE_FLAG:
		self.fatal('env.FC_VERBOSE_FLAG is not set: execute check_fortran_verbose_flag?')

	self.start_msg('Getting fortran runtime link flags')
	try:
		self.check_cc(
			fragment = FC_FRAGMENT2,
			compile_filename = 'test.f',
			features = 'fc fcprogram_test',
			linkflags = [self.env.FC_VERBOSE_FLAG]
		)
	except Exception:
		self.end_msg(False)
		if kw.get('mandatory', True):
			conf.fatal('Could not find the c library flags')
	else:
		out = self.test_bld.err
		flags = parse_fortran_link(out.splitlines())
		self.end_msg('ok (%s)' % ' '.join(flags))
		self.env.LINKFLAGS_CLIB = flags
		return flags
	return []

def getoutput(conf, cmd, stdin=False):
	"""
	TODO a bit redundant, can be removed anytime
	"""
	if stdin:
		stdin = Utils.subprocess.PIPE
	else:
		stdin = None
	env = conf.env.env or None
	try:
		p = Utils.subprocess.Popen(cmd, stdin=stdin, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=env)
		if stdin:
			p.stdin.write('\n'.encode())
		out, err = p.communicate()
	except Exception:
		conf.fatal('could not determine the compiler version %r' % cmd)
	if not isinstance(out, str):
		out = out.decode(sys.stdout.encoding or 'iso8859-1')
	if not isinstance(err, str):
		err = err.decode(sys.stdout.encoding or 'iso8859-1')
	return (out, err)

# ------------------------------------------------------------------------

ROUTINES_CODE = """\
      subroutine foobar()
      return
      end
      subroutine foo_bar()
      return
      end
"""

MAIN_CODE = """
void %(dummy_func_nounder)s(void);
void %(dummy_func_under)s(void);
int %(main_func_name)s() {
	%(dummy_func_nounder)s();
	%(dummy_func_under)s();
	return 0;
}
"""

@feature('link_main_routines_func')
@before_method('process_source')
def link_main_routines_tg_method(self):
	"""
	The configuration test declares a unique task generator,
	so we create other task generators from there for fortran link tests
	"""
	def write_test_file(task):
		task.outputs[0].write(task.generator.code)
	bld = self.bld
	bld(rule=write_test_file, target='main.c', code=MAIN_CODE % self.__dict__)
	bld(rule=write_test_file, target='test.f', code=ROUTINES_CODE)
	bld(features='fc fcstlib', source='test.f', target='test')
	bld(features='c fcprogram', source='main.c', target='app', use='test')

def mangling_schemes():
	"""
	Generate triplets for use with mangle_name
	(used in check_fortran_mangling)
	the order is tuned for gfortran
	"""
	for u in ('_', ''):
		for du in ('', '_'):
			for c in ("lower", "upper"):
				yield (u, du, c)

def mangle_name(u, du, c, name):
	"""Mangle a name from a triplet (used in check_fortran_mangling)"""
	return getattr(name, c)() + u + (name.find('_') != -1 and du or '')

@conf
def check_fortran_mangling(self, *k, **kw):
	"""
	Detect the mangling scheme, sets FORTRAN_MANGLING to the triplet found

	This test will compile a fortran static library, then link a c app against it
	"""
	if not self.env.CC:
		self.fatal('A c compiler is required for link_main_routines')
	if not self.env.FC:
		self.fatal('A fortran compiler is required for link_main_routines')
	if not self.env.FC_MAIN:
		self.fatal('Checking for mangling requires self.env.FC_MAIN (execute "check_fortran_dummy_main" first?)')

	self.start_msg('Getting fortran mangling scheme')
	for (u, du, c) in mangling_schemes():
		try:
			self.check_cc(
				compile_filename = [],
				features = 'link_main_routines_func',
				msg = 'nomsg',
				errmsg = 'nomsg',
				mandatory=True,
				dummy_func_nounder = mangle_name(u, du, c, "foobar"),
				dummy_func_under = mangle_name(u, du, c, "foo_bar"),
				main_func_name = self.env.FC_MAIN
			)
		except self.errors.ConfigurationError:
			pass
		else:
			self.end_msg("ok ('%s', '%s', '%s-case')" % (u, du, c))
			self.env.FORTRAN_MANGLING = (u, du, c)
			break
	else:
		self.end_msg(False)
		self.fatal('mangler not found')

	return (u, du, c)

@feature('pyext')
@before_method('propagate_uselib_vars', 'apply_link')
def set_lib_pat(self):
	"""Set the fortran flags for linking with the python library"""
	self.env['fcshlib_PATTERN'] = self.env['pyext_PATTERN']

@conf
def detect_openmp(self):
	for x in ('-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp'):
		try:
			self.check_fc(
				msg='Checking for OpenMP flag %s' % x,
				fragment='program main\n call omp_get_num_threads()\nend program main',
				fcflags=x,
				linkflags=x,
				uselib_store='OPENMP'
			)
		except self.errors.ConfigurationError:
			pass
		else:
			break
	else:
		self.fatal('Could not find OpenMP')
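
All of the helpers above are @conf methods, so they chain naturally from a project's wscript; a hypothetical configuration sketch (not part of this commit):

	def configure(conf):
		conf.load('compiler_c compiler_fc')
		conf.check_fortran()
		conf.check_fortran_verbose_flag()
		conf.check_fortran_clib()        # adds uselib 'CLIB' on success
		conf.check_fortran_dummy_main()  # sets conf.env.FC_MAIN
		conf.check_fortran_mangling()    # requires FC_MAIN from the line above
		conf.detect_openmp()             # adds uselib 'OPENMP' on success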


waflib/Tools/fc_scan.py (+0, -121)

@@ -1,121 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# DC 2008
# Thomas Nagy 2010 (ita)

import re

from waflib import Utils, Task, TaskGen, Logs
from waflib.TaskGen import feature, before_method, after_method, extension
from waflib.Configure import conf

INC_REGEX = """(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
USE_REGEX = """(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
MOD_REGEX = """(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""

re_inc = re.compile(INC_REGEX, re.I)
re_use = re.compile(USE_REGEX, re.I)
re_mod = re.compile(MOD_REGEX, re.I)

class fortran_parser(object):
	"""
	This parser will return:

	* the nodes corresponding to the module names that will be produced
	* the nodes corresponding to the include files used
	* the module names used by the fortran file
	"""

	def __init__(self, incpaths):
		self.seen = []
		"""Files already parsed"""

		self.nodes = []
		"""List of :py:class:`waflib.Node.Node` representing the dependencies to return"""

		self.names = []
		"""List of module names to return"""

		self.incpaths = incpaths
		"""List of :py:class:`waflib.Node.Node` representing the include paths"""

	def find_deps(self, node):
		"""
		Parse a fortran file to read the dependencies used and provided

		:param node: fortran file to read
		:type node: :py:class:`waflib.Node.Node`
		:return: lists representing the includes, the modules used, and the modules created by a fortran file
		:rtype: tuple of list of strings
		"""
		txt = node.read()
		incs = []
		uses = []
		mods = []
		for line in txt.splitlines():
			# line by line regexp search? optimize?
			m = re_inc.search(line)
			if m:
				incs.append(m.group(1))
			m = re_use.search(line)
			if m:
				uses.append(m.group(1))
			m = re_mod.search(line)
			if m:
				mods.append(m.group(1))
		return (incs, uses, mods)

	def start(self, node):
		"""
		Start the parsing. Use the stack self.waiting to hold the nodes to iterate on

		:param node: fortran file
		:type node: :py:class:`waflib.Node.Node`
		"""
		self.waiting = [node]
		while self.waiting:
			nd = self.waiting.pop(0)
			self.iter(nd)

	def iter(self, node):
		"""
		Process a single file in the search for dependencies, extract the files used,
		the modules used, and the modules provided.
		"""
		path = node.abspath()
		incs, uses, mods = self.find_deps(node)
		for x in incs:
			if x in self.seen:
				continue
			self.seen.append(x)
			self.tryfind_header(x)

		for x in uses:
			name = "USE@%s" % x
			if not name in self.names:
				self.names.append(name)

		for x in mods:
			name = "MOD@%s" % x
			if not name in self.names:
				self.names.append(name)

	def tryfind_header(self, filename):
		"""
		Try to find an include and add it to the nodes to process

		:param filename: file name
		:type filename: string
		"""
		found = None
		for n in self.incpaths:
			found = n.find_resource(filename)
			if found:
				self.nodes.append(found)
				self.waiting.append(found)
				break
		if not found:
			if not filename in self.names:
				self.names.append(filename)



waflib/Tools/flex.py (+0, -53)

@@ -1,53 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# John O'Meara, 2006
# Thomas Nagy, 2006-2010 (ita)

"""
The **flex** program is a code generator which creates C or C++ files.
The generated files are compiled into object files.
"""

import waflib.TaskGen, os, re

def decide_ext(self, node):
	if 'cxx' in self.features:
		return ['.lex.cc']
	return ['.lex.c']

def flexfun(tsk):
	env = tsk.env
	bld = tsk.generator.bld
	wd = bld.variant_dir
	def to_list(xx):
		if isinstance(xx, str): return [xx]
		return xx
	tsk.last_cmd = lst = []
	lst.extend(to_list(env['FLEX']))
	lst.extend(to_list(env['FLEXFLAGS']))
	inputs = [a.path_from(bld.bldnode) for a in tsk.inputs]
	if env.FLEX_MSYS:
		inputs = [x.replace(os.sep, '/') for x in inputs]
	lst.extend(inputs)
	lst = [x for x in lst if x]
	txt = bld.cmd_and_log(lst, cwd=wd, env=env.env or None, quiet=0)
	tsk.outputs[0].write(txt.replace('\r\n', '\n').replace('\r', '\n')) # issue #1207

waflib.TaskGen.declare_chain(
	name = 'flex',
	rule = flexfun, # issue #854
	ext_in = '.l',
	decider = decide_ext,
)

def configure(conf):
	"""
	Detect the *flex* program
	"""
	conf.find_program('flex', var='FLEX')
	conf.env.FLEXFLAGS = ['-t']

	if re.search (r"\\msys\\[0-9.]+\\bin\\flex.exe$", conf.env.FLEX[0]):
		# this is the flex shipped with MSYS
		conf.env.FLEX_MSYS = True
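
A hypothetical wscript sketch (not part of this commit): the declare_chain() above maps every .l source to flexfun, so a scanner can be listed like any other source file.

	def configure(conf):
		conf.load('compiler_c flex')

	def build(bld):
		# scanner.l becomes scanner.lex.c via the chain declared above
		bld.program(source='scanner.l main.c', target='scan')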


waflib/Tools/g95.py (+0, -66)

@@ -1,66 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# KWS 2010
# Thomas Nagy 2010 (ita)

import re
from waflib import Utils
from waflib.Tools import fc, fc_config, fc_scan, ar
from waflib.Configure import conf

@conf
def find_g95(conf):
fc = conf.find_program('g95', var='FC')
conf.get_g95_version(fc)
conf.env.FC_NAME = 'G95'

@conf
def g95_flags(conf):
v = conf.env
v['FCFLAGS_fcshlib'] = ['-fPIC']
v['FORTRANMODFLAG'] = ['-fmod=', ''] # template for module path
v['FCFLAGS_DEBUG'] = ['-Werror'] # why not

@conf
def g95_modifier_win32(conf):
fc_config.fortran_modifier_win32(conf)

@conf
def g95_modifier_cygwin(conf):
fc_config.fortran_modifier_cygwin(conf)

@conf
def g95_modifier_darwin(conf):
fc_config.fortran_modifier_darwin(conf)

@conf
def g95_modifier_platform(conf):
dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
g95_modifier_func = getattr(conf, 'g95_modifier_' + dest_os, None)
if g95_modifier_func:
g95_modifier_func()

@conf
def get_g95_version(conf, fc):
"""get the compiler version"""

version_re = re.compile(r"g95\s*(?P<major>\d*)\.(?P<minor>\d*)").search
cmd = fc + ['--version']
out, err = fc_config.getoutput(conf, cmd, stdin=False)
if out:
match = version_re(out)
else:
match = version_re(err)
if not match:
conf.fatal('cannot determine g95 version')
k = match.groupdict()
conf.env['FC_VERSION'] = (k['major'], k['minor'])

def configure(conf):
conf.find_g95()
conf.find_ar()
conf.fc_flags()
conf.fc_add_flags()
conf.g95_flags()
conf.g95_modifier_platform()
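
As an illustration, the version regex above extracts the two version fields from a banner such as the following fabricated sample:

import re
version_re = re.compile(r"g95\s*(?P<major>\d*)\.(?P<minor>\d*)").search
m = version_re("G95 (GCC 4.0.3 (g95 0.93!) Oct 16 2010)")
print(m.groupdict())  # {'major': '0', 'minor': '93'}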


+ 0  - 18   waflib/Tools/gas.py

@@ -1,18 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2008-2010 (ita)

"Detect as/gas/gcc for compiling assembly files"

import waflib.Tools.asm # - leave this
from waflib.Tools import ar

def configure(conf):
"""
Find the programs gas/as/gcc and set the variable *AS*
"""
conf.find_program(['gas', 'gcc'], var='AS')
conf.env.AS_TGT_F = ['-c', '-o']
conf.env.ASLNK_TGT_F = ['-o']
conf.find_ar()
conf.load('asm')

+ 0  - 60   waflib/Tools/gdc.py

@@ -1,60 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)

import sys
from waflib.Tools import ar, d
from waflib.Configure import conf

@conf
def find_gdc(conf):
"""
Find the program gdc and set the variable *D*
"""
conf.find_program('gdc', var='D')

out = conf.cmd_and_log(conf.env.D + ['--version'])
if out.find("gdc") == -1:
conf.fatal("detected compiler is not gdc")

@conf
def common_flags_gdc(conf):
"""
Set the flags required by *gdc*
"""
v = conf.env

# _DFLAGS _DIMPORTFLAGS

# for more info about the meaning of this dict see dmd.py
v['DFLAGS'] = []

v['D_SRC_F'] = ['-c']
v['D_TGT_F'] = '-o%s'

# linker
v['D_LINKER'] = v['D']
v['DLNK_SRC_F'] = ''
v['DLNK_TGT_F'] = '-o%s'
v['DINC_ST'] = '-I%s'

v['DSHLIB_MARKER'] = v['DSTLIB_MARKER'] = ''
v['DSTLIB_ST'] = v['DSHLIB_ST'] = '-l%s'
v['DSTLIBPATH_ST'] = v['DLIBPATH_ST'] = '-L%s'

v['LINKFLAGS_dshlib'] = ['-shared']

v['DHEADER_ext'] = '.di'
v.DFLAGS_d_with_header = '-fintfc'
v['D_HDR_F'] = '-fintfc-file=%s'

def configure(conf):
"""
Configuration for gdc
"""
conf.find_gdc()
conf.load('ar')
conf.load('d')
conf.common_flags_gdc()
conf.d_platform_flags()


+ 0  - 90   waflib/Tools/gfortran.py

@@ -1,90 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# DC 2008
# Thomas Nagy 2010 (ita)

import re
from waflib import Utils
from waflib.Tools import fc, fc_config, fc_scan, ar
from waflib.Configure import conf

@conf
def find_gfortran(conf):
"""Find the gfortran program (will look in the environment variable 'FC')"""
fc = conf.find_program(['gfortran','g77'], var='FC')
# (fall back to g77 on systems where no gfortran is available)
conf.get_gfortran_version(fc)
conf.env.FC_NAME = 'GFORTRAN'

@conf
def gfortran_flags(conf):
v = conf.env
v['FCFLAGS_fcshlib'] = ['-fPIC']
v['FORTRANMODFLAG'] = ['-J', ''] # template for module path
v['FCFLAGS_DEBUG'] = ['-Werror'] # why not

@conf
def gfortran_modifier_win32(conf):
fc_config.fortran_modifier_win32(conf)

@conf
def gfortran_modifier_cygwin(conf):
fc_config.fortran_modifier_cygwin(conf)

@conf
def gfortran_modifier_darwin(conf):
fc_config.fortran_modifier_darwin(conf)

@conf
def gfortran_modifier_platform(conf):
dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
gfortran_modifier_func = getattr(conf, 'gfortran_modifier_' + dest_os, None)
if gfortran_modifier_func:
gfortran_modifier_func()

@conf
def get_gfortran_version(conf, fc):
"""Get the compiler version"""

# ensure this is actually gfortran, not an imposter.
version_re = re.compile(r"GNU\s*Fortran", re.I).search
cmd = fc + ['--version']
out, err = fc_config.getoutput(conf, cmd, stdin=False)
if out: match = version_re(out)
else: match = version_re(err)
if not match:
conf.fatal('Could not determine the compiler type')

# --- now get more detailed info -- see c_config.get_cc_version
cmd = fc + ['-dM', '-E', '-']
out, err = fc_config.getoutput(conf, cmd, stdin=True)

if out.find('__GNUC__') < 0:
conf.fatal('Could not determine the compiler type')

k = {}
out = out.splitlines()
import shlex

for line in out:
lst = shlex.split(line)
if len(lst)>2:
key = lst[1]
val = lst[2]
k[key] = val

def isD(var):
return var in k

def isT(var):
return var in k and k[var] != '0'

conf.env['FC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__'])

def configure(conf):
conf.find_gfortran()
conf.find_ar()
conf.fc_flags()
conf.fc_add_flags()
conf.gfortran_flags()
conf.gfortran_modifier_platform()
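
To illustrate the macro parsing above, here is how a single fabricated line of ``gfortran -dM -E -`` output goes through shlex:

import shlex
line = '#define __GNUC__ 4'
lst = shlex.split(line)   # ['#define', '__GNUC__', '4']
# so k['__GNUC__'] == '4', and FC_VERSION is assembled once all lines are read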

+ 0  - 488  waflib/Tools/glib2.py

@@ -1,488 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)

"""
Support for GLib2 tools:

* marshal
* enums
* gsettings
* gresource
"""

import os
from waflib import Context, Task, Utils, Options, Errors, Logs
from waflib.TaskGen import taskgen_method, before_method, after_method, feature, extension
from waflib.Configure import conf

################## marshal files

@taskgen_method
def add_marshal_file(self, filename, prefix):
"""
Add a file to the list of marshal files to process. Store them in the attribute *marshal_list*.

:param filename: xml file to compile
:type filename: string
:param prefix: marshal prefix (--prefix=prefix)
:type prefix: string
"""
if not hasattr(self, 'marshal_list'):
self.marshal_list = []
self.meths.append('process_marshal')
self.marshal_list.append((filename, prefix))

@before_method('process_source')
def process_marshal(self):
"""
Process the marshal files stored in the attribute *marshal_list* to create :py:class:`waflib.Tools.glib2.glib_genmarshal` instances.
Add the c file created to the list of source to process.
"""
for f, prefix in getattr(self, 'marshal_list', []):
node = self.path.find_resource(f)

if not node:
raise Errors.WafError('file not found %r' % f)

h_node = node.change_ext('.h')
c_node = node.change_ext('.c')

task = self.create_task('glib_genmarshal', node, [h_node, c_node])
task.env.GLIB_GENMARSHAL_PREFIX = prefix
self.source = self.to_nodes(getattr(self, 'source', []))
self.source.append(c_node)

class glib_genmarshal(Task.Task):

def run(self):

bld = self.inputs[0].__class__.ctx

get = self.env.get_flat
cmd1 = "%s %s --prefix=%s --header > %s" % (
get('GLIB_GENMARSHAL'),
self.inputs[0].srcpath(),
get('GLIB_GENMARSHAL_PREFIX'),
self.outputs[0].abspath()
)

ret = bld.exec_command(cmd1)
if ret: return ret

#print self.outputs[1].abspath()
c = '''#include "%s"\n''' % self.outputs[0].name
self.outputs[1].write(c)

cmd2 = "%s %s --prefix=%s --body >> %s" % (
get('GLIB_GENMARSHAL'),
self.inputs[0].srcpath(),
get('GLIB_GENMARSHAL_PREFIX'),
self.outputs[1].abspath()
)
return bld.exec_command(cmd2)

vars = ['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL']
color = 'BLUE'
ext_out = ['.h']
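
A hypothetical wscript fragment using the marshal support above (the file and prefix names are made up):

def build(bld):
    tg = bld(features='c cshlib', source='main.c', target='foo')
    # produces marshal.h/marshal.c and appends the generated .c to the sources
    tg.add_marshal_file('marshal.list', 'foo_marshal')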

########################## glib-mkenums

@taskgen_method
def add_enums_from_template(self, source='', target='', template='', comments=''):
"""
Add a file to the list of enum files to process. Store them in the attribute *enums_list*.

:param source: enum file to process
:type source: string
:param target: target file
:type target: string
:param template: template file
:type template: string
:param comments: comments
:type comments: string
"""
if not hasattr(self, 'enums_list'):
self.enums_list = []
self.meths.append('process_enums')
self.enums_list.append({'source': source,
'target': target,
'template': template,
'file-head': '',
'file-prod': '',
'file-tail': '',
'enum-prod': '',
'value-head': '',
'value-prod': '',
'value-tail': '',
'comments': comments})

@taskgen_method
def add_enums(self, source='', target='',
file_head='', file_prod='', file_tail='', enum_prod='',
value_head='', value_prod='', value_tail='', comments=''):
"""
Add a file to the list of enum files to process. Store them in the attribute *enums_list*.

:param source: enum file to process
:type source: string
:param target: target file
:type target: string
:param file_head: text inserted via the ``--fhead`` option
:param file_prod: text inserted via the ``--fprod`` option
:param file_tail: text inserted via the ``--ftail`` option
:param enum_prod: text inserted via the ``--eprod`` option
:param value_head: text inserted via the ``--vhead`` option
:param value_prod: text inserted via the ``--vprod`` option
:param value_tail: text inserted via the ``--vtail`` option
:param comments: comments
:type comments: string
"""
if not hasattr(self, 'enums_list'):
self.enums_list = []
self.meths.append('process_enums')
self.enums_list.append({'source': source,
'template': '',
'target': target,
'file-head': file_head,
'file-prod': file_prod,
'file-tail': file_tail,
'enum-prod': enum_prod,
'value-head': value_head,
'value-prod': value_prod,
'value-tail': value_tail,
'comments': comments})

@before_method('process_source')
def process_enums(self):
"""
Process the enum files stored in the attribute *enums_list* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances.
"""
for enum in getattr(self, 'enums_list', []):
task = self.create_task('glib_mkenums')
env = task.env

inputs = []

# process the source
source_list = self.to_list(enum['source'])
if not source_list:
raise Errors.WafError('missing source ' + str(enum))
source_list = [self.path.find_resource(k) for k in source_list]
inputs += source_list
env['GLIB_MKENUMS_SOURCE'] = [k.abspath() for k in source_list]

# find the target
if not enum['target']:
raise Errors.WafError('missing target ' + str(enum))
tgt_node = self.path.find_or_declare(enum['target'])
if tgt_node.name.endswith('.c'):
self.source.append(tgt_node)
env['GLIB_MKENUMS_TARGET'] = tgt_node.abspath()


options = []

if enum['template']: # template, if provided
template_node = self.path.find_resource(enum['template'])
options.append('--template %s' % (template_node.abspath()))
inputs.append(template_node)
params = {'file-head' : '--fhead',
'file-prod' : '--fprod',
'file-tail' : '--ftail',
'enum-prod' : '--eprod',
'value-head' : '--vhead',
'value-prod' : '--vprod',
'value-tail' : '--vtail',
'comments': '--comments'}
for param, option in params.items():
if enum[param]:
options.append('%s %r' % (option, enum[param]))

env['GLIB_MKENUMS_OPTIONS'] = ' '.join(options)

# update the task instance
task.set_inputs(inputs)
task.set_outputs(tgt_node)

class glib_mkenums(Task.Task):
"""
Process enum files
"""
run_str = '${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}'
color = 'PINK'
ext_out = ['.h']

######################################### gsettings

@taskgen_method
def add_settings_schemas(self, filename_list):
"""
Add settings files to process, add them to *settings_schema_files*

:param filename_list: files
:type filename_list: list of string
"""
if not hasattr(self, 'settings_schema_files'):
self.settings_schema_files = []

if not isinstance(filename_list, list):
filename_list = [filename_list]

self.settings_schema_files.extend(filename_list)

@taskgen_method
def add_settings_enums(self, namespace, filename_list):
"""
This function may be called only once per task generator to set the enums namespace.

:param namespace: namespace
:type namespace: string
:param filename_list: enum files to process
:type filename_list: file list
"""
if hasattr(self, 'settings_enum_namespace'):
raise Errors.WafError("Tried to add gsettings enums to '%s' more than once" % self.name)
self.settings_enum_namespace = namespace

if not isinstance(filename_list, list):
filename_list = [filename_list]
self.settings_enum_files = filename_list

@feature('glib2')
def process_settings(self):
"""
Process the schema files in *settings_schema_files* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances. The
same files are validated through :py:class:`waflib.Tools.glib2.glib_validate_schema` tasks.

"""
enums_tgt_node = []
install_files = []

settings_schema_files = getattr(self, 'settings_schema_files', [])
if settings_schema_files and not self.env['GLIB_COMPILE_SCHEMAS']:
raise Errors.WafError ("Unable to process GSettings schemas - glib-compile-schemas was not found during configure")

# 1. process gsettings_enum_files (generate .enums.xml)
#
if hasattr(self, 'settings_enum_files'):
enums_task = self.create_task('glib_mkenums')

source_list = self.settings_enum_files
source_list = [self.path.find_resource(k) for k in source_list]
enums_task.set_inputs(source_list)
enums_task.env['GLIB_MKENUMS_SOURCE'] = [k.abspath() for k in source_list]

target = self.settings_enum_namespace + '.enums.xml'
tgt_node = self.path.find_or_declare(target)
enums_task.set_outputs(tgt_node)
enums_task.env['GLIB_MKENUMS_TARGET'] = tgt_node.abspath()
enums_tgt_node = [tgt_node]

install_files.append (tgt_node)

options = '--comments "<!-- @comment@ -->" --fhead "<schemalist>" --vhead " <@type@ id=\\"%s.@EnumName@\\">" --vprod " <value nick=\\"@valuenick@\\" value=\\"@valuenum@\\"/>" --vtail " </@type@>" --ftail "</schemalist>" ' % (self.settings_enum_namespace)
enums_task.env['GLIB_MKENUMS_OPTIONS'] = options

# 2. process gsettings_schema_files (validate .gschema.xml files)
#
for schema in settings_schema_files:
schema_task = self.create_task ('glib_validate_schema')

schema_node = self.path.find_resource(schema)
if not schema_node:
raise Errors.WafError("Cannot find the schema file '%s'" % schema)
install_files.append(schema_node)
source_list = enums_tgt_node + [schema_node]

schema_task.set_inputs (source_list)
schema_task.env['GLIB_COMPILE_SCHEMAS_OPTIONS'] = [("--schema-file=" + k.abspath()) for k in source_list]

target_node = schema_node.change_ext('.xml.valid')
schema_task.set_outputs (target_node)
schema_task.env['GLIB_VALIDATE_SCHEMA_OUTPUT'] = target_node.abspath()

# 3. schemas install task
def compile_schemas_callback(bld):
if not bld.is_install: return
Logs.pprint ('YELLOW','Updating GSettings schema cache')
command = Utils.subst_vars("${GLIB_COMPILE_SCHEMAS} ${GSETTINGSSCHEMADIR}", bld.env)
ret = self.bld.exec_command(command)

if self.bld.is_install:
if not self.env['GSETTINGSSCHEMADIR']:
raise Errors.WafError ('GSETTINGSSCHEMADIR not defined (should have been set up automatically during configure)')

if install_files:
self.bld.install_files (self.env['GSETTINGSSCHEMADIR'], install_files)

if not hasattr(self.bld, '_compile_schemas_registered'):
self.bld.add_post_fun (compile_schemas_callback)
self.bld._compile_schemas_registered = True

class glib_validate_schema(Task.Task):
"""
Validate schema files
"""
run_str = 'rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}'
color = 'PINK'
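
A hypothetical usage sketch for the gsettings support above (the schema and namespace names are made up):

def build(bld):
    tg = bld(features='glib2')
    tg.add_settings_schemas(['org.example.app.gschema.xml'])
    tg.add_settings_enums('org.example.app', ['src/app-enums.h'])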

################## gresource

@extension('.gresource.xml')
def process_gresource_source(self, node):
"""
Hook to process .gresource.xml to generate C source files
"""
if not self.env['GLIB_COMPILE_RESOURCES']:
raise Errors.WafError ("Unable to process GResource file - glib-compile-resources was not found during configure")

if 'gresource' in self.features:
return

h_node = node.change_ext('_xml.h')
c_node = node.change_ext('_xml.c')
self.create_task('glib_gresource_source', node, [h_node, c_node])
self.source.append(c_node)

@feature('gresource')
def process_gresource_bundle(self):
"""
Generate binary .gresource files from .gresource.xml files::

def build(bld):
bld(
features='gresource',
source=['resources1.gresource.xml', 'resources2.gresource.xml'],
install_path='${LIBDIR}/${PACKAGE}'
)

:param source: XML files to process
:type source: list of string
:param install_path: installation path
:type install_path: string
"""
for i in self.to_list(self.source):
node = self.path.find_resource(i)

task = self.create_task('glib_gresource_bundle', node, node.change_ext(''))
inst_to = getattr(self, 'install_path', None)
if inst_to:
self.bld.install_files(inst_to, task.outputs)

class glib_gresource_base(Task.Task):
"""
Base class for gresource based tasks, it implements the implicit dependencies scan.
"""
color = 'BLUE'
base_cmd = '${GLIB_COMPILE_RESOURCES} --sourcedir=${SRC[0].parent.srcpath()} --sourcedir=${SRC[0].bld_dir()}'

def scan(self):
"""
Scan gresource dependencies with the ``glib-compile-resources --generate-dependencies`` command
"""
bld = self.generator.bld
kw = {}
try:
if not kw.get('cwd', None):
kw['cwd'] = bld.cwd
except AttributeError:
bld.cwd = kw['cwd'] = bld.variant_dir
kw['quiet'] = Context.BOTH

cmd = Utils.subst_vars('${GLIB_COMPILE_RESOURCES} --sourcedir=%s --sourcedir=%s --generate-dependencies %s' % (
self.inputs[0].parent.srcpath(),
self.inputs[0].bld_dir(),
self.inputs[0].bldpath()
), self.env)

output = bld.cmd_and_log(cmd, **kw)

nodes = []
names = []
for dep in output.splitlines():
if dep:
node = bld.bldnode.find_node(dep)
if node:
nodes.append(node)
else:
names.append(dep)

return (nodes, names)

class glib_gresource_source(glib_gresource_base):
"""
Task to generate C source code (.h and .c files) from a gresource.xml file
"""
vars = ['GLIB_COMPILE_RESOURCES']
fun_h = Task.compile_fun_shell(glib_gresource_base.base_cmd + ' --target=${TGT[0].abspath()} --generate-header ${SRC}')
fun_c = Task.compile_fun_shell(glib_gresource_base.base_cmd + ' --target=${TGT[1].abspath()} --generate-source ${SRC}')
ext_out = ['.h']

def run(self):
return self.fun_h[0](self) or self.fun_c[0](self)

class glib_gresource_bundle(glib_gresource_base):
"""
Task to generate a .gresource binary file from a gresource.xml file
"""
run_str = glib_gresource_base.base_cmd + ' --target=${TGT} ${SRC}'
shell = True # temporary workaround for #795

@conf
def find_glib_genmarshal(conf):
conf.find_program('glib-genmarshal', var='GLIB_GENMARSHAL')

@conf
def find_glib_mkenums(conf):
if not conf.env.PERL:
conf.find_program('perl', var='PERL')
conf.find_program('glib-mkenums', interpreter='PERL', var='GLIB_MKENUMS')

@conf
def find_glib_compile_schemas(conf):
# when cross-compiling, gsettings.m4 locates the program with the following:
# pkg-config --variable glib_compile_schemas gio-2.0
conf.find_program('glib-compile-schemas', var='GLIB_COMPILE_SCHEMAS')

def getstr(varname):
return getattr(Options.options, varname, getattr(conf.env, varname, ''))

# TODO make this dependent on the gnu_dirs tool?
gsettingsschemadir = getstr('GSETTINGSSCHEMADIR')
if not gsettingsschemadir:
datadir = getstr('DATADIR')
if not datadir:
prefix = conf.env['PREFIX']
datadir = os.path.join(prefix, 'share')
gsettingsschemadir = os.path.join(datadir, 'glib-2.0', 'schemas')

conf.env['GSETTINGSSCHEMADIR'] = gsettingsschemadir

@conf
def find_glib_compile_resources(conf):
conf.find_program('glib-compile-resources', var='GLIB_COMPILE_RESOURCES')

def configure(conf):
"""
Find the following programs:

* *glib-genmarshal* and set *GLIB_GENMARSHAL*
* *glib-mkenums* and set *GLIB_MKENUMS*
* *glib-compile-schemas* and set *GLIB_COMPILE_SCHEMAS* (not mandatory)
* *glib-compile-resources* and set *GLIB_COMPILE_RESOURCES* (not mandatory)

And set the variable *GSETTINGSSCHEMADIR*
"""
conf.find_glib_genmarshal()
conf.find_glib_mkenums()
conf.find_glib_compile_schemas(mandatory=False)
conf.find_glib_compile_resources(mandatory=False)

def options(opt):
"""
Add the ``--gsettingsschemadir`` command-line option
"""
gr = opt.add_option_group('Installation directories')
gr.add_option('--gsettingsschemadir', help='GSettings schema location [DATADIR/glib-2.0/schemas]', default='', dest='GSETTINGSSCHEMADIR')


+ 0  - 131  waflib/Tools/gnu_dirs.py

@@ -1,131 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Ali Sabil, 2007

"""
Sets various standard variables such as INCLUDEDIR, SBINDIR and others. To use this module just call::

opt.load('gnu_dirs')

and::

conf.load('gnu_dirs')

Add options for the standard GNU directories, this tool will add the options
found in autotools, and will update the environment with the following
installation variables:

============== ========================================= =======================
Variable Description Default Value
============== ========================================= =======================
PREFIX installation prefix /usr/local
EXEC_PREFIX installation prefix for binaries PREFIX
BINDIR user commands EXEC_PREFIX/bin
SBINDIR system binaries EXEC_PREFIX/sbin
LIBEXECDIR program-specific binaries EXEC_PREFIX/libexec
SYSCONFDIR host-specific configuration PREFIX/etc
SHAREDSTATEDIR architecture-independent variable data PREFIX/com
LOCALSTATEDIR variable data PREFIX/var
LIBDIR object code libraries EXEC_PREFIX/lib
INCLUDEDIR header files PREFIX/include
OLDINCLUDEDIR header files for non-GCC compilers /usr/include
DATAROOTDIR architecture-independent data root PREFIX/share
DATADIR architecture-independent data DATAROOTDIR
INFODIR GNU "info" documentation DATAROOTDIR/info
LOCALEDIR locale-dependent data DATAROOTDIR/locale
MANDIR manual pages DATAROOTDIR/man
DOCDIR documentation root DATAROOTDIR/doc/APPNAME
HTMLDIR HTML documentation DOCDIR
DVIDIR DVI documentation DOCDIR
PDFDIR PDF documentation DOCDIR
PSDIR PostScript documentation DOCDIR
============== ========================================= =======================
"""

import os, re
from waflib import Utils, Options, Context

gnuopts = '''
bindir, user commands, ${EXEC_PREFIX}/bin
sbindir, system binaries, ${EXEC_PREFIX}/sbin
libexecdir, program-specific binaries, ${EXEC_PREFIX}/libexec
sysconfdir, host-specific configuration, ${PREFIX}/etc
sharedstatedir, architecture-independent variable data, ${PREFIX}/com
localstatedir, variable data, ${PREFIX}/var
libdir, object code libraries, ${EXEC_PREFIX}/lib%s
includedir, header files, ${PREFIX}/include
oldincludedir, header files for non-GCC compilers, /usr/include
datarootdir, architecture-independent data root, ${PREFIX}/share
datadir, architecture-independent data, ${DATAROOTDIR}
infodir, GNU "info" documentation, ${DATAROOTDIR}/info
localedir, locale-dependent data, ${DATAROOTDIR}/locale
mandir, manual pages, ${DATAROOTDIR}/man
docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
htmldir, HTML documentation, ${DOCDIR}
dvidir, DVI documentation, ${DOCDIR}
pdfdir, PDF documentation, ${DOCDIR}
psdir, PostScript documentation, ${DOCDIR}
''' % Utils.lib64()

_options = [x.split(', ') for x in gnuopts.splitlines() if x]

def configure(conf):
"""
Read the command-line options to set lots of variables in *conf.env*. The variables
BINDIR and LIBDIR will be overwritten.
"""
def get_param(varname, default):
return getattr(Options.options, varname, '') or default

env = conf.env
env.LIBDIR = env.BINDIR = []
env.EXEC_PREFIX = get_param('EXEC_PREFIX', env.PREFIX)
env.PACKAGE = getattr(Context.g_module, 'APPNAME', None) or env.PACKAGE

complete = False
counter = 0
while not complete and counter < len(_options) + 1:
counter += 1
complete = True
for name, help, default in _options:
name = name.upper()
if not env[name]:
try:
env[name] = Utils.subst_vars(get_param(name, default).replace('/', os.sep), env)
except TypeError:
complete = False

if not complete:
lst = [x for x, _, _ in _options if not env[x.upper()]]
raise conf.errors.WafError('Variable substitution failure %r' % lst)
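
For reference, with the stock defaults the templates above expand along these lines (PREFIX and APPNAME are assumed values):

# PREFIX      = '/usr/local', PACKAGE = 'myapp'
# EXEC_PREFIX = '${PREFIX}'                     -> '/usr/local'
# LIBDIR      = '${EXEC_PREFIX}/lib'            -> '/usr/local/lib'
# DATAROOTDIR = '${PREFIX}/share'               -> '/usr/local/share'
# DOCDIR      = '${DATAROOTDIR}/doc/${PACKAGE}' -> '/usr/local/share/doc/myapp'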

def options(opt):
"""
Add lots of command-line options, for example::

--exec-prefix: EXEC_PREFIX
"""
inst_dir = opt.add_option_group('Installation prefix',
'By default, "waf install" will put the files in\
"/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')

for k in ('--prefix', '--destdir'):
option = opt.parser.get_option(k)
if option:
opt.parser.remove_option(k)
inst_dir.add_option(option)

inst_dir.add_option('--exec-prefix',
help = 'installation prefix for binaries [PREFIX]',
default = '',
dest = 'EXEC_PREFIX')

dirs_options = opt.add_option_group('Installation directories')

for name, help, default in _options:
option_name = '--' + name
str_default = default
str_help = '%s [%s]' % (help, re.sub(r'\$\{([^}]+)\}', r'\1', str_default))
dirs_options.add_option(option_name, help=str_help, default='', dest=name.upper())


+ 0  - 60   waflib/Tools/ifort.py

@@ -1,60 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# DC 2008
# Thomas Nagy 2010 (ita)

import re
from waflib import Utils
from waflib.Tools import fc, fc_config, fc_scan, ar
from waflib.Configure import conf

@conf
def find_ifort(conf):
fc = conf.find_program('ifort', var='FC')
conf.get_ifort_version(fc)
conf.env.FC_NAME = 'IFORT'

@conf
def ifort_modifier_cygwin(conf):
raise NotImplementedError("Ifort on cygwin not yet implemented")

@conf
def ifort_modifier_win32(conf):
fc_config.fortran_modifier_win32(conf)

@conf
def ifort_modifier_darwin(conf):
fc_config.fortran_modifier_darwin(conf)

@conf
def ifort_modifier_platform(conf):
dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
ifort_modifier_func = getattr(conf, 'ifort_modifier_' + dest_os, None)
if ifort_modifier_func:
ifort_modifier_func()

@conf
def get_ifort_version(conf, fc):
"""get the compiler version"""

version_re = re.compile(r"Intel[\sa-zA-Z()0-9,-]*Version\s*(?P<major>\d*)\.(?P<minor>\d*)",re.I).search
if Utils.is_win32:
cmd = fc
else:
cmd = fc + ['-logo']

out, err = fc_config.getoutput(conf, cmd, stdin=False)
match = version_re(out) or version_re(err)
if not match:
conf.fatal('cannot determine ifort version.')
k = match.groupdict()
conf.env['FC_VERSION'] = (k['major'], k['minor'])

def configure(conf):
conf.find_ifort()
conf.find_program('xiar', var='AR')
conf.env.ARFLAGS = 'rcs'
conf.fc_flags()
conf.fc_add_flags()
conf.ifort_modifier_platform()


+ 0  - 220  waflib/Tools/intltool.py

@@ -1,220 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)

"""
Support for translation tools such as msgfmt and intltool

Usage::

def configure(conf):
conf.load('gnu_dirs intltool')

def build(bld):
# process the .po files into .gmo files, and install them in LOCALEDIR
bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}")

# process an input file, substituting the translations from the po dir
bld(
features = "intltool_in",
podir = "../po",
style = "desktop",
flags = ["-u"],
source = 'kupfer.desktop.in',
install_path = "${DATADIR}/applications",
)

Usage of the :py:mod:`waflib.Tools.gnu_dirs` is recommended, but not obligatory.
"""

import os, re
from waflib import Configure, Context, TaskGen, Task, Utils, Runner, Options, Build, Logs
import waflib.Tools.ccroot
from waflib.TaskGen import feature, before_method, taskgen_method
from waflib.Logs import error
from waflib.Configure import conf

_style_flags = {
'ba': '-b',
'desktop': '-d',
'keys': '-k',
'quoted': '--quoted-style',
'quotedxml': '--quotedxml-style',
'rfc822deb': '-r',
'schemas': '-s',
'xml': '-x',
}

@taskgen_method
def ensure_localedir(self):
"""
Expand LOCALEDIR from DATAROOTDIR/locale if possible, or fallback to PREFIX/share/locale
"""
# use the tool gnu_dirs to provide options to define this
if not self.env.LOCALEDIR:
if self.env.DATAROOTDIR:
self.env.LOCALEDIR = os.path.join(self.env.DATAROOTDIR, 'locale')
else:
self.env.LOCALEDIR = os.path.join(self.env.PREFIX, 'share', 'locale')

@before_method('process_source')
@feature('intltool_in')
def apply_intltool_in_f(self):
"""
Create tasks to translate files by intltool-merge::

def build(bld):
bld(
features = "intltool_in",
podir = "../po",
style = "desktop",
flags = ["-u"],
source = 'kupfer.desktop.in',
install_path = "${DATADIR}/applications",
)

:param podir: location of the .po files
:type podir: string
:param source: source files to process
:type source: list of string
:param style: the intltool-merge mode of operation, can be one of the following values:
``ba``, ``desktop``, ``keys``, ``quoted``, ``quotedxml``, ``rfc822deb``, ``schemas`` and ``xml``.
See the ``intltool-merge`` man page for more information about supported modes of operation.
:type style: string
:param flags: compilation flags ("-quc" by default)
:type flags: list of string
:param install_path: installation path
:type install_path: string
"""
try: self.meths.remove('process_source')
except ValueError: pass

self.ensure_localedir()

podir = getattr(self, 'podir', '.')
podirnode = self.path.find_dir(podir)
if not podirnode:
error("could not find the podir %r" % podir)
return

cache = getattr(self, 'intlcache', '.intlcache')
self.env.INTLCACHE = [os.path.join(str(self.path.get_bld()), podir, cache)]
self.env.INTLPODIR = podirnode.bldpath()
self.env.append_value('INTLFLAGS', getattr(self, 'flags', self.env.INTLFLAGS_DEFAULT))

if '-c' in self.env.INTLFLAGS:
self.bld.fatal('Redundant -c flag in intltool task %r' % self)

style = getattr(self, 'style', None)
if style:
try:
style_flag = _style_flags[style]
except KeyError:
self.bld.fatal('intltool_in style "%s" is not valid' % style)

self.env.append_unique('INTLFLAGS', [style_flag])

for i in self.to_list(self.source):
node = self.path.find_resource(i)

task = self.create_task('intltool', node, node.change_ext(''))
inst = getattr(self, 'install_path', None)
if inst:
self.bld.install_files(inst, task.outputs)

@feature('intltool_po')
def apply_intltool_po(self):
"""
Create tasks to process po files::

def build(bld):
bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}")

The relevant task generator arguments are:

:param podir: directory of the .po files
:type podir: string
:param appname: name of the application
:type appname: string
:param install_path: installation directory
:type install_path: string

The file LINGUAS must be present in the directory pointed by *podir* and list the translation files to process.
"""
try: self.meths.remove('process_source')
except ValueError: pass

self.ensure_localedir()

appname = getattr(self, 'appname', getattr(Context.g_module, Context.APPNAME, 'set_your_app_name'))
podir = getattr(self, 'podir', '.')
inst = getattr(self, 'install_path', '${LOCALEDIR}')

linguas = self.path.find_node(os.path.join(podir, 'LINGUAS'))
if linguas:
# scan LINGUAS file for locales to process
file = open(linguas.abspath())
langs = []
for line in file.readlines():
# ignore lines containing comments
if not line.startswith('#'):
langs += line.split()
file.close()
re_linguas = re.compile('[-a-zA-Z_@.]+')
for lang in langs:
# Make sure that we only process lines which contain locales
if re_linguas.match(lang):
node = self.path.find_resource(os.path.join(podir, re_linguas.match(lang).group() + '.po'))
task = self.create_task('po', node, node.change_ext('.mo'))

if inst:
filename = task.outputs[0].name
(langname, ext) = os.path.splitext(filename)
inst_file = inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + appname + '.mo'
self.bld.install_as(inst_file, task.outputs[0], chmod=getattr(self, 'chmod', Utils.O644), env=task.env)

else:
Logs.pprint('RED', "Error: no LINGUAS file found in the po directory")
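
For reference, a LINGUAS file is a plain list of locale names, one or more per line, as in this fabricated example:

# LINGUAS
#   # comment lines are skipped by the loop above
#   de
#   fr pt_BR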

class po(Task.Task):
"""
Compile .po files into .gmo files
"""
run_str = '${MSGFMT} -o ${TGT} ${SRC}'
color = 'BLUE'

class intltool(Task.Task):
"""
Let intltool-merge translate an input file
"""
run_str = '${INTLTOOL} ${INTLFLAGS} ${INTLCACHE_ST:INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}'
color = 'BLUE'

@conf
def find_msgfmt(conf):
conf.find_program('msgfmt', var='MSGFMT')

@conf
def find_intltool_merge(conf):
if not conf.env.PERL:
conf.find_program('perl', var='PERL')
conf.env.INTLCACHE_ST = '--cache=%s'
conf.env.INTLFLAGS_DEFAULT = ['-q', '-u']
conf.find_program('intltool-merge', interpreter='PERL', var='INTLTOOL')

def configure(conf):
"""
Detect the program *msgfmt* and set *conf.env.MSGFMT*.
Detect the program *intltool-merge* and set *conf.env.INTLTOOL*.
It is possible to set INTLTOOL in the environment, but it must not have spaces in it::

$ INTLTOOL="/path/to/the program/intltool" waf configure

If a C/C++ compiler is present, execute a compilation test to find the header *locale.h*.
"""
conf.find_msgfmt()
conf.find_intltool_merge()

if conf.env.CC or conf.env.CXX:
conf.check(header_name='locale.h')


+ 0  - 477  waflib/Tools/javaw.py

@@ -1,477 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)

"""
Java support

Javac is one of the few compilers that behaves very badly:

#. it outputs files where it wants to (-d is only for the package root)

#. it recompiles files silently behind your back

#. it outputs an undefined amount of files (inner classes)

Remember that the compilation can be performed using Jython[1] rather than regular Python. Instead of
running one of the following commands::

./waf configure
python waf configure

You would have to run::

java -jar /path/to/jython.jar waf configure

[1] http://www.jython.org/
"""

import os, re, tempfile, shutil
from waflib import TaskGen, Task, Utils, Options, Build, Errors, Node, Logs
from waflib.Configure import conf
from waflib.TaskGen import feature, before_method, after_method

from waflib.Tools import ccroot
ccroot.USELIB_VARS['javac'] = set(['CLASSPATH', 'JAVACFLAGS'])


SOURCE_RE = '**/*.java'
JAR_RE = '**/*'

class_check_source = '''
public class Test {
public static void main(String[] argv) {
Class lib;
if (argv.length < 1) {
System.err.println("Missing argument");
System.exit(77);
}
try {
lib = Class.forName(argv[0]);
} catch (ClassNotFoundException e) {
System.err.println("ClassNotFoundException");
System.exit(1);
}
lib = null;
System.exit(0);
}
}
'''

@feature('javac')
@before_method('process_source')
def apply_java(self):
"""
Create a javac task for compiling *.java* files. There can be
only one javac task per task generator.
"""
Utils.def_attrs(self, jarname='', classpath='',
sourcepath='.', srcdir='.',
jar_mf_attributes={}, jar_mf_classpath=[])

nodes_lst = []

outdir = getattr(self, 'outdir', None)
if outdir:
if not isinstance(outdir, Node.Node):
outdir = self.path.get_bld().make_node(self.outdir)
else:
outdir = self.path.get_bld()
outdir.mkdir()
self.outdir = outdir
self.env['OUTDIR'] = outdir.abspath()

self.javac_task = tsk = self.create_task('javac')
tmp = []

srcdir = getattr(self, 'srcdir', '')
if isinstance(srcdir, Node.Node):
srcdir = [srcdir]
for x in Utils.to_list(srcdir):
if isinstance(x, Node.Node):
y = x
else:
y = self.path.find_dir(x)
if not y:
self.bld.fatal('Could not find the folder %s from %s' % (x, self.path))
tmp.append(y)
tsk.srcdir = tmp

if getattr(self, 'compat', None):
tsk.env.append_value('JAVACFLAGS', ['-source', self.compat])

if hasattr(self, 'sourcepath'):
fold = [isinstance(x, Node.Node) and x or self.path.find_dir(x) for x in self.to_list(self.sourcepath)]
names = os.pathsep.join([x.srcpath() for x in fold])
else:
names = [x.srcpath() for x in tsk.srcdir]

if names:
tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names])

@feature('javac')
@after_method('apply_java')
def use_javac_files(self):
"""
Process the *use* attribute referring to other java compilations
"""
lst = []
self.uselib = self.to_list(getattr(self, 'uselib', []))
names = self.to_list(getattr(self, 'use', []))
get = self.bld.get_tgen_by_name
for x in names:
try:
y = get(x)
except Exception:
self.uselib.append(x)
else:
y.post()
lst.append(y.jar_task.outputs[0].abspath())
self.javac_task.set_run_after(y.jar_task)

if lst:
self.env.append_value('CLASSPATH', lst)

@feature('javac')
@after_method('apply_java', 'propagate_uselib_vars', 'use_javac_files')
def set_classpath(self):
"""
Set the CLASSPATH value on the *javac* task previously created.
"""
self.env.append_value('CLASSPATH', getattr(self, 'classpath', []))
for x in self.tasks:
x.env.CLASSPATH = os.pathsep.join(self.env.CLASSPATH) + os.pathsep

@feature('jar')
@after_method('apply_java', 'use_javac_files')
@before_method('process_source')
def jar_files(self):
"""
Create a jar task. There can be only one jar task per task generator.
"""
destfile = getattr(self, 'destfile', 'test.jar')
jaropts = getattr(self, 'jaropts', [])
manifest = getattr(self, 'manifest', None)

basedir = getattr(self, 'basedir', None)
if basedir:
if not isinstance(self.basedir, Node.Node):
basedir = self.path.get_bld().make_node(basedir)
else:
basedir = self.path.get_bld()
if not basedir:
self.bld.fatal('Could not find the basedir %r for %r' % (self.basedir, self))

self.jar_task = tsk = self.create_task('jar_create')
if manifest:
jarcreate = getattr(self, 'jarcreate', 'cfm')
node = self.path.find_node(manifest)
tsk.dep_nodes.append(node)
jaropts.insert(0, node.abspath())
else:
jarcreate = getattr(self, 'jarcreate', 'cf')
if not isinstance(destfile, Node.Node):
destfile = self.path.find_or_declare(destfile)
if not destfile:
self.bld.fatal('invalid destfile %r for %r' % (destfile, self))
tsk.set_outputs(destfile)
tsk.basedir = basedir

jaropts.append('-C')
jaropts.append(basedir.bldpath())
jaropts.append('.')

tsk.env['JAROPTS'] = jaropts
tsk.env['JARCREATE'] = jarcreate

if getattr(self, 'javac_task', None):
tsk.set_run_after(self.javac_task)

@feature('jar')
@after_method('jar_files')
def use_jar_files(self):
"""
Process the *use* attribute to set the build order on the
tasks created by another task generator.
"""
lst = []
self.uselib = self.to_list(getattr(self, 'uselib', []))
names = self.to_list(getattr(self, 'use', []))
get = self.bld.get_tgen_by_name
for x in names:
try:
y = get(x)
except Exception:
self.uselib.append(x)
else:
y.post()
self.jar_task.run_after.update(y.tasks)

class jar_create(Task.Task):
"""
Create a jar file
"""
color = 'GREEN'
run_str = '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}'

def runnable_status(self):
"""
Wait for dependent tasks to be executed, then read the
files to update the list of inputs.
"""
for t in self.run_after:
if not t.hasrun:
return Task.ASK_LATER
if not self.inputs:
global JAR_RE
try:
self.inputs = [x for x in self.basedir.ant_glob(JAR_RE, remove=False) if id(x) != id(self.outputs[0])]
except Exception:
raise Errors.WafError('Could not find the basedir %r for %r' % (self.basedir, self))
return super(jar_create, self).runnable_status()

class javac(Task.Task):
"""
Compile java files
"""
color = 'BLUE'

vars = ['CLASSPATH', 'JAVACFLAGS', 'JAVAC', 'OUTDIR']
"""
The javac task will be executed again if the variables CLASSPATH, JAVACFLAGS, JAVAC or OUTDIR change.
"""

def runnable_status(self):
"""
Wait for dependent tasks to be complete, then read the file system to find the input nodes.
"""
for t in self.run_after:
if not t.hasrun:
return Task.ASK_LATER

if not self.inputs:
global SOURCE_RE
self.inputs = []
for x in self.srcdir:
self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False))
return super(javac, self).runnable_status()

def run(self):
"""
Execute the javac compiler
"""
env = self.env
gen = self.generator
bld = gen.bld
wd = bld.bldnode.abspath()
def to_list(xx):
if isinstance(xx, str): return [xx]
return xx
cmd = []
cmd.extend(to_list(env['JAVAC']))
cmd.extend(['-classpath'])
cmd.extend(to_list(env['CLASSPATH']))
cmd.extend(['-d'])
cmd.extend(to_list(env['OUTDIR']))
cmd.extend(to_list(env['JAVACFLAGS']))

files = [a.path_from(bld.bldnode) for a in self.inputs]

# workaround for command line length limit:
# http://support.microsoft.com/kb/830473
tmp = None
try:
if len(str(files)) + len(str(cmd)) > 8192:
(fd, tmp) = tempfile.mkstemp(dir=bld.bldnode.abspath())
try:
os.write(fd, '\n'.join(files).encode())
finally:
if tmp:
os.close(fd)
if Logs.verbose:
Logs.debug('runner: %r' % (cmd + files))
cmd.append('@' + tmp)
else:
cmd += files

ret = self.exec_command(cmd, cwd=wd, env=env.env or None)
finally:
if tmp:
os.remove(tmp)
return ret

def post_run(self):
"""
"""
for n in self.generator.outdir.ant_glob('**/*.class'):
n.sig = Utils.h_file(n.abspath()) # careful with this
self.generator.bld.task_sigs[self.uid()] = self.cache_sig

@feature('javadoc')
@after_method('process_rule')
def create_javadoc(self):
"""
Creates a javadoc task (feature 'javadoc')
"""
tsk = self.create_task('javadoc')
tsk.classpath = getattr(self, 'classpath', [])
self.javadoc_package = Utils.to_list(self.javadoc_package)
if not isinstance(self.javadoc_output, Node.Node):
self.javadoc_output = self.bld.path.find_or_declare(self.javadoc_output)

class javadoc(Task.Task):
color = 'BLUE'

def __str__(self):
return '%s: %s -> %s\n' % (self.__class__.__name__, self.generator.srcdir, self.generator.javadoc_output)

def run(self):
env = self.env
bld = self.generator.bld
wd = bld.bldnode.abspath()

#add src node + bld node (for generated java code)
srcpath = self.generator.path.abspath() + os.sep + self.generator.srcdir
srcpath += os.pathsep
srcpath += self.generator.path.get_bld().abspath() + os.sep + self.generator.srcdir

classpath = env.CLASSPATH
classpath += os.pathsep
classpath += os.pathsep.join(self.classpath)
classpath = "".join(classpath)

self.last_cmd = lst = []
lst.extend(Utils.to_list(env['JAVADOC']))
lst.extend(['-d', self.generator.javadoc_output.abspath()])
lst.extend(['-sourcepath', srcpath])
lst.extend(['-classpath', classpath])
lst.extend(['-subpackages'])
lst.extend(self.generator.javadoc_package)
lst = [x for x in lst if x]

self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None, quiet=0)

def post_run(self):
nodes = self.generator.javadoc_output.ant_glob('**')
for x in nodes:
x.sig = Utils.h_file(x.abspath())
self.generator.bld.task_sigs[self.uid()] = self.cache_sig

def configure(self):
"""
Detect the javac, java and jar programs
"""
# If JAVA_PATH is set, we prepend it to the path list
java_path = self.environ['PATH'].split(os.pathsep)
v = self.env

if 'JAVA_HOME' in self.environ:
java_path = [os.path.join(self.environ['JAVA_HOME'], 'bin')] + java_path
self.env['JAVA_HOME'] = [self.environ['JAVA_HOME']]

for x in 'javac java jar javadoc'.split():
self.find_program(x, var=x.upper(), path_list=java_path)

if 'CLASSPATH' in self.environ:
v['CLASSPATH'] = self.environ['CLASSPATH']

if not v['JAR']: self.fatal('jar is required for making java packages')
if not v['JAVAC']: self.fatal('javac is required for compiling java classes')

v['JARCREATE'] = 'cf' # can use cvf
v['JAVACFLAGS'] = []

@conf
def check_java_class(self, classname, with_classpath=None):
"""
Check if the specified java class exists

:param classname: class to check, like java.util.HashMap
:type classname: string
:param with_classpath: additional classpath to give
:type with_classpath: string
"""

javatestdir = '.waf-javatest'

classpath = javatestdir
if self.env['CLASSPATH']:
classpath += os.pathsep + self.env['CLASSPATH']
if isinstance(with_classpath, str):
classpath += os.pathsep + with_classpath

shutil.rmtree(javatestdir, True)
os.mkdir(javatestdir)

Utils.writef(os.path.join(javatestdir, 'Test.java'), class_check_source)

# Compile the source
self.exec_command(self.env['JAVAC'] + [os.path.join(javatestdir, 'Test.java')], shell=False)

# Try to run the app
cmd = self.env['JAVA'] + ['-cp', classpath, 'Test', classname]
self.to_log("%s\n" % str(cmd))
found = self.exec_command(cmd, shell=False)

self.msg('Checking for java class %s' % classname, not found)

shutil.rmtree(javatestdir, True)

return found

@conf
def check_jni_headers(conf):
"""
Check for jni headers and libraries. On success the conf.env variables xxx_JAVA are added for use in C/C++ targets::

def options(opt):
opt.load('compiler_c')

def configure(conf):
conf.load('compiler_c java')
conf.check_jni_headers()

def build(bld):
bld.shlib(source='a.c', target='app', use='JAVA')
"""

if not conf.env.CC_NAME and not conf.env.CXX_NAME:
conf.fatal('load a compiler first (gcc, g++, ..)')

if not conf.env.JAVA_HOME:
conf.fatal('set JAVA_HOME in the system environment')

# jni requires the jvm
javaHome = conf.env['JAVA_HOME'][0]

dir = conf.root.find_dir(conf.env.JAVA_HOME[0] + '/include')
if dir is None:
dir = conf.root.find_dir(conf.env.JAVA_HOME[0] + '/../Headers') # think different?!
if dir is None:
conf.fatal('JAVA_HOME does not seem to be set properly')

f = dir.ant_glob('**/(jni|jni_md).h')
incDirs = [x.parent.abspath() for x in f]

dir = conf.root.find_dir(conf.env.JAVA_HOME[0])
f = dir.ant_glob('**/*jvm.(so|dll|dylib)')
libDirs = [x.parent.abspath() for x in f] or [javaHome]

# On windows, we need both the .dll and .lib to link. On my JDK, they are
# in different directories...
f = dir.ant_glob('**/*jvm.(lib)')
if f:
libDirs = [[x, y.parent.abspath()] for x in libDirs for y in f]

for d in libDirs:
try:
conf.check(header_name='jni.h', define_name='HAVE_JNI_H', lib='jvm',
libpath=d, includes=incDirs, uselib_store='JAVA', uselib='JAVA')
except Exception:
pass
else:
break
else:
conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs)
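
A hypothetical wscript tying the javac and jar features together (all names are made up):

def configure(conf):
    conf.load('java')

def build(bld):
    bld(features='javac jar',
        srcdir='src',          # scanned recursively for **/*.java
        outdir='classes',      # package root handed to javac -d
        basedir='classes',     # directory bundled by the jar task
        destfile='app.jar')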



+ 0  - 90   waflib/Tools/kde4.py

@@ -1,90 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)

"""
Support for the KDE4 libraries and msgfmt
"""

import os, sys, re
from waflib import Options, TaskGen, Task, Utils
from waflib.TaskGen import feature, after_method

@feature('msgfmt')
def apply_msgfmt(self):
"""
Process all languages to create .mo files and to install them::

def build(bld):
bld(features='msgfmt', langs='es de fr', appname='myapp', install_path='${KDE4_LOCALE_INSTALL_DIR}')
"""
for lang in self.to_list(self.langs):
node = self.path.find_resource(lang+'.po')
task = self.create_task('msgfmt', node, node.change_ext('.mo'))

langname = lang.split('/')
langname = langname[-1]

inst = getattr(self, 'install_path', '${KDE4_LOCALE_INSTALL_DIR}')

self.bld.install_as(
inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + getattr(self, 'appname', 'set_your_appname') + '.mo',
task.outputs[0],
chmod = getattr(self, 'chmod', Utils.O644))

class msgfmt(Task.Task):
"""
Transform .po files into .mo files
"""
color = 'BLUE'
run_str = '${MSGFMT} ${SRC} -o ${TGT}'

def configure(self):
"""
Detect kde4-config and set various variables for the *use* system::

def options(opt):
opt.load('compiler_cxx kde4')
def configure(conf):
conf.load('compiler_cxx kde4')
def build(bld):
bld.program(source='main.c', target='app', use='KDECORE KIO KHTML')
"""
kdeconfig = self.find_program('kde4-config')
prefix = self.cmd_and_log(kdeconfig + ['--prefix']).strip()
fname = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
try: os.stat(fname)
except OSError:
fname = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
try: os.stat(fname)
except OSError: self.fatal('could not open %s' % fname)

try:
txt = Utils.readf(fname)
except EnvironmentError:
self.fatal('could not read %s' % fname)

txt = txt.replace('\\\n', '\n')
fu = re.compile('#(.*)\n')
txt = fu.sub('', txt)

setregexp = re.compile(r'([sS][eE][tT]\s*\()\s*([^\s]+)\s+"([^"]+)"\)')
found = setregexp.findall(txt)

for (_, key, val) in found:
#print key, val
self.env[key] = val

# well well, i could just write an interpreter for cmake files
self.env['LIB_KDECORE']= ['kdecore']
self.env['LIB_KDEUI'] = ['kdeui']
self.env['LIB_KIO'] = ['kio']
self.env['LIB_KHTML'] = ['khtml']
self.env['LIB_KPARTS'] = ['kparts']

self.env['LIBPATH_KDECORE'] = [os.path.join(self.env.KDE4_LIB_INSTALL_DIR, 'kde4', 'devel'), self.env.KDE4_LIB_INSTALL_DIR]
self.env['INCLUDES_KDECORE'] = [self.env['KDE4_INCLUDE_INSTALL_DIR']]
self.env.append_value('INCLUDES_KDECORE', [self.env['KDE4_INCLUDE_INSTALL_DIR']+ os.sep + 'KDE'])

self.find_program('msgfmt', var='MSGFMT')
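
As an illustration, the regex above turns cmake assignments such as the following fabricated line into env entries:

# set(KDE4_INCLUDE_INSTALL_DIR "/usr/include/kde4")
# -> self.env['KDE4_INCLUDE_INSTALL_DIR'] = '/usr/include/kde4'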


+ 0  - 59   waflib/Tools/ldc2.py

@@ -1,59 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Alex Rønne Petersen, 2012 (alexrp/Zor)

import sys
from waflib.Tools import ar, d
from waflib.Configure import conf

@conf
def find_ldc2(conf):
"""
Find the program *ldc2* and set the variable *D*
"""

conf.find_program(['ldc2'], var='D')

out = conf.cmd_and_log(conf.env.D + ['-version'])
if out.find("based on DMD v2.") == -1:
conf.fatal("detected compiler is not ldc2")

@conf
def common_flags_ldc2(conf):
"""
Set the D flags required by *ldc2*
"""

v = conf.env

v['D_SRC_F'] = ['-c']
v['D_TGT_F'] = '-of%s'

v['D_LINKER'] = v['D']
v['DLNK_SRC_F'] = ''
v['DLNK_TGT_F'] = '-of%s'
v['DINC_ST'] = '-I%s'

v['DSHLIB_MARKER'] = v['DSTLIB_MARKER'] = ''
v['DSTLIB_ST'] = v['DSHLIB_ST'] = '-L-l%s'
v['DSTLIBPATH_ST'] = v['DLIBPATH_ST'] = '-L-L%s'

v['LINKFLAGS_dshlib'] = ['-L-shared']

v['DHEADER_ext'] = '.di'
v['DFLAGS_d_with_header'] = ['-H', '-Hf']
v['D_HDR_F'] = '%s'

v['LINKFLAGS'] = []
v['DFLAGS_dshlib'] = ['-relocation-model=pic']

def configure(conf):
"""
Configuration for *ldc2*
"""

conf.find_ldc2()
conf.load('ar')
conf.load('d')
conf.common_flags_ldc2()
conf.d_platform_flags()

+ 0  - 38   waflib/Tools/lua.py

@@ -1,38 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Sebastian Schlingmann, 2008
# Thomas Nagy, 2008-2010 (ita)

"""
Lua support.

Compile *.lua* files into *.luac*::

def configure(conf):
conf.load('lua')
conf.env.LUADIR = '/usr/local/share/myapp/scripts/'
def build(bld):
bld(source='foo.lua')
"""

from waflib.TaskGen import extension
from waflib import Task, Utils

@extension('.lua')
def add_lua(self, node):
tsk = self.create_task('luac', node, node.change_ext('.luac'))
inst_to = getattr(self, 'install_path', self.env.LUADIR and '${LUADIR}' or None)
if inst_to:
self.bld.install_files(inst_to, tsk.outputs)
return tsk

class luac(Task.Task):
run_str = '${LUAC} -s -o ${TGT} ${SRC}'
color = 'PINK'

def configure(conf):
"""
Detect the luac compiler and set *conf.env.LUAC*
"""
conf.find_program('luac', var='LUAC')


+ 0  - 26   waflib/Tools/nasm.py

@@ -1,26 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2008-2010 (ita)

"""
Nasm tool (asm processing)
"""

import os
import waflib.Tools.asm # leave this
from waflib.TaskGen import feature

@feature('asm')
def apply_nasm_vars(self):
"""provided for compatibility"""
self.env.append_value('ASFLAGS', self.to_list(getattr(self, 'nasm_flags', [])))

def configure(conf):
"""
Detect nasm/yasm and set the variable *AS*
"""
nasm = conf.find_program(['nasm', 'yasm'], var='AS')
conf.env.AS_TGT_F = ['-o']
conf.env.ASLNK_TGT_F = ['-o']
conf.load('asm')
conf.env.ASMPATH_ST = '-I%s' + os.sep

+ 0  - 163  waflib/Tools/perl.py

@@ -1,163 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# andersg at 0x63.nu 2007
# Thomas Nagy 2010 (ita)

"""
Support for Perl extensions. A C/C++ compiler is required::

def options(opt):
opt.load('compiler_c perl')
def configure(conf):
conf.load('compiler_c perl')
conf.check_perl_version((5,6,0))
conf.check_perl_ext_devel()
conf.check_perl_module('Cairo')
conf.check_perl_module('Devel::PPPort 4.89')
def build(bld):
bld(
features = 'c cshlib perlext',
source = 'Mytest.xs',
target = 'Mytest',
install_path = '${ARCHDIR_PERL}/auto')
bld.install_files('${ARCHDIR_PERL}', 'Mytest.pm')
"""

import os
from waflib import Task, Options, Utils
from waflib.Configure import conf
from waflib.TaskGen import extension, feature, before_method

@before_method('apply_incpaths', 'apply_link', 'propagate_uselib_vars')
@feature('perlext')
def init_perlext(self):
"""
Change the values of *cshlib_PATTERN* and *cxxshlib_PATTERN* to remove the
*lib* prefix from library names.
"""
self.uselib = self.to_list(getattr(self, 'uselib', []))
if not 'PERLEXT' in self.uselib: self.uselib.append('PERLEXT')
self.env['cshlib_PATTERN'] = self.env['cxxshlib_PATTERN'] = self.env['perlext_PATTERN']

@extension('.xs')
def xsubpp_file(self, node):
"""
Create :py:class:`waflib.Tools.perl.xsubpp` tasks to process *.xs* files
"""
outnode = node.change_ext('.c')
self.create_task('xsubpp', node, outnode)
self.source.append(outnode)

class xsubpp(Task.Task):
"""
Process *.xs* files
"""
run_str = '${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}'
color = 'BLUE'
ext_out = ['.h']

@conf
def check_perl_version(self, minver=None):
"""
Check if Perl is installed, and set the variable PERL.
minver is supposed to be a tuple
"""
res = True
if minver:
cver = '.'.join(map(str,minver))
else:
cver = ''

self.start_msg('Checking for minimum perl version %s' % cver)

perl = getattr(Options.options, 'perlbinary', None)

if not perl:
perl = self.find_program('perl', var='PERL')
if not perl:
self.end_msg("Perl not found", color="YELLOW")
return False

self.env['PERL'] = perl

version = self.cmd_and_log(self.env.PERL + ["-e", 'printf \"%vd\", $^V'])
if not version:
res = False
version = "Unknown"
elif not minver is None:
ver = tuple(map(int, version.split(".")))
if ver < minver:
res = False

self.end_msg(version, color=res and "GREEN" or "YELLOW")
return res

@conf
def check_perl_module(self, module):
"""
Check if specified perlmodule is installed.

The minimum version can be specified after the module name,
like this::

def configure(conf):
conf.check_perl_module("Some::Module 2.92")
"""
cmd = self.env.PERL + ['-e', 'use %s' % module]
self.start_msg('perl module %s' % module)
try:
r = self.cmd_and_log(cmd)
except Exception:
self.end_msg(False)
return None
self.end_msg(r or True)
return r

@conf
def check_perl_ext_devel(self):
"""
Check for configuration needed to build perl extensions.

Sets different xxx_PERLEXT variables in the environment.

Also sets the ARCHDIR_PERL variable, useful as an installation path,
which can be overridden with the ``--with-perl-archdir`` option.
"""

env = self.env
perl = env.PERL
if not perl:
self.fatal('find perl first')

def cmd_perl_config(s):
return perl + ['-MConfig', '-e', 'print \"%s\"' % s]
def cfg_str(cfg):
return self.cmd_and_log(cmd_perl_config(cfg))
def cfg_lst(cfg):
return Utils.to_list(cfg_str(cfg))
def find_xsubpp():
for var in ('privlib', 'vendorlib'):
xsubpp = cfg_lst('$Config{%s}/ExtUtils/xsubpp$Config{exe_ext}' % var)
if xsubpp and os.path.isfile(xsubpp[0]):
return xsubpp
return self.find_program('xsubpp')

env['LINKFLAGS_PERLEXT'] = cfg_lst('$Config{lddlflags}')
env['INCLUDES_PERLEXT'] = cfg_lst('$Config{archlib}/CORE')
env['CFLAGS_PERLEXT'] = cfg_lst('$Config{ccflags} $Config{cccdlflags}')
env['EXTUTILS_TYPEMAP'] = cfg_lst('$Config{privlib}/ExtUtils/typemap')
env['XSUBPP'] = find_xsubpp()

if not getattr(Options.options, 'perlarchdir', None):
env['ARCHDIR_PERL'] = cfg_str('$Config{sitearch}')
else:
env['ARCHDIR_PERL'] = getattr(Options.options, 'perlarchdir')

env['perlext_PATTERN'] = '%s.' + cfg_str('$Config{dlext}')
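
To illustrate, the helper above shells out to perl once per Config entry; on a typical Linux system the values would look like this (assumed):

# perl -MConfig -e 'print "$Config{dlext}"'   -> 'so'
# so perlext_PATTERN becomes '%s.so' and the Mytest extension builds as Mytest.so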

def options(opt):
"""
Add the ``--with-perl-archdir`` and ``--with-perl-binary`` command-line options.
"""
opt.add_option('--with-perl-binary', type='string', dest='perlbinary', help = 'Specify alternate perl binary', default=None)
opt.add_option('--with-perl-archdir', type='string', dest='perlarchdir', help = 'Specify directory where to install arch specific files', default=None)

+ 0  - 594  waflib/Tools/python.py

@@ -1,594 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2007-2015 (ita)
# Gustavo Carneiro (gjc), 2007

"""
Support for Python, detect the headers and libraries and provide
*use* variables to link C/C++ programs against them::

def options(opt):
opt.load('compiler_c python')
def configure(conf):
conf.load('compiler_c python')
conf.check_python_version((2,4,2))
conf.check_python_headers()
def build(bld):
bld.program(features='pyembed', source='a.c', target='myprog')
bld.shlib(features='pyext', source='b.c', target='mylib')
"""

import os, sys
from waflib import Utils, Options, Errors, Logs, Task, Node
from waflib.TaskGen import extension, before_method, after_method, feature
from waflib.Configure import conf

FRAG = '''
#include <Python.h>
#ifdef __cplusplus
extern "C" {
#endif
void Py_Initialize(void);
void Py_Finalize(void);
#ifdef __cplusplus
}
#endif
int main(int argc, char **argv)
{
(void)argc; (void)argv;
Py_Initialize();
Py_Finalize();
return 0;
}
'''
"""
Piece of C/C++ code used in :py:func:`waflib.Tools.python.check_python_headers`
"""

INST = '''
import sys, py_compile
py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3], True)
'''
"""
Piece of Python code used in :py:func:`waflib.Tools.python.pytask` for byte-compiling python files
"""

DISTUTILS_IMP = ['from distutils.sysconfig import get_config_var, get_python_lib']

@before_method('process_source')
@feature('py')
def feature_py(self):
"""
Create tasks to byte-compile .py files and install them, if requested
"""
self.install_path = getattr(self, 'install_path', '${PYTHONDIR}')
install_from = getattr(self, 'install_from', None)
if install_from and not isinstance(install_from, Node.Node):
install_from = self.path.find_dir(install_from)
self.install_from = install_from

ver = self.env.PYTHON_VERSION
if not ver:
self.bld.fatal('Installing python files requires PYTHON_VERSION, try conf.check_python_version')

if int(ver.replace('.', '')) > 31:
self.install_32 = True

@extension('.py')
def process_py(self, node):
"""
Add signature of .py file, so it will be byte-compiled when necessary
"""
assert(node.get_bld_sig())
assert(getattr(self, 'install_path')), 'add features="py"'

# where to install the python file
if self.install_path:
if self.install_from:
self.bld.install_files(self.install_path, [node], cwd=self.install_from, relative_trick=True)
else:
self.bld.install_files(self.install_path, [node], relative_trick=True)

lst = []
if self.env.PYC:
lst.append('pyc')
if self.env.PYO:
lst.append('pyo')

if self.install_path:
if self.install_from:
pyd = Utils.subst_vars("%s/%s" % (self.install_path, node.path_from(self.install_from)), self.env)
else:
pyd = Utils.subst_vars("%s/%s" % (self.install_path, node.path_from(self.path)), self.env)
else:
pyd = node.abspath()

for ext in lst:
if self.env.PYTAG:
# __pycache__ installation for python 3.2 - PEP 3147
name = node.name[:-3]
pyobj = node.parent.get_bld().make_node('__pycache__').make_node("%s.%s.%s" % (name, self.env.PYTAG, ext))
pyobj.parent.mkdir()
else:
pyobj = node.change_ext(".%s" % ext)

tsk = self.create_task(ext, node, pyobj)
tsk.pyd = pyd

if self.install_path:
self.bld.install_files(os.path.dirname(pyd), pyobj, cwd=node.parent.get_bld(), relative_trick=True)
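# Illustrative output of the loop above, assuming PYTAG='cpython-34' and PYC set:
#   foo.py -> __pycache__/foo.cpython-34.pyc   (PEP 3147 layout, in the build dir)
# and without a PYTAG:
#   foo.py -> foo.pyc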

class pyc(Task.Task):
"""
Byte-compiling python files
"""
color = 'PINK'
def run(self):
cmd = [Utils.subst_vars('${PYTHON}', self.env), '-c', INST, self.inputs[0].abspath(), self.outputs[0].abspath(), self.pyd]
ret = self.generator.bld.exec_command(cmd)
return ret

class pyo(Task.Task):
"""
Byte-compiling python files
"""
color = 'PINK'
def run(self):
cmd = [Utils.subst_vars('${PYTHON}', self.env), Utils.subst_vars('${PYFLAGS_OPT}', self.env), '-c', INST, self.inputs[0].abspath(), self.outputs[0].abspath(), self.pyd]
ret = self.generator.bld.exec_command(cmd)
return ret

@feature('pyext')
@before_method('propagate_uselib_vars', 'apply_link')
@after_method('apply_bundle')
def init_pyext(self):
"""
Change the values of *cshlib_PATTERN* and *cxxshlib_PATTERN* to remove the
*lib* prefix from library names.
"""
self.uselib = self.to_list(getattr(self, 'uselib', []))
if not 'PYEXT' in self.uselib:
self.uselib.append('PYEXT')
# override shlib_PATTERN set by the osx module
self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.macbundle_PATTERN = self.env.pyext_PATTERN
self.env.fcshlib_PATTERN = self.env.dshlib_PATTERN = self.env.pyext_PATTERN

try:
if not self.install_path:
return
except AttributeError:
self.install_path = '${PYTHONARCHDIR}'
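# Net effect (illustrative): with pyext_PATTERN = '%s.so', a target 'mylib'
# is built as 'mylib.so' rather than the default 'libmylib.so', which is the
# name the python interpreter expects when importing the extension.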

@feature('pyext')
@before_method('apply_link', 'apply_bundle')
def set_bundle(self):
"""Mac-specific pyext extension that enables bundles from c_osx.py"""
if Utils.unversioned_sys_platform() == 'darwin':
self.mac_bundle = True

@before_method('propagate_uselib_vars')
@feature('pyembed')
def init_pyembed(self):
"""
Add the PYEMBED variable.
"""
self.uselib = self.to_list(getattr(self, 'uselib', []))
if not 'PYEMBED' in self.uselib:
self.uselib.append('PYEMBED')

@conf
def get_python_variables(self, variables, imports=None):
"""
Spawn a new python process to dump configuration variables

:param variables: variables to print
:type variables: list of string
:param imports: one import by element
:type imports: list of string
:return: the variable values
:rtype: list of string
"""
if not imports:
try:
imports = self.python_imports
except AttributeError:
imports = DISTUTILS_IMP

program = list(imports) # copy
program.append('')
for v in variables:
program.append("print(repr(%s))" % v)
os_env = dict(os.environ)
try:
del os_env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool
except KeyError:
pass

try:
out = self.cmd_and_log(self.env.PYTHON + ['-c', '\n'.join(program)], env=os_env)
except Errors.WafError:
self.fatal('The distutils module is unusable: install "python-devel"?')
self.to_log(out)
return_values = []
for s in out.splitlines():
s = s.strip()
if not s:
continue
if s == 'None':
return_values.append(None)
elif (s[0] == "'" and s[-1] == "'") or (s[0] == '"' and s[-1] == '"'):
return_values.append(eval(s))
elif s[0].isdigit():
return_values.append(int(s))
else: break
return return_values
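# Minimal usage sketch (the value depends on the local python installation):
#   lst = conf.get_python_variables(["get_config_var('INCLUDEPY') or ''"])
#   # -> e.g. ['/usr/include/python2.7']
# Each repr() printed by the child process is parsed back into None, a string
# or an int; anything else stops the parsing.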

@conf
def python_cross_compile(self, features='pyembed pyext'):
"""
For cross-compilation purposes, it is possible to bypass the normal detection and set the flags that you want:
PYTHON_VERSION='3.4' PYTAG='cpython34' pyext_PATTERN="%s.so" PYTHON_LDFLAGS='-lpthread -ldl' waf configure

The following variables are used:
PYTHON_VERSION required
PYTAG required
PYTHON_LDFLAGS required
pyext_PATTERN required
PYTHON_PYEXT_LDFLAGS
PYTHON_PYEMBED_LDFLAGS
"""
features = Utils.to_list(features)
if not ('PYTHON_LDFLAGS' in self.environ or 'PYTHON_PYEXT_LDFLAGS' in self.environ or 'PYTHON_PYEMBED_LDFLAGS' in self.environ):
return False

for x in 'PYTHON_VERSION PYTAG pyext_PATTERN'.split():
if not x in self.environ:
self.fatal('Please set %s in the os environment' % x)
else:
self.env[x] = self.environ[x]

xx = self.env.CXX_NAME and 'cxx' or 'c'
if 'pyext' in features:
flags = self.environ.get('PYTHON_PYEXT_LDFLAGS', self.environ.get('PYTHON_LDFLAGS', None))
if flags is None:
self.fatal('No flags provided through PYTHON_PYEXT_LDFLAGS as required')
else:
self.parse_flags(flags, 'PYEXT')

self.check(header_name='Python.h', define_name='HAVE_PYEXT', msg='Testing pyext configuration',
features='%s %sshlib pyext' % (xx, xx), fragment=FRAG, errmsg='Could not build python extensions')
if 'pyembed' in features:
flags = self.environ.get('PYTHON_PYEMBED_LDFLAGS', self.environ.get('PYTHON_LDFLAGS', None))
if flags is None:
self.fatal('No flags provided through PYTHON_PYEMBED_LDFLAGS as required')
else:
self.parse_flags(flags, 'PYEMBED')
self.check(header_name='Python.h', define_name='HAVE_PYEMBED', msg='Testing pyembed configuration',
fragment=FRAG, errmsg='Could not build a python embedded interpreter', features='%s %sprogram pyembed' % (xx, xx))
return True

@conf
def check_python_headers(conf, features='pyembed pyext'):
"""
Check for headers and libraries necessary to extend or embed python by using the module *distutils*.
On success the environment variables xxx_PYEXT and xxx_PYEMBED are added:

* PYEXT: for compiling python extensions
* PYEMBED: for embedding a python interpreter
"""
features = Utils.to_list(features)
assert ('pyembed' in features) or ('pyext' in features), "check_python_headers features must include 'pyembed' and/or 'pyext'"
env = conf.env
if not env['CC_NAME'] and not env['CXX_NAME']:
conf.fatal('load a compiler first (gcc, g++, ..)')

# bypass all the code below for cross-compilation
if conf.python_cross_compile(features):
return

if not env['PYTHON_VERSION']:
conf.check_python_version()

pybin = env.PYTHON
if not pybin:
conf.fatal('Could not find the python executable')

# so we actually do all this for compatibility reasons and for obtaining pyext_PATTERN below
v = 'prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split()
try:
lst = conf.get_python_variables(["get_config_var('%s') or ''" % x for x in v])
except RuntimeError:
conf.fatal("Python development headers not found (-v for details).")

vals = ['%s = %r' % (x, y) for (x, y) in zip(v, lst)]
conf.to_log("Configuration returned from %r:\n%s\n" % (pybin, '\n'.join(vals)))

dct = dict(zip(v, lst))
x = 'MACOSX_DEPLOYMENT_TARGET'
if dct[x]:
env[x] = conf.environ[x] = dct[x]
env['pyext_PATTERN'] = '%s' + dct['SO'] # not a mistake


# Try to get pythonX.Y-config
num = '.'.join(env['PYTHON_VERSION'].split('.')[:2])
conf.find_program([''.join(pybin) + '-config', 'python%s-config' % num, 'python-config-%s' % num, 'python%sm-config' % num], var='PYTHON_CONFIG', msg="python-config", mandatory=False)

if env.PYTHON_CONFIG:
# python2.6-config requires 3 runs
all_flags = [['--cflags', '--libs', '--ldflags']]
if sys.hexversion < 0x2070000:
all_flags = [[k] for k in all_flags[0]]

xx = env.CXX_NAME and 'cxx' or 'c'

if 'pyembed' in features:
for flags in all_flags:
conf.check_cfg(msg='Asking python-config for pyembed %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=flags)

conf.check(header_name='Python.h', define_name='HAVE_PYEMBED', msg='Getting pyembed flags from python-config',
fragment=FRAG, errmsg='Could not build a python embedded interpreter',
features='%s %sprogram pyembed' % (xx, xx))

if 'pyext' in features:
for flags in all_flags:
conf.check_cfg(msg='Asking python-config for pyext %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEXT', args=flags)

conf.check(header_name='Python.h', define_name='HAVE_PYEXT', msg='Getting pyext flags from python-config',
features='%s %sshlib pyext' % (xx, xx), fragment=FRAG, errmsg='Could not build python extensions')

conf.define('HAVE_PYTHON_H', 1)
return

# No python-config; fall back to the distutils values (this also covers windows systems)
all_flags = dct['LDFLAGS'] + ' ' + dct['CFLAGS']
conf.parse_flags(all_flags, 'PYEMBED')

all_flags = dct['LDFLAGS'] + ' ' + dct['LDSHARED'] + ' ' + dct['CFLAGS']
conf.parse_flags(all_flags, 'PYEXT')

result = None
if not dct["LDVERSION"]:
dct["LDVERSION"] = env['PYTHON_VERSION']

# further simplification will be complicated
for name in ('python' + dct['LDVERSION'], 'python' + env['PYTHON_VERSION'] + 'm', 'python' + env['PYTHON_VERSION'].replace('.', '')):

# LIBPATH_PYEMBED is already set; see if it works.
if not result and env['LIBPATH_PYEMBED']:
path = env['LIBPATH_PYEMBED']
conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n" % path)
result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBPATH_PYEMBED' % name)

if not result and dct['LIBDIR']:
path = [dct['LIBDIR']]
conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n" % path)
result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBDIR' % name)

if not result and dct['LIBPL']:
path = [dct['LIBPL']]
conf.to_log("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in python_LIBPL' % name)

if not result:
path = [os.path.join(dct['prefix'], "libs")]
conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in $prefix/libs' % name)

if result:
break # do not forget to set LIBPATH_PYEMBED

if result:
env['LIBPATH_PYEMBED'] = path
env.append_value('LIB_PYEMBED', [name])
else:
conf.to_log("\n\n### LIB NOT FOUND\n")

# under certain conditions, python extensions must link to
# python libraries, not just python embedding programs.
if Utils.is_win32 or dct['Py_ENABLE_SHARED']:
env['LIBPATH_PYEXT'] = env['LIBPATH_PYEMBED']
env['LIB_PYEXT'] = env['LIB_PYEMBED']

conf.to_log("Include path for Python extensions (found via distutils module): %r\n" % (dct['INCLUDEPY'],))
env['INCLUDES_PYEXT'] = [dct['INCLUDEPY']]
env['INCLUDES_PYEMBED'] = [dct['INCLUDEPY']]

# Code using the Python API needs to be compiled with -fno-strict-aliasing
if env['CC_NAME'] == 'gcc':
env.append_value('CFLAGS_PYEMBED', ['-fno-strict-aliasing'])
env.append_value('CFLAGS_PYEXT', ['-fno-strict-aliasing'])
if env['CXX_NAME'] == 'gcc':
env.append_value('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing'])
env.append_value('CXXFLAGS_PYEXT', ['-fno-strict-aliasing'])

if env.CC_NAME == "msvc":
from distutils.msvccompiler import MSVCCompiler
dist_compiler = MSVCCompiler()
dist_compiler.initialize()
env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options)
env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options)
env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared)

# See if it compiles
conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', uselib='PYEMBED', fragment=FRAG, errmsg='Distutils not installed? Broken python installation? Get python-config now!')

@conf
def check_python_version(conf, minver=None):
"""
Check if the python interpreter is found matching a given minimum version.
minver should be a tuple, e.g. to check for python >= 2.4.2 pass (2,4,2) as minver.

If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR'
(eg. '2.4') of the actual python version found, and PYTHONDIR is
defined, pointing to the site-packages directory appropriate for
this python version, where modules/packages/extensions should be
installed.

:param minver: minimum version
:type minver: tuple of int
"""
assert minver is None or isinstance(minver, tuple)
pybin = conf.env['PYTHON']
if not pybin:
conf.fatal('could not find the python executable')

# Get python version string
cmd = pybin + ['-c', 'import sys\nfor x in sys.version_info: print(str(x))']
Logs.debug('python: Running python command %r' % cmd)
lines = conf.cmd_and_log(cmd).split()
assert len(lines) == 5, "found %i lines, expected 5: %r" % (len(lines), lines)
pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))

# compare python version with the minimum required
result = (minver is None) or (pyver_tuple >= minver)

if result:
# define useful environment variables
pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
conf.env['PYTHON_VERSION'] = pyver

if 'PYTHONDIR' in conf.env:
# Check if --pythondir was specified
pydir = conf.env['PYTHONDIR']
elif 'PYTHONDIR' in conf.environ:
# Check environment for PYTHONDIR
pydir = conf.environ['PYTHONDIR']
else:
# Finally, try to guess
if Utils.is_win32:
(python_LIBDEST, pydir) = conf.get_python_variables(
["get_config_var('LIBDEST') or ''",
"get_python_lib(standard_lib=0) or ''"])
else:
python_LIBDEST = None
(pydir,) = conf.get_python_variables( ["get_python_lib(standard_lib=0, prefix='%s') or ''" % conf.env['PREFIX']])
if python_LIBDEST is None:
if conf.env['LIBDIR']:
python_LIBDEST = os.path.join(conf.env['LIBDIR'], "python" + pyver)
else:
python_LIBDEST = os.path.join(conf.env['PREFIX'], "lib", "python" + pyver)

if 'PYTHONARCHDIR' in conf.env:
# Check if --pythonarchdir was specified
pyarchdir = conf.env['PYTHONARCHDIR']
elif 'PYTHONARCHDIR' in conf.environ:
# Check environment for PYTHONARCHDIR
pyarchdir = conf.environ['PYTHONARCHDIR']
else:
# Finally, try to guess
(pyarchdir, ) = conf.get_python_variables( ["get_python_lib(plat_specific=1, standard_lib=0, prefix='%s') or ''" % conf.env['PREFIX']])
if not pyarchdir:
pyarchdir = pydir

if hasattr(conf, 'define'): # conf.define is added by the C tool, so may not exist
conf.define('PYTHONDIR', pydir)
conf.define('PYTHONARCHDIR', pyarchdir)

conf.env['PYTHONDIR'] = pydir
conf.env['PYTHONARCHDIR'] = pyarchdir

# Feedback
pyver_full = '.'.join(map(str, pyver_tuple[:3]))
if minver is None:
conf.msg('Checking for python version', pyver_full)
else:
minver_str = '.'.join(map(str, minver))
conf.msg('Checking for python version >= %s' % (minver_str,), pyver_full, result and 'GREEN' or 'YELLOW')

if not result:
conf.fatal('The python version is too old, expecting %r' % (minver,))
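# Illustrative outcome, assuming python 2.7 on a unix system with the default
# prefix:
#   conf.check_python_version((2,4,2))
#   conf.env.PYTHON_VERSION  -> '2.7'
#   conf.env.PYTHONDIR       -> e.g. '/usr/local/lib/python2.7/site-packages'
#   conf.env.PYTHONARCHDIR   -> the platform-specific site-packages directory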

PYTHON_MODULE_TEMPLATE = '''
import %s as current_module
version = getattr(current_module, '__version__', None)
if version is not None:
print(str(version))
else:
print('unknown version')
'''

@conf
def check_python_module(conf, module_name, condition=''):
"""
Check if the selected python interpreter can import the given python module::

def configure(conf):
conf.check_python_module('pygccxml')
conf.check_python_module('re', condition="ver > num(2, 0, 4) and ver <= num(3, 0, 0)")

:param module_name: module
:type module_name: string
"""
msg = "Checking for python module '%s'" % module_name
if condition:
msg = '%s (%s)' % (msg, condition)
conf.start_msg(msg)
try:
ret = conf.cmd_and_log(conf.env['PYTHON'] + ['-c', PYTHON_MODULE_TEMPLATE % module_name])
except Exception:
conf.end_msg(False)
conf.fatal('Could not find the python module %r' % module_name)

ret = ret.strip()
if condition:
conf.end_msg(ret)
if ret == 'unknown version':
conf.fatal('Could not check the %s version' % module_name)

from distutils.version import LooseVersion
def num(*k):
if isinstance(k[0], int):
return LooseVersion('.'.join([str(x) for x in k]))
else:
return LooseVersion(k[0])
d = {'num': num, 'ver': LooseVersion(ret)}
ev = eval(condition, {}, d)
if not ev:
conf.fatal('The %s version does not satisfy the requirements' % module_name)
else:
if ret == 'unknown version':
conf.end_msg(True)
else:
conf.end_msg(ret)
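# Illustrative use of the condition mini-language ('foolib' is hypothetical):
#   conf.check_python_module('foolib', condition='ver >= num(1,2)')
# 'ver' is the LooseVersion built from foolib.__version__ and num() builds a
# comparable LooseVersion from ints or from a version string.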

def configure(conf):
"""
Detect the python interpreter
"""
v = conf.env
v['PYTHON'] = Options.options.python or os.environ.get('PYTHON', sys.executable)
if Options.options.pythondir:
v['PYTHONDIR'] = Options.options.pythondir
if Options.options.pythonarchdir:
v['PYTHONARCHDIR'] = Options.options.pythonarchdir

conf.find_program('python', var='PYTHON')

v['PYFLAGS'] = ''
v['PYFLAGS_OPT'] = '-O'

v['PYC'] = getattr(Options.options, 'pyc', 1)
v['PYO'] = getattr(Options.options, 'pyo', 1)

try:
v.PYTAG = conf.cmd_and_log(conf.env.PYTHON + ['-c', "import imp;print(imp.get_tag())"]).strip()
except Errors.WafError:
pass

def options(opt):
"""
Add python-specific options
"""
pyopt=opt.add_option_group("Python Options")
pyopt.add_option('--nopyc', dest = 'pyc', action='store_false', default=1,
help = 'Do not install bytecode compiled .pyc files (configuration) [Default:install]')
pyopt.add_option('--nopyo', dest='pyo', action='store_false', default=1,
help='Do not install optimised compiled .pyo files (configuration) [Default:install]')
pyopt.add_option('--python', dest="python",
help='python binary to be used [Default: %s]' % sys.executable)
pyopt.add_option('--pythondir', dest='pythondir',
help='Installation path for python modules (py, platform-independent .py and .pyc files)')
pyopt.add_option('--pythonarchdir', dest='pythonarchdir',
help='Installation path for python extension (pyext, platform-dependent .so or .dylib files)')


+ 0
- 692
waflib/Tools/qt4.py

@@ -1,692 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)

"""

Tool Description
================

This tool helps with finding Qt4 tools and libraries,
and also provides syntactic sugar for using Qt4 tools.

The following snippet illustrates the tool usage::

def options(opt):
opt.load('compiler_cxx qt4')

def configure(conf):
conf.load('compiler_cxx qt4')

def build(bld):
bld(
features = 'qt4 cxx cxxprogram',
uselib = 'QTCORE QTGUI QTOPENGL QTSVG',
source = 'main.cpp textures.qrc aboutDialog.ui',
target = 'window',
)

Here, the UI description and resource files will be processed
to generate code.

Usage
=====

Load the "qt4" tool.

You also need to edit your sources accordingly:

- the normal way of doing things is to have your C++ files
include the .moc file.
This is regarded as the best practice (and provides much faster
compilations).
It also implies that the include paths have been set properly.

- to have the include paths added automatically, use the following::

from waflib.TaskGen import feature, before_method, after_method
@feature('cxx')
@after_method('process_source')
@before_method('apply_incpaths')
def add_includes_paths(self):
incs = set(self.to_list(getattr(self, 'includes', '')))
for x in self.compiled_tasks:
incs.add(x.inputs[0].parent.path_from(self.path))
self.includes = list(incs)

Note: another tool provides Qt processing that does not require
.moc includes, see 'playground/slow_qt/'.

A few options (--qt{dir,bin,...}) and environment variables
(QT4_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool,
tool path selection, etc; please read the source for more info.

"""

try:
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
except ImportError:
has_xml = False
ContentHandler = object
else:
has_xml = True

import os, sys
from waflib.Tools import cxx
from waflib import Task, Utils, Options, Errors, Context
from waflib.TaskGen import feature, after_method, extension
from waflib.Configure import conf
from waflib import Logs

MOC_H = ['.h', '.hpp', '.hxx', '.hh']
"""
File extensions associated to the .moc files
"""

EXT_RCC = ['.qrc']
"""
File extension for the resource (.qrc) files
"""

EXT_UI = ['.ui']
"""
File extension for the user interface (.ui) files
"""

EXT_QT4 = ['.cpp', '.cc', '.cxx', '.C']
"""
File extensions of C++ files that may require a .moc processing
"""

QT4_LIBS = "QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtXmlPatterns QtWebKit Qt3Support QtHelp QtScript QtDeclarative QtDesigner"

class qxx(Task.classes['cxx']):
"""
Each C++ file can have zero or several .moc files to create.
They are known only when the files are scanned (preprocessor)
To avoid scanning the c++ files each time (parsing C/C++), the results
are retrieved from the task cache (bld.node_deps/bld.raw_deps).
The moc tasks are also created *dynamically* during the build.
"""

def __init__(self, *k, **kw):
Task.Task.__init__(self, *k, **kw)
self.moc_done = 0

def runnable_status(self):
"""
Compute the task signature to make sure the scanner was executed. Create the
moc tasks by using :py:meth:`waflib.Tools.qt4.qxx.add_moc_tasks` (if necessary),
then postpone the task execution (there is no need to recompute the task signature).
"""
if self.moc_done:
return Task.Task.runnable_status(self)
else:
for t in self.run_after:
if not t.hasrun:
return Task.ASK_LATER
self.add_moc_tasks()
return Task.Task.runnable_status(self)

def create_moc_task(self, h_node, m_node):
"""
If several libraries use the same classes, it is possible that moc will run several times (Issue 1318)
It is not possible to change the file names, but we can assume that the moc transformation will be identical,
and the moc tasks can be shared in a global cache.

The defines passed to moc will then depend on task generator order. If this is not acceptable, then
use the tool slow_qt4 instead (and enjoy the slow builds... :-( )
"""
try:
moc_cache = self.generator.bld.moc_cache
except AttributeError:
moc_cache = self.generator.bld.moc_cache = {}

try:
return moc_cache[h_node]
except KeyError:
tsk = moc_cache[h_node] = Task.classes['moc'](env=self.env, generator=self.generator)
tsk.set_inputs(h_node)
tsk.set_outputs(m_node)

if self.generator:
self.generator.tasks.append(tsk)

# direct injection in the build phase (safe because called from the main thread)
gen = self.generator.bld.producer
gen.outstanding.insert(0, tsk)
gen.total += 1

return tsk

def moc_h_ext(self):
ext = []
try:
ext = Options.options.qt_header_ext.split()
except AttributeError:
pass
if not ext:
ext = MOC_H
return ext

def add_moc_tasks(self):
"""
Create the moc tasks by looking in ``bld.raw_deps[self.uid()]``
"""
node = self.inputs[0]
bld = self.generator.bld

try:
# compute the signature once to know if there is a moc file to create
self.signature()
except KeyError:
# the moc file may be referenced somewhere else
pass
else:
# remove the signature, it must be recomputed with the moc task
delattr(self, 'cache_sig')

include_nodes = [node.parent] + self.generator.includes_nodes

moctasks = []
mocfiles = set([])
for d in bld.raw_deps.get(self.uid(), []):
if not d.endswith('.moc'):
continue

# process that base.moc only once
if d in mocfiles:
continue
mocfiles.add(d)

# find the source associated with the moc file
h_node = None

base2 = d[:-4]
for x in include_nodes:
for e in self.moc_h_ext():
h_node = x.find_node(base2 + e)
if h_node:
break
if h_node:
m_node = h_node.change_ext('.moc')
break
else:
# foo.cpp -> foo.cpp.moc
for k in EXT_QT4:
if base2.endswith(k):
for x in include_nodes:
h_node = x.find_node(base2)
if h_node:
break
if h_node:
m_node = h_node.change_ext(k + '.moc')
break

if not h_node:
raise Errors.WafError('No source found for %r which is a moc file' % d)

# create the moc task
task = self.create_moc_task(h_node, m_node)
moctasks.append(task)

# simple scheduler dependency: run the moc task before others
self.run_after.update(set(moctasks))
self.moc_done = 1

class trans_update(Task.Task):
"""Update a .ts files from a list of C++ files"""
run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}'
color = 'BLUE'
Task.update_outputs(trans_update)

class XMLHandler(ContentHandler):
"""
Parser for *.qrc* files
"""
def __init__(self):
self.buf = []
self.files = []
def startElement(self, name, attrs):
if name == 'file':
self.buf = []
def endElement(self, name):
if name == 'file':
self.files.append(str(''.join(self.buf)))
def characters(self, cars):
self.buf.append(cars)
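# A *.qrc* file parsed by this handler looks roughly like (illustrative content):
#   <!DOCTYPE RCC><RCC version="1.0">
#   <qresource>
#     <file>images/icon.png</file>
#   </qresource>
#   </RCC>
# Only the text of the <file> elements is collected into self.files.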

@extension(*EXT_RCC)
def create_rcc_task(self, node):
"Create rcc and cxx tasks for *.qrc* files"
rcnode = node.change_ext('_rc.cpp')
self.create_task('rcc', node, rcnode)
cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o'))
try:
self.compiled_tasks.append(cpptask)
except AttributeError:
self.compiled_tasks = [cpptask]
return cpptask

@extension(*EXT_UI)
def create_uic_task(self, node):
"hook for uic tasks"
uictask = self.create_task('ui4', node)
uictask.outputs = [self.path.find_or_declare(self.env['ui_PATTERN'] % node.name[:-3])]

@extension('.ts')
def add_lang(self, node):
"""add all the .ts file into self.lang"""
self.lang = self.to_list(getattr(self, 'lang', [])) + [node]

@feature('qt4')
@after_method('apply_link')
def apply_qt4(self):
"""
Add MOC_FLAGS which may be necessary for moc::

def build(bld):
bld.program(features='qt4', source='main.cpp', target='app', use='QTCORE')

The additional parameters are:

:param lang: list of translation files (\*.ts) to process
:type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
:param update: whether to process the C++ files to update the \*.ts files (use **waf --translate**)
:type update: bool
:param langname: if given, transform the \*.ts files into a .qrc files to include in the binary file
:type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
"""
if getattr(self, 'lang', None):
qmtasks = []
for x in self.to_list(self.lang):
if isinstance(x, str):
x = self.path.find_resource(x + '.ts')
qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm')))

if getattr(self, 'update', None) and Options.options.trans_qt4:
cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [
a.inputs[0] for a in self.tasks if getattr(a, 'inputs', None) and a.inputs[0].name.endswith('.ui')]
for x in qmtasks:
self.create_task('trans_update', cxxnodes, x.inputs)

if getattr(self, 'langname', None):
qmnodes = [x.outputs[0] for x in qmtasks]
rcnode = self.langname
if isinstance(rcnode, str):
rcnode = self.path.find_or_declare(rcnode + '.qrc')
t = self.create_task('qm2rcc', qmnodes, rcnode)
k = create_rcc_task(self, t.outputs[0])
self.link_task.inputs.append(k.outputs[0])

lst = []
for flag in self.to_list(self.env['CXXFLAGS']):
if len(flag) < 2: continue
f = flag[0:2]
if f in ('-D', '-I', '/D', '/I'):
if (f[0] == '/'):
lst.append('-' + flag[1:])
else:
lst.append(flag)
self.env.append_value('MOC_FLAGS', lst)
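# Illustrative trace of the flag filtering above: a CXXFLAGS value of
#   ['-g', '-DQT_NO_DEBUG', '/DWIN32', '-Iinclude']
# contributes ['-DQT_NO_DEBUG', '-DWIN32', '-Iinclude'] to MOC_FLAGS; the
# msvc-style '/D' and '/I' are rewritten with a dash, other flags are dropped.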

@extension(*EXT_QT4)
def cxx_hook(self, node):
"""
Re-map C++ file extensions to the :py:class:`waflib.Tools.qt4.qxx` task.
"""
return self.create_compiled_task('qxx', node)

class rcc(Task.Task):
"""
Process *.qrc* files
"""
color = 'BLUE'
run_str = '${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
ext_out = ['.h']

def rcname(self):
return os.path.splitext(self.inputs[0].name)[0]

def scan(self):
"""Parse the *.qrc* files"""
if not has_xml:
Logs.error('no xml support was found, the rcc dependencies will be incomplete!')
return ([], [])

parser = make_parser()
curHandler = XMLHandler()
parser.setContentHandler(curHandler)
fi = open(self.inputs[0].abspath(), 'r')
try:
parser.parse(fi)
finally:
fi.close()

nodes = []
names = []
root = self.inputs[0].parent
for x in curHandler.files:
nd = root.find_resource(x)
if nd: nodes.append(nd)
else: names.append(x)
return (nodes, names)

class moc(Task.Task):
"""
Create *.moc* files
"""
color = 'BLUE'
run_str = '${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
def keyword(self):
return "Creating"
def __str__(self):
return self.outputs[0].path_from(self.generator.bld.launch_node())

class ui4(Task.Task):
"""
Process *.ui* files
"""
color = 'BLUE'
run_str = '${QT_UIC} ${SRC} -o ${TGT}'
ext_out = ['.h']

class ts2qm(Task.Task):
"""
Create *.qm* files from *.ts* files
"""
color = 'BLUE'
run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'

class qm2rcc(Task.Task):
"""
Transform *.qm* files into *.qrc* files
"""
color = 'BLUE'
after = 'ts2qm'

def run(self):
"""Create a qrc file including the inputs"""
txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
self.outputs[0].write(code)

def configure(self):
"""
Besides the configuration options, the environment variable QT4_ROOT may be used
to give the location of the qt4 libraries (absolute path).

The detection will use the program *pkg-config* through :py:func:`waflib.Tools.config_c.check_cfg`
"""
self.find_qt4_binaries()
self.set_qt4_libs_to_check()
self.set_qt4_defines()
self.find_qt4_libraries()
self.add_qt4_rpath()
self.simplify_qt4_libs()

@conf
def find_qt4_binaries(self):
env = self.env
opt = Options.options

qtdir = getattr(opt, 'qtdir', '')
qtbin = getattr(opt, 'qtbin', '')

paths = []

if qtdir:
qtbin = os.path.join(qtdir, 'bin')

# the qt directory has been given from QT4_ROOT - deduce the qt binary path
if not qtdir:
qtdir = os.environ.get('QT4_ROOT', '')
qtbin = os.environ.get('QT4_BIN', None) or os.path.join(qtdir, 'bin')

if qtbin:
paths = [qtbin]

# no qtdir, look in the path and in /usr/local/Trolltech
if not qtdir:
paths = os.environ.get('PATH', '').split(os.pathsep)
paths.append('/usr/share/qt4/bin/')
try:
lst = Utils.listdir('/usr/local/Trolltech/')
except OSError:
pass
else:
if lst:
lst.sort()
lst.reverse()

# keep the highest version
qtdir = '/usr/local/Trolltech/%s/' % lst[0]
qtbin = os.path.join(qtdir, 'bin')
paths.append(qtbin)

# at the end, try to find qmake in the paths given
# keep the one with the highest version
cand = None
prev_ver = ['4', '0', '0']
for qmk in ('qmake-qt4', 'qmake4', 'qmake'):
try:
qmake = self.find_program(qmk, path_list=paths)
except self.errors.ConfigurationError:
pass
else:
try:
version = self.cmd_and_log(qmake + ['-query', 'QT_VERSION']).strip()
except self.errors.WafError:
pass
else:
if version:
new_ver = version.split('.')
if new_ver > prev_ver:
cand = qmake
prev_ver = new_ver
if cand:
self.env.QMAKE = cand
else:
self.fatal('Could not find qmake for qt4')

qtbin = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_BINS']).strip() + os.sep

def find_bin(lst, var):
if var in env:
return
for f in lst:
try:
ret = self.find_program(f, path_list=paths)
except self.errors.ConfigurationError:
pass
else:
env[var]=ret
break

find_bin(['uic-qt3', 'uic3'], 'QT_UIC3')
find_bin(['uic-qt4', 'uic'], 'QT_UIC')
if not env.QT_UIC:
self.fatal('cannot find the uic compiler for qt4')

self.start_msg('Checking for uic version')
uicver = self.cmd_and_log(env.QT_UIC + ["-version"], output=Context.BOTH)
uicver = ''.join(uicver).strip()
uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '')
self.end_msg(uicver)
if uicver.find(' 3.') != -1:
self.fatal('this uic compiler is for qt3, add uic for qt4 to your path')

find_bin(['moc-qt4', 'moc'], 'QT_MOC')
find_bin(['rcc-qt4', 'rcc'], 'QT_RCC')
find_bin(['lrelease-qt4', 'lrelease'], 'QT_LRELEASE')
find_bin(['lupdate-qt4', 'lupdate'], 'QT_LUPDATE')

env['UIC3_ST']= '%s -o %s'
env['UIC_ST'] = '%s -o %s'
env['MOC_ST'] = '-o'
env['ui_PATTERN'] = 'ui_%s.h'
env['QT_LRELEASE_FLAGS'] = ['-silent']
env.MOCCPPPATH_ST = '-I%s'
env.MOCDEFINES_ST = '-D%s'

@conf
def find_qt4_libraries(self):
qtlibs = getattr(Options.options, 'qtlibs', None) or os.environ.get("QT4_LIBDIR", None)
if not qtlibs:
try:
qtlibs = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip()
except Errors.WafError:
qtdir = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip() + os.sep
qtlibs = os.path.join(qtdir, 'lib')
self.msg('Found the Qt4 libraries in', qtlibs)

qtincludes = os.environ.get("QT4_INCLUDES", None) or self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip()
env = self.env
if not 'PKG_CONFIG_PATH' in os.environ:
os.environ['PKG_CONFIG_PATH'] = '%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib' % (qtlibs, qtlibs)

try:
if os.environ.get("QT4_XCOMPILE", None):
raise self.errors.ConfigurationError()
self.check_cfg(atleast_pkgconfig_version='0.1')
except self.errors.ConfigurationError:
for i in self.qt4_vars:
uselib = i.upper()
if Utils.unversioned_sys_platform() == "darwin":
# Since at least qt 4.7.3, each library is located in a separate directory
frameworkName = i + ".framework"
qtDynamicLib = os.path.join(qtlibs, frameworkName, i)
if os.path.exists(qtDynamicLib):
env.append_unique('FRAMEWORK_' + uselib, i)
self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
else:
self.msg('Checking for %s' % i, False, 'YELLOW')
env.append_unique('INCLUDES_' + uselib, os.path.join(qtlibs, frameworkName, 'Headers'))
elif env.DEST_OS != "win32":
qtDynamicLib = os.path.join(qtlibs, "lib" + i + ".so")
qtStaticLib = os.path.join(qtlibs, "lib" + i + ".a")
if os.path.exists(qtDynamicLib):
env.append_unique('LIB_' + uselib, i)
self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
elif os.path.exists(qtStaticLib):
env.append_unique('LIB_' + uselib, i)
self.msg('Checking for %s' % i, qtStaticLib, 'GREEN')
else:
self.msg('Checking for %s' % i, False, 'YELLOW')

env.append_unique('LIBPATH_' + uselib, qtlibs)
env.append_unique('INCLUDES_' + uselib, qtincludes)
env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
else:
# Release library names are like QtCore4
for k in ("lib%s.a", "lib%s4.a", "%s.lib", "%s4.lib"):
lib = os.path.join(qtlibs, k % i)
if os.path.exists(lib):
env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')])
self.msg('Checking for %s' % i, lib, 'GREEN')
break
else:
self.msg('Checking for %s' % i, False, 'YELLOW')

env.append_unique('LIBPATH_' + uselib, qtlibs)
env.append_unique('INCLUDES_' + uselib, qtincludes)
env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))

# Debug library names are like QtCore4d
uselib = i.upper() + "_debug"
for k in ("lib%sd.a", "lib%sd4.a", "%sd.lib", "%sd4.lib"):
lib = os.path.join(qtlibs, k % i)
if os.path.exists(lib):
env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')])
self.msg('Checking for %s' % i, lib, 'GREEN')
break
else:
self.msg('Checking for %s' % i, False, 'YELLOW')

env.append_unique('LIBPATH_' + uselib, qtlibs)
env.append_unique('INCLUDES_' + uselib, qtincludes)
env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
else:
for i in self.qt4_vars_debug + self.qt4_vars:
self.check_cfg(package=i, args='--cflags --libs', mandatory=False)

@conf
def simplify_qt4_libs(self):
# the libpaths make really long command-lines
# remove the qtcore ones from qtgui, etc
env = self.env
def process_lib(vars_, coreval):
for d in vars_:
var = d.upper()
if var == 'QTCORE':
continue

value = env['LIBPATH_'+var]
if value:
core = env[coreval]
accu = []
for lib in value:
if lib in core:
continue
accu.append(lib)
env['LIBPATH_'+var] = accu

process_lib(self.qt4_vars, 'LIBPATH_QTCORE')
process_lib(self.qt4_vars_debug, 'LIBPATH_QTCORE_DEBUG')

@conf
def add_qt4_rpath(self):
# rpath if wanted
env = self.env
if getattr(Options.options, 'want_rpath', False):
def process_rpath(vars_, coreval):
for d in vars_:
var = d.upper()
value = env['LIBPATH_'+var]
if value:
core = env[coreval]
accu = []
for lib in value:
if var != 'QTCORE':
if lib in core:
continue
accu.append('-Wl,--rpath='+lib)
env['RPATH_'+var] = accu
process_rpath(self.qt4_vars, 'LIBPATH_QTCORE')
process_rpath(self.qt4_vars_debug, 'LIBPATH_QTCORE_DEBUG')

@conf
def set_qt4_libs_to_check(self):
if not hasattr(self, 'qt4_vars'):
self.qt4_vars = QT4_LIBS
self.qt4_vars = Utils.to_list(self.qt4_vars)
if not hasattr(self, 'qt4_vars_debug'):
self.qt4_vars_debug = [a + '_debug' for a in self.qt4_vars]
self.qt4_vars_debug = Utils.to_list(self.qt4_vars_debug)

@conf
def set_qt4_defines(self):
if sys.platform != 'win32':
return
for x in self.qt4_vars:
y = x[2:].upper()
self.env.append_unique('DEFINES_%s' % x.upper(), 'QT_%s_LIB' % y)
self.env.append_unique('DEFINES_%s_DEBUG' % x.upper(), 'QT_%s_LIB' % y)

def options(opt):
"""
Command-line options
"""
opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries')

opt.add_option('--header-ext',
type='string',
default='',
help='header extension for moc files',
dest='qt_header_ext')

for i in 'qtdir qtbin qtlibs'.split():
opt.add_option('--'+i, type='string', default='', dest=i)

opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt4", default=False)


+ 0
- 744
waflib/Tools/qt5.py

@@ -1,744 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2015 (ita)

"""

Tool Description
================

This tool helps with finding Qt5 tools and libraries,
and also provides syntactic sugar for using Qt5 tools.

The following snippet illustrates the tool usage::

def options(opt):
opt.load('compiler_cxx qt5')

def configure(conf):
conf.load('compiler_cxx qt5')

def build(bld):
bld(
features = 'qt5 cxx cxxprogram',
uselib = 'QTCORE QTGUI QTOPENGL QTSVG',
source = 'main.cpp textures.qrc aboutDialog.ui',
target = 'window',
)

Here, the UI description and resource files will be processed
to generate code.

Usage
=====

Load the "qt5" tool.

You also need to edit your sources accordingly:

- the normal way of doing things is to have your C++ files
include the .moc file.
This is regarded as the best practice (and provides much faster
compilations).
It also implies that the include paths have been set properly.

- to have the include paths added automatically, use the following::

from waflib.TaskGen import feature, before_method, after_method
@feature('cxx')
@after_method('process_source')
@before_method('apply_incpaths')
def add_includes_paths(self):
incs = set(self.to_list(getattr(self, 'includes', '')))
for x in self.compiled_tasks:
incs.add(x.inputs[0].parent.path_from(self.path))
self.includes = list(incs)

Note: another tool provides Qt processing that does not require
.moc includes, see 'playground/slow_qt/'.

A few options (--qt{dir,bin,...}) and environment variables
(QT5_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool,
tool path selection, etc; please read the source for more info.

"""

try:
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
except ImportError:
has_xml = False
ContentHandler = object
else:
has_xml = True

import os, sys
from waflib.Tools import cxx
from waflib import Task, Utils, Options, Errors, Context
from waflib.TaskGen import feature, after_method, extension
from waflib.Configure import conf
from waflib import Logs

MOC_H = ['.h', '.hpp', '.hxx', '.hh']
"""
File extensions associated to the .moc files
"""

EXT_RCC = ['.qrc']
"""
File extension for the resource (.qrc) files
"""

EXT_UI = ['.ui']
"""
File extension for the user interface (.ui) files
"""

EXT_QT5 = ['.cpp', '.cc', '.cxx', '.C']
"""
File extensions of C++ files that may require a .moc processing
"""

QT5_LIBS = '''
qtmain
Qt5Bluetooth
Qt5CLucene
Qt5Concurrent
Qt5Core
Qt5DBus
Qt5Declarative
Qt5DesignerComponents
Qt5Designer
Qt5Gui
Qt5Help
Qt5MultimediaQuick_p
Qt5Multimedia
Qt5MultimediaWidgets
Qt5Network
Qt5Nfc
Qt5OpenGL
Qt5Positioning
Qt5PrintSupport
Qt5Qml
Qt5QuickParticles
Qt5Quick
Qt5QuickTest
Qt5Script
Qt5ScriptTools
Qt5Sensors
Qt5SerialPort
Qt5Sql
Qt5Svg
Qt5Test
Qt5WebKit
Qt5WebKitWidgets
Qt5Widgets
Qt5WinExtras
Qt5X11Extras
Qt5XmlPatterns
Qt5Xml'''

class qxx(Task.classes['cxx']):
"""
Each C++ file can have zero or several .moc files to create.
They are known only when the files are scanned (preprocessor)
To avoid scanning the c++ files each time (parsing C/C++), the results
are retrieved from the task cache (bld.node_deps/bld.raw_deps).
The moc tasks are also created *dynamically* during the build.
"""

def __init__(self, *k, **kw):
Task.Task.__init__(self, *k, **kw)
self.moc_done = 0

def runnable_status(self):
"""
Compute the task signature to make sure the scanner was executed. Create the
moc tasks by using :py:meth:`waflib.Tools.qt5.qxx.add_moc_tasks` (if necessary),
then postpone the task execution (there is no need to recompute the task signature).
"""
if self.moc_done:
return Task.Task.runnable_status(self)
else:
for t in self.run_after:
if not t.hasrun:
return Task.ASK_LATER
self.add_moc_tasks()
return Task.Task.runnable_status(self)

def create_moc_task(self, h_node, m_node):
"""
If several libraries use the same classes, it is possible that moc will run several times (Issue 1318)
It is not possible to change the file names, but we can assume that the moc transformation will be identical,
and the moc tasks can be shared in a global cache.

The defines passed to moc will then depend on task generator order. If this is not acceptable, then
use the tool slow_qt5 instead (and enjoy the slow builds... :-( )
"""
try:
moc_cache = self.generator.bld.moc_cache
except AttributeError:
moc_cache = self.generator.bld.moc_cache = {}

try:
return moc_cache[h_node]
except KeyError:
tsk = moc_cache[h_node] = Task.classes['moc'](env=self.env, generator=self.generator)
tsk.set_inputs(h_node)
tsk.set_outputs(m_node)

if self.generator:
self.generator.tasks.append(tsk)

# direct injection in the build phase (safe because called from the main thread)
gen = self.generator.bld.producer
gen.outstanding.insert(0, tsk)
gen.total += 1

return tsk

def moc_h_ext(self):
ext = []
try:
ext = Options.options.qt_header_ext.split()
except AttributeError:
pass
if not ext:
ext = MOC_H
return ext

def add_moc_tasks(self):
"""
Create the moc tasks by looking in ``bld.raw_deps[self.uid()]``
"""
node = self.inputs[0]
bld = self.generator.bld

try:
# compute the signature once to know if there is a moc file to create
self.signature()
except KeyError:
# the moc file may be referenced somewhere else
pass
else:
# remove the signature, it must be recomputed with the moc task
delattr(self, 'cache_sig')

include_nodes = [node.parent] + self.generator.includes_nodes

moctasks = []
mocfiles = set([])
for d in bld.raw_deps.get(self.uid(), []):
if not d.endswith('.moc'):
continue

# process that base.moc only once
if d in mocfiles:
continue
mocfiles.add(d)

# find the source associated with the moc file
h_node = None

base2 = d[:-4]
for x in include_nodes:
for e in self.moc_h_ext():
h_node = x.find_node(base2 + e)
if h_node:
break
if h_node:
m_node = h_node.change_ext('.moc')
break
else:
# foo.cpp -> foo.cpp.moc
for k in EXT_QT5:
if base2.endswith(k):
for x in include_nodes:
h_node = x.find_node(base2)
if h_node:
break
if h_node:
m_node = h_node.change_ext(k + '.moc')
break

if not h_node:
raise Errors.WafError('No source found for %r which is a moc file' % d)

# create the moc task
task = self.create_moc_task(h_node, m_node)
moctasks.append(task)

# simple scheduler dependency: run the moc task before others
self.run_after.update(set(moctasks))
self.moc_done = 1

class trans_update(Task.Task):
"""Update a .ts files from a list of C++ files"""
run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}'
color = 'BLUE'
Task.update_outputs(trans_update)

class XMLHandler(ContentHandler):
"""
Parser for *.qrc* files
"""
def __init__(self):
self.buf = []
self.files = []
def startElement(self, name, attrs):
if name == 'file':
self.buf = []
def endElement(self, name):
if name == 'file':
self.files.append(str(''.join(self.buf)))
def characters(self, cars):
self.buf.append(cars)

@extension(*EXT_RCC)
def create_rcc_task(self, node):
"Create rcc and cxx tasks for *.qrc* files"
rcnode = node.change_ext('_rc.cpp')
self.create_task('rcc', node, rcnode)
cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o'))
try:
self.compiled_tasks.append(cpptask)
except AttributeError:
self.compiled_tasks = [cpptask]
return cpptask

@extension(*EXT_UI)
def create_uic_task(self, node):
"hook for uic tasks"
uictask = self.create_task('ui5', node)
uictask.outputs = [self.path.find_or_declare(self.env['ui_PATTERN'] % node.name[:-3])]

@extension('.ts')
def add_lang(self, node):
"""add all the .ts file into self.lang"""
self.lang = self.to_list(getattr(self, 'lang', [])) + [node]

@feature('qt5')
@after_method('apply_link')
def apply_qt5(self):
"""
Add MOC_FLAGS which may be necessary for moc::

def build(bld):
bld.program(features='qt5', source='main.cpp', target='app', use='QTCORE')

The additional parameters are:

:param lang: list of translation files (\*.ts) to process
:type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
:param update: whether to process the C++ files to update the \*.ts files (use **waf --translate**)
:type update: bool
:param langname: if given, transform the \*.ts files into a .qrc files to include in the binary file
:type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
"""
if getattr(self, 'lang', None):
qmtasks = []
for x in self.to_list(self.lang):
if isinstance(x, str):
x = self.path.find_resource(x + '.ts')
qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm')))

if getattr(self, 'update', None) and Options.options.trans_qt5:
cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [
a.inputs[0] for a in self.tasks if getattr(a, 'inputs', None) and a.inputs[0].name.endswith('.ui')]
for x in qmtasks:
self.create_task('trans_update', cxxnodes, x.inputs)

if getattr(self, 'langname', None):
qmnodes = [x.outputs[0] for x in qmtasks]
rcnode = self.langname
if isinstance(rcnode, str):
rcnode = self.path.find_or_declare(rcnode + '.qrc')
t = self.create_task('qm2rcc', qmnodes, rcnode)
k = create_rcc_task(self, t.outputs[0])
self.link_task.inputs.append(k.outputs[0])

lst = []
for flag in self.to_list(self.env['CXXFLAGS']):
if len(flag) < 2: continue
f = flag[0:2]
if f in ('-D', '-I', '/D', '/I'):
if (f[0] == '/'):
lst.append('-' + flag[1:])
else:
lst.append(flag)
self.env.append_value('MOC_FLAGS', lst)

@extension(*EXT_QT5)
def cxx_hook(self, node):
"""
Re-map C++ file extensions to the :py:class:`waflib.Tools.qt5.qxx` task.
"""
return self.create_compiled_task('qxx', node)

class rcc(Task.Task):
"""
Process *.qrc* files
"""
color = 'BLUE'
run_str = '${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
ext_out = ['.h']

def rcname(self):
return os.path.splitext(self.inputs[0].name)[0]

def scan(self):
"""Parse the *.qrc* files"""
if not has_xml:
Logs.error('no xml support was found, the rcc dependencies will be incomplete!')
return ([], [])

parser = make_parser()
curHandler = XMLHandler()
parser.setContentHandler(curHandler)
fi = open(self.inputs[0].abspath(), 'r')
try:
parser.parse(fi)
finally:
fi.close()

nodes = []
names = []
root = self.inputs[0].parent
for x in curHandler.files:
nd = root.find_resource(x)
if nd: nodes.append(nd)
else: names.append(x)
return (nodes, names)

class moc(Task.Task):
"""
Create *.moc* files
"""
color = 'BLUE'
run_str = '${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'

class ui5(Task.Task):
"""
Process *.ui* files
"""
color = 'BLUE'
run_str = '${QT_UIC} ${SRC} -o ${TGT}'
ext_out = ['.h']

class ts2qm(Task.Task):
"""
Create *.qm* files from *.ts* files
"""
color = 'BLUE'
run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'

class qm2rcc(Task.Task):
"""
Transform *.qm* files into *.qrc* files
"""
color = 'BLUE'
after = 'ts2qm'

def run(self):
"""Create a qrc file including the inputs"""
txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
self.outputs[0].write(code)

def configure(self):
"""
Besides the configuration options, the environment variable QT5_ROOT may be used
to give the location of the qt5 libraries (absolute path).

The detection will use the program *pkg-config* through :py:func:`waflib.Tools.config_c.check_cfg`
"""
self.find_qt5_binaries()
self.set_qt5_libs_to_check()
self.set_qt5_defines()
self.find_qt5_libraries()
self.add_qt5_rpath()
self.simplify_qt5_libs()

@conf
def find_qt5_binaries(self):
env = self.env
opt = Options.options

qtdir = getattr(opt, 'qtdir', '')
qtbin = getattr(opt, 'qtbin', '')

paths = []

if qtdir:
qtbin = os.path.join(qtdir, 'bin')

# the qt directory has been given from QT5_ROOT - deduce the qt binary path
if not qtdir:
qtdir = os.environ.get('QT5_ROOT', '')
qtbin = os.environ.get('QT5_BIN', None) or os.path.join(qtdir, 'bin')

if qtbin:
paths = [qtbin]

# no qtdir, look in the path and in /usr/local/Trolltech
if not qtdir:
paths = os.environ.get('PATH', '').split(os.pathsep)
paths.append('/usr/share/qt5/bin/')
try:
lst = Utils.listdir('/usr/local/Trolltech/')
except OSError:
pass
else:
if lst:
lst.sort()
lst.reverse()

# keep the highest version
qtdir = '/usr/local/Trolltech/%s/' % lst[0]
qtbin = os.path.join(qtdir, 'bin')
paths.append(qtbin)

# at the end, try to find qmake in the paths given
# keep the one with the highest version
cand = None
prev_ver = ['5', '0', '0']
for qmk in ('qmake-qt5', 'qmake5', 'qmake'):
try:
qmake = self.find_program(qmk, path_list=paths)
except self.errors.ConfigurationError:
pass
else:
try:
version = self.cmd_and_log(qmake + ['-query', 'QT_VERSION']).strip()
except self.errors.WafError:
pass
else:
if version:
new_ver = version.split('.')
if new_ver > prev_ver:
cand = qmake
prev_ver = new_ver

# qmake could not be found easily, rely on qtchooser
if not cand:
try:
self.find_program('qtchooser')
except self.errors.ConfigurationError:
pass
else:
cmd = self.env.QTCHOOSER + ['-qt=5', '-run-tool=qmake']
try:
version = self.cmd_and_log(cmd + ['-query', 'QT_VERSION'])
except self.errors.WafError:
pass
else:
cand = cmd

if cand:
self.env.QMAKE = cand
else:
self.fatal('Could not find qmake for qt5')

self.env.QT_INSTALL_BINS = qtbin = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_BINS']).strip() + os.sep
paths.insert(0, qtbin)

def find_bin(lst, var):
if var in env:
return
for f in lst:
try:
ret = self.find_program(f, path_list=paths)
except self.errors.ConfigurationError:
pass
else:
env[var]=ret
break

find_bin(['uic-qt5', 'uic'], 'QT_UIC')
if not env.QT_UIC:
self.fatal('cannot find the uic compiler for qt5')

self.start_msg('Checking for uic version')
uicver = self.cmd_and_log(env.QT_UIC + ['-version'], output=Context.BOTH)
uicver = ''.join(uicver).strip()
uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '')
self.end_msg(uicver)
if uicver.find(' 3.') != -1 or uicver.find(' 4.') != -1:
self.fatal('this uic compiler is for qt3 or qt4, add uic for qt5 to your path')

find_bin(['moc-qt5', 'moc'], 'QT_MOC')
find_bin(['rcc-qt5', 'rcc'], 'QT_RCC')
find_bin(['lrelease-qt5', 'lrelease'], 'QT_LRELEASE')
find_bin(['lupdate-qt5', 'lupdate'], 'QT_LUPDATE')

env['UIC_ST'] = '%s -o %s'
env['MOC_ST'] = '-o'
env['ui_PATTERN'] = 'ui_%s.h'
env['QT_LRELEASE_FLAGS'] = ['-silent']
env.MOCCPPPATH_ST = '-I%s'
env.MOCDEFINES_ST = '-D%s'
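# The qtchooser fallback above amounts to running (illustrative):
#   qtchooser -qt=5 -run-tool=qmake -query QT_VERSION
# so QMAKE may end up holding the qtchooser command line instead of a direct
# path to qmake.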

@conf
def find_qt5_libraries(self):
qtlibs = getattr(Options.options, 'qtlibs', None) or os.environ.get("QT5_LIBDIR", None)
if not qtlibs:
try:
qtlibs = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip()
except Errors.WafError:
qtdir = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip() + os.sep
qtlibs = os.path.join(qtdir, 'lib')
self.msg('Found the Qt5 libraries in', qtlibs)

qtincludes = os.environ.get("QT5_INCLUDES", None) or self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip()
env = self.env
if not 'PKG_CONFIG_PATH' in os.environ:
os.environ['PKG_CONFIG_PATH'] = '%s:%s/pkgconfig:/usr/lib/qt5/lib/pkgconfig:/opt/qt5/lib/pkgconfig:/usr/lib/qt5/lib:/opt/qt5/lib' % (qtlibs, qtlibs)

try:
if os.environ.get("QT5_XCOMPILE", None):
raise self.errors.ConfigurationError()
self.check_cfg(atleast_pkgconfig_version='0.1')
except self.errors.ConfigurationError:
for i in self.qt5_vars:
uselib = i.upper()
if Utils.unversioned_sys_platform() == "darwin":
# Since at least qt 4.7.3, each library is located in a separate directory
frameworkName = i + ".framework"
qtDynamicLib = os.path.join(qtlibs, frameworkName, i)
if os.path.exists(qtDynamicLib):
env.append_unique('FRAMEWORK_' + uselib, i)
self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
else:
self.msg('Checking for %s' % i, False, 'YELLOW')
env.append_unique('INCLUDES_' + uselib, os.path.join(qtlibs, frameworkName, 'Headers'))
elif env.DEST_OS != "win32":
qtDynamicLib = os.path.join(qtlibs, "lib" + i + ".so")
qtStaticLib = os.path.join(qtlibs, "lib" + i + ".a")
if os.path.exists(qtDynamicLib):
env.append_unique('LIB_' + uselib, i)
self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
elif os.path.exists(qtStaticLib):
env.append_unique('LIB_' + uselib, i)
self.msg('Checking for %s' % i, qtStaticLib, 'GREEN')
else:
self.msg('Checking for %s' % i, False, 'YELLOW')

env.append_unique('LIBPATH_' + uselib, qtlibs)
env.append_unique('INCLUDES_' + uselib, qtincludes)
env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
else:
# Release library names are like QtCore5
for k in ("lib%s.a", "lib%s5.a", "%s.lib", "%s5.lib"):
lib = os.path.join(qtlibs, k % i)
if os.path.exists(lib):
env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')])
self.msg('Checking for %s' % i, lib, 'GREEN')
break
else:
self.msg('Checking for %s' % i, False, 'YELLOW')

env.append_unique('LIBPATH_' + uselib, qtlibs)
env.append_unique('INCLUDES_' + uselib, qtincludes)
env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))

# Debug library names are like QtCore5d
uselib = i.upper() + "_debug"
for k in ("lib%sd.a", "lib%sd5.a", "%sd.lib", "%sd5.lib"):
lib = os.path.join(qtlibs, k % i)
if os.path.exists(lib):
env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')])
self.msg('Checking for %s' % i, lib, 'GREEN')
break
else:
self.msg('Checking for %s' % i, False, 'YELLOW')

env.append_unique('LIBPATH_' + uselib, qtlibs)
env.append_unique('INCLUDES_' + uselib, qtincludes)
env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
else:
for i in self.qt5_vars_debug + self.qt5_vars:
self.check_cfg(package=i, args='--cflags --libs', mandatory=False)

@conf
def simplify_qt5_libs(self):
# the libpaths make really long command-lines
# remove the qtcore ones from qtgui, etc
env = self.env
def process_lib(vars_, coreval):
for d in vars_:
var = d.upper()
if var == 'QTCORE':
continue

value = env['LIBPATH_'+var]
if value:
core = env[coreval]
accu = []
for lib in value:
if lib in core:
continue
accu.append(lib)
env['LIBPATH_'+var] = accu

process_lib(self.qt5_vars, 'LIBPATH_QTCORE')
process_lib(self.qt5_vars_debug, 'LIBPATH_QTCORE_DEBUG')

@conf
def add_qt5_rpath(self):
# rpath if wanted
env = self.env
if getattr(Options.options, 'want_rpath', False):
def process_rpath(vars_, coreval):
for d in vars_:
var = d.upper()
value = env['LIBPATH_'+var]
if value:
core = env[coreval]
accu = []
for lib in value:
if var != 'QTCORE':
if lib in core:
continue
accu.append('-Wl,--rpath='+lib)
env['RPATH_'+var] = accu
process_rpath(self.qt5_vars, 'LIBPATH_QTCORE')
process_rpath(self.qt5_vars_debug, 'LIBPATH_QTCORE_DEBUG')

@conf
def set_qt5_libs_to_check(self):
if not hasattr(self, 'qt5_vars'):
self.qt5_vars = QT5_LIBS
self.qt5_vars = Utils.to_list(self.qt5_vars)
if not hasattr(self, 'qt5_vars_debug'):
self.qt5_vars_debug = [a + '_debug' for a in self.qt5_vars]
self.qt5_vars_debug = Utils.to_list(self.qt5_vars_debug)

@conf
def set_qt5_defines(self):
if sys.platform != 'win32':
return
for x in self.qt5_vars:
y = x[2:].upper()
self.env.append_unique('DEFINES_%s' % x.upper(), 'QT_%s_LIB' % y)
self.env.append_unique('DEFINES_%s_DEBUG' % x.upper(), 'QT_%s_LIB' % y)

def options(opt):
"""
Command-line options
"""
opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries')

opt.add_option('--header-ext',
type='string',
default='',
help='header extension for moc files',
dest='qt_header_ext')

for i in 'qtdir qtbin qtlibs'.split():
opt.add_option('--'+i, type='string', default='', dest=i)

opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt5", default=False)


+ 0
- 193
waflib/Tools/ruby.py

@@ -1,193 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# daniel.svensson at purplescout.se 2008
# Thomas Nagy 2010 (ita)

"""
Support for Ruby extensions. A C/C++ compiler is required::

def options(opt):
opt.load('compiler_c ruby')
def configure(conf):
conf.load('compiler_c ruby')
conf.check_ruby_version((1,8,0))
conf.check_ruby_ext_devel()
conf.check_ruby_module('libxml')
def build(bld):
bld(
features = 'c cshlib rubyext',
source = 'rb_mytest.c',
target = 'mytest_ext',
install_path = '${ARCHDIR_RUBY}')
bld.install_files('${LIBDIR_RUBY}', 'Mytest.rb')
"""

import os
from waflib import Task, Options, Utils
from waflib.TaskGen import before_method, feature, extension
from waflib.Configure import conf

@feature('rubyext')
@before_method('apply_incpaths', 'apply_lib_vars', 'apply_bundle', 'apply_link')
def init_rubyext(self):
"""
Add required variables for ruby extensions
"""
self.install_path = '${ARCHDIR_RUBY}'
self.uselib = self.to_list(getattr(self, 'uselib', ''))
if not 'RUBY' in self.uselib:
self.uselib.append('RUBY')
if not 'RUBYEXT' in self.uselib:
self.uselib.append('RUBYEXT')

@feature('rubyext')
@before_method('apply_link', 'propagate_uselib')
def apply_ruby_so_name(self):
"""
Strip the *lib* prefix from ruby extensions
"""
self.env['cshlib_PATTERN'] = self.env['cxxshlib_PATTERN'] = self.env['rubyext_PATTERN']

@conf
def check_ruby_version(self, minver=()):
"""
Checks if ruby is installed.
	If it is installed, the variable RUBY will be set in the environment.
	The ruby binary can be overridden by the ``--with-ruby-binary`` command-line option.
"""

if Options.options.rubybinary:
self.env.RUBY = Options.options.rubybinary
else:
self.find_program('ruby', var='RUBY')

ruby = self.env.RUBY

try:
version = self.cmd_and_log(ruby + ['-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
except Exception:
self.fatal('could not determine ruby version')
self.env.RUBY_VERSION = version

try:
ver = tuple(map(int, version.split(".")))
except Exception:
self.fatal('unsupported ruby version %r' % version)

cver = ''
if minver:
if ver < minver:
self.fatal('ruby is too old %r' % ver)
cver = '.'.join([str(x) for x in minver])
else:
cver = ver

self.msg('Checking for ruby version %s' % str(minver or ''), cver)

@conf
def check_ruby_ext_devel(self):
"""
Check if a ruby extension can be created
"""
if not self.env.RUBY:
self.fatal('ruby detection is required first')

if not self.env.CC_NAME and not self.env.CXX_NAME:
self.fatal('load a c/c++ compiler first')

version = tuple(map(int, self.env.RUBY_VERSION.split(".")))

def read_out(cmd):
return Utils.to_list(self.cmd_and_log(self.env.RUBY + ['-rrbconfig', '-e', cmd]))

def read_config(key):
return read_out('puts RbConfig::CONFIG[%r]' % key)

ruby = self.env['RUBY']
archdir = read_config('archdir')
cpppath = archdir

if version >= (1, 9, 0):
ruby_hdrdir = read_config('rubyhdrdir')
cpppath += ruby_hdrdir
cpppath += [os.path.join(ruby_hdrdir[0], read_config('arch')[0])]

self.check(header_name='ruby.h', includes=cpppath, errmsg='could not find ruby header file')

self.env.LIBPATH_RUBYEXT = read_config('libdir')
self.env.LIBPATH_RUBYEXT += archdir
self.env.INCLUDES_RUBYEXT = cpppath
self.env.CFLAGS_RUBYEXT = read_config('CCDLFLAGS')
self.env.rubyext_PATTERN = '%s.' + read_config('DLEXT')[0]

# ok this is really stupid, but the command and flags are combined.
# so we try to find the first argument...
flags = read_config('LDSHARED')
while flags and flags[0][0] != '-':
flags = flags[1:]

# we also want to strip out the deprecated ppc flags
if len(flags) > 1 and flags[1] == "ppc":
flags = flags[2:]

self.env.LINKFLAGS_RUBYEXT = flags
self.env.LINKFLAGS_RUBYEXT += read_config('LIBS')
self.env.LINKFLAGS_RUBYEXT += read_config('LIBRUBYARG_SHARED')

if Options.options.rubyarchdir:
self.env.ARCHDIR_RUBY = Options.options.rubyarchdir
else:
self.env.ARCHDIR_RUBY = read_config('sitearchdir')[0]

if Options.options.rubylibdir:
self.env.LIBDIR_RUBY = Options.options.rubylibdir
else:
self.env.LIBDIR_RUBY = read_config('sitelibdir')[0]

@conf
def check_ruby_module(self, module_name):
"""
Check if the selected ruby interpreter can require the given ruby module::

def configure(conf):
conf.check_ruby_module('libxml')

:param module_name: module
:type module_name: string
"""
self.start_msg('Ruby module %s' % module_name)
try:
self.cmd_and_log(self.env.RUBY + ['-e', 'require \'%s\';puts 1' % module_name])
except Exception:
self.end_msg(False)
self.fatal('Could not find the ruby module %r' % module_name)
self.end_msg(True)

@extension('.rb')
def process(self, node):
tsk = self.create_task('run_ruby', node)

class run_ruby(Task.Task):
"""
	Task to run ruby files detected by file extension .rb::

		def options(opt):
			opt.load('ruby')
		def configure(ctx):
			ctx.check_ruby_version()
		def build(bld):
			bld.env['RBFLAGS'] = '-e puts "hello world"'
			bld(source='a_ruby_file.rb')
	"""
run_str = '${RUBY} ${RBFLAGS} -I ${SRC[0].parent.abspath()} ${SRC}'

def options(opt):
"""
Add the ``--with-ruby-archdir``, ``--with-ruby-libdir`` and ``--with-ruby-binary`` options
"""
opt.add_option('--with-ruby-archdir', type='string', dest='rubyarchdir', help='Specify directory where to install arch specific files')
opt.add_option('--with-ruby-libdir', type='string', dest='rubylibdir', help='Specify alternate ruby library path')
opt.add_option('--with-ruby-binary', type='string', dest='rubybinary', help='Specify alternate ruby binary')
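
The RbConfig queries behind check_ruby_ext_devel() boil down to one-liners run through the interpreter; a rough standalone equivalent, assuming a ruby binary on PATH, is:

	# hedged sketch of what read_config() above amounts to
	import subprocess
	def read_config(key):
		out = subprocess.check_output(['ruby', '-rrbconfig', '-e', 'puts RbConfig::CONFIG[%r]' % key])
		return out.decode().split()
	print(read_config('archdir'))  # e.g. ['/usr/lib/ruby/...']
	print(read_config('DLEXT'))    # e.g. ['so']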


+ 0  - 515  waflib/Tools/tex.py

@@ -1,515 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)

"""
TeX/LaTeX/PDFLaTeX/XeLaTeX support

Example::

def configure(conf):
conf.load('tex')
if not conf.env.LATEX:
			conf.fatal('The program LaTeX is required')

def build(bld):
bld(
features = 'tex',
type = 'latex', # pdflatex or xelatex
source = 'document.ltx', # mandatory, the source
outs = 'ps', # 'pdf' or 'ps pdf'
deps = 'crossreferencing.lst', # to give dependencies directly
prompt = 1, # 0 for the batch mode
)

Notes:

- To configure with a special program, use::

$ PDFLATEX=luatex waf configure

- This tool doesn't use the target attribute of the task generator
(``bld(target=...)``); the target file name is built from the source
base name and the out type(s)

"""

import os, re
from waflib import Utils, Task, Errors, Logs, Node
from waflib.TaskGen import feature, before_method

re_bibunit = re.compile(r'\\(?P<type>putbib)\[(?P<file>[^\[\]]*)\]',re.M)
def bibunitscan(self):
"""
Parse the inputs and try to find the *bibunit* dependencies

:return: list of bibunit files
:rtype: list of :py:class:`waflib.Node.Node`
"""
node = self.inputs[0]

nodes = []
if not node: return nodes

code = node.read()

for match in re_bibunit.finditer(code):
path = match.group('file')
if path:
for k in ('', '.bib'):
# add another loop for the tex include paths?
Logs.debug('tex: trying %s%s' % (path, k))
fi = node.parent.find_resource(path + k)
if fi:
nodes.append(fi)
# no break, people are crazy
else:
Logs.debug('tex: could not find %s' % path)

Logs.debug("tex: found the following bibunit files: %s" % nodes)
return nodes

exts_deps_tex = ['', '.ltx', '.tex', '.bib', '.pdf', '.png', '.eps', '.ps', '.sty']
"""List of typical file extensions included in latex files"""

exts_tex = ['.ltx', '.tex']
"""List of typical file extensions that contain latex"""

re_tex = re.compile(r'\\(?P<type>usepackage|RequirePackage|include|bibliography([^\[\]{}]*)|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}',re.M)
"""Regexp for expressions that may include latex files"""

g_bibtex_re = re.compile('bibdata', re.M)
"""Regexp for bibtex files"""

g_glossaries_re = re.compile('\\@newglossary', re.M)
"""Regexp for expressions that create glossaries"""

class tex(Task.Task):
"""
Compile a tex/latex file.

.. inheritance-diagram:: waflib.Tools.tex.latex waflib.Tools.tex.xelatex waflib.Tools.tex.pdflatex
"""

bibtex_fun, _ = Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}', shell=False)
bibtex_fun.__doc__ = """
Execute the program **bibtex**
"""

makeindex_fun, _ = Task.compile_fun('${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}', shell=False)
makeindex_fun.__doc__ = """
Execute the program **makeindex**
"""

makeglossaries_fun, _ = Task.compile_fun('${MAKEGLOSSARIES} ${SRCFILE}', shell=False)
makeglossaries_fun.__doc__ = """
Execute the program **makeglossaries**
"""

def exec_command(self, cmd, **kw):
"""
Override :py:meth:`waflib.Task.Task.exec_command` to execute the command without buffering (latex may prompt for inputs)

:return: the return code
:rtype: int
"""
bld = self.generator.bld
Logs.info('runner: %r' % cmd)
try:
if not kw.get('cwd', None):
kw['cwd'] = bld.cwd
except AttributeError:
bld.cwd = kw['cwd'] = bld.variant_dir
return Utils.subprocess.Popen(cmd, **kw).wait()

def scan_aux(self, node):
"""
A recursive regex-based scanner that finds included auxiliary files.
"""
nodes = [node]
re_aux = re.compile(r'\\@input{(?P<file>[^{}]*)}', re.M)

def parse_node(node):
code = node.read()
for match in re_aux.finditer(code):
path = match.group('file')
found = node.parent.find_or_declare(path)
if found and found not in nodes:
Logs.debug('tex: found aux node ' + found.abspath())
nodes.append(found)
parse_node(found)

parse_node(node)
return nodes

def scan(self):
"""
A recursive regex-based scanner that finds latex dependencies. It uses :py:attr:`waflib.Tools.tex.re_tex`

Depending on your needs you might want:

* to change re_tex::

from waflib.Tools import tex
tex.re_tex = myregex

* or to change the method scan from the latex tasks::

from waflib.Task import classes
classes['latex'].scan = myscanfunction
"""
node = self.inputs[0]

nodes = []
names = []
seen = []
if not node: return (nodes, names)

def parse_node(node):
if node in seen:
return
seen.append(node)
code = node.read()
global re_tex
for match in re_tex.finditer(code):

multibib = match.group('type')
if multibib and multibib.startswith('bibliography'):
multibib = multibib[len('bibliography'):]
if multibib.startswith('style'):
continue
else:
multibib = None

for path in match.group('file').split(','):
if path:
add_name = True
found = None
for k in exts_deps_tex:

# issue 1067, scan in all texinputs folders
for up in self.texinputs_nodes:
Logs.debug('tex: trying %s%s' % (path, k))
found = up.find_resource(path + k)
if found:
break


for tsk in self.generator.tasks:
if not found or found in tsk.outputs:
break
else:
nodes.append(found)
add_name = False
for ext in exts_tex:
if found.name.endswith(ext):
parse_node(found)
break

# multibib stuff
if found and multibib and found.name.endswith('.bib'):
try:
self.multibibs.append(found)
except AttributeError:
self.multibibs = [found]

# no break, people are crazy
if add_name:
names.append(path)
parse_node(node)

for x in nodes:
x.parent.get_bld().mkdir()

Logs.debug("tex: found the following : %s and names %s" % (nodes, names))
return (nodes, names)

def check_status(self, msg, retcode):
"""
Check an exit status and raise an error with a particular message

:param msg: message to display if the code is non-zero
:type msg: string
:param retcode: condition
:type retcode: boolean
"""
if retcode != 0:
raise Errors.WafError("%r command exit status %r" % (msg, retcode))

def bibfile(self):
"""
Parse the *.aux* files to find bibfiles to process.
If yes, execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`
"""
for aux_node in self.aux_nodes:
try:
ct = aux_node.read()
			except EnvironmentError as e:
				Logs.error('Error reading %s: %r' % (aux_node.abspath(), e))
continue

if g_bibtex_re.findall(ct):
Logs.info('calling bibtex')

self.env.env = {}
self.env.env.update(os.environ)
self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
self.env.SRCFILE = aux_node.name[:-4]
self.check_status('error when calling bibtex', self.bibtex_fun())

for node in getattr(self, 'multibibs', []):
self.env.env = {}
self.env.env.update(os.environ)
self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
self.env.SRCFILE = node.name[:-4]
self.check_status('error when calling bibtex', self.bibtex_fun())

def bibunits(self):
"""
Parse the *.aux* file to find bibunit files. If there are bibunit files,
execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`.
"""
try:
bibunits = bibunitscan(self)
except OSError:
Logs.error('error bibunitscan')
else:
if bibunits:
fn = ['bu' + str(i) for i in range(1, len(bibunits) + 1)]
if fn:
Logs.info('calling bibtex on bibunits')

for f in fn:
self.env.env = {'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()}
self.env.SRCFILE = f
self.check_status('error when calling bibtex', self.bibtex_fun())

def makeindex(self):
"""
Look on the filesystem if there is a *.idx* file to process. If yes, execute
:py:meth:`waflib.Tools.tex.tex.makeindex_fun`
"""
self.idx_node = self.inputs[0].change_ext('.idx')
try:
idx_path = self.idx_node.abspath()
os.stat(idx_path)
except OSError:
Logs.info('index file %s absent, not calling makeindex' % idx_path)
else:
Logs.info('calling makeindex')

self.env.SRCFILE = self.idx_node.name
self.env.env = {}
self.check_status('error when calling makeindex %s' % idx_path, self.makeindex_fun())

def bibtopic(self):
"""
Additional .aux files from the bibtopic package
"""
p = self.inputs[0].parent.get_bld()
if os.path.exists(os.path.join(p.abspath(), 'btaux.aux')):
self.aux_nodes += p.ant_glob('*[0-9].aux')

def makeglossaries(self):
src_file = self.inputs[0].abspath()
base_file = os.path.basename(src_file)
base, _ = os.path.splitext(base_file)
for aux_node in self.aux_nodes:
try:
ct = aux_node.read()
			except EnvironmentError as e:
				Logs.error('Error reading %s: %r' % (aux_node.abspath(), e))
continue

if g_glossaries_re.findall(ct):
if not self.env.MAKEGLOSSARIES:
raise Errors.WafError("The program 'makeglossaries' is missing!")
Logs.warn('calling makeglossaries')
self.env.SRCFILE = base
self.check_status('error when calling makeglossaries %s' % base, self.makeglossaries_fun())
return

def texinputs(self):
return os.pathsep.join([k.abspath() for k in self.texinputs_nodes]) + os.pathsep

def run(self):
"""
Runs the TeX build process.

		It may require multiple passes, depending on the use of cross-references,
		bibliographies and other content that may require additional passes.
The appropriate TeX compiler is called until the *.aux* files stop changing.

Makeindex and bibtex are called if necessary.
"""
env = self.env

if not env['PROMPT_LATEX']:
env.append_value('LATEXFLAGS', '-interaction=batchmode')
env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
env.append_value('XELATEXFLAGS', '-interaction=batchmode')

# important, set the cwd for everybody
self.cwd = self.inputs[0].parent.get_bld().abspath()

Logs.info('first pass on %s' % self.__class__.__name__)

# Hash .aux files before even calling the LaTeX compiler
cur_hash = self.hash_aux_nodes()

self.call_latex()

# Find the .aux files again since bibtex processing can require it
self.hash_aux_nodes()

self.bibtopic()
self.bibfile()
self.bibunits()
self.makeindex()
self.makeglossaries()

for i in range(10):
# There is no need to call latex again if the .aux hash value has not changed
prev_hash = cur_hash
cur_hash = self.hash_aux_nodes()
if not cur_hash:
				Logs.error('No .aux files to process')
if cur_hash and cur_hash == prev_hash:
break

# run the command
Logs.info('calling %s' % self.__class__.__name__)
self.call_latex()

def hash_aux_nodes(self):
try:
nodes = self.aux_nodes
except AttributeError:
try:
self.aux_nodes = self.scan_aux(self.inputs[0].change_ext('.aux'))
except IOError:
return None
return Utils.h_list([Utils.h_file(x.abspath()) for x in self.aux_nodes])

def call_latex(self):
self.env.env = {}
self.env.env.update(os.environ)
self.env.env.update({'TEXINPUTS': self.texinputs()})
self.env.SRCFILE = self.inputs[0].abspath()
self.check_status('error when calling latex', self.texfun())


class latex(tex):
texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
class pdflatex(tex):
texfun, vars = Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
class xelatex(tex):
texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False)

class dvips(Task.Task):
run_str = '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}'
color = 'BLUE'
after = ['latex', 'pdflatex', 'xelatex']

class dvipdf(Task.Task):
run_str = '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}'
color = 'BLUE'
after = ['latex', 'pdflatex', 'xelatex']

class pdf2ps(Task.Task):
run_str = '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}'
color = 'BLUE'
after = ['latex', 'pdflatex', 'xelatex']

@feature('tex')
@before_method('process_source')
def apply_tex(self):
"""
Create :py:class:`waflib.Tools.tex.tex` objects, and dvips/dvipdf/pdf2ps tasks if necessary (outs='ps', etc).
"""
if not getattr(self, 'type', None) in ('latex', 'pdflatex', 'xelatex'):
self.type = 'pdflatex'

tree = self.bld
outs = Utils.to_list(getattr(self, 'outs', []))

# prompt for incomplete files (else the batchmode is used)
self.env['PROMPT_LATEX'] = getattr(self, 'prompt', 1)

deps_lst = []

if getattr(self, 'deps', None):
deps = self.to_list(self.deps)
for dep in deps:
if isinstance(dep, str):
n = self.path.find_resource(dep)
if not n:
self.bld.fatal('Could not find %r for %r' % (dep, self))
if not n in deps_lst:
deps_lst.append(n)
elif isinstance(dep, Node.Node):
deps_lst.append(dep)

for node in self.to_nodes(self.source):

if self.type == 'latex':
task = self.create_task('latex', node, node.change_ext('.dvi'))
elif self.type == 'pdflatex':
task = self.create_task('pdflatex', node, node.change_ext('.pdf'))
elif self.type == 'xelatex':
task = self.create_task('xelatex', node, node.change_ext('.pdf'))

task.env = self.env

# add the manual dependencies
if deps_lst:
for n in deps_lst:
if not n in task.dep_nodes:
task.dep_nodes.append(n)

# texinputs is a nasty beast
if hasattr(self, 'texinputs_nodes'):
task.texinputs_nodes = self.texinputs_nodes
else:
task.texinputs_nodes = [node.parent, node.parent.get_bld(), self.path, self.path.get_bld()]
lst = os.environ.get('TEXINPUTS', '')
if self.env.TEXINPUTS:
lst += os.pathsep + self.env.TEXINPUTS
if lst:
lst = lst.split(os.pathsep)
for x in lst:
if x:
if os.path.isabs(x):
p = self.bld.root.find_node(x)
if p:
task.texinputs_nodes.append(p)
else:
Logs.error('Invalid TEXINPUTS folder %s' % x)
else:
Logs.error('Cannot resolve relative paths in TEXINPUTS %s' % x)

if self.type == 'latex':
if 'ps' in outs:
tsk = self.create_task('dvips', task.outputs, node.change_ext('.ps'))
tsk.env.env = dict(os.environ)
if 'pdf' in outs:
tsk = self.create_task('dvipdf', task.outputs, node.change_ext('.pdf'))
tsk.env.env = dict(os.environ)
elif self.type == 'pdflatex':
if 'ps' in outs:
self.create_task('pdf2ps', task.outputs, node.change_ext('.ps'))
self.source = []

def configure(self):
"""
Try to find the programs tex, latex and others. Do not raise any error if they
are not found.
"""
v = self.env
for p in 'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps makeglossaries'.split():
try:
self.find_program(p, var=p.upper())
except self.errors.ConfigurationError:
pass
v['DVIPSFLAGS'] = '-Ppdf'
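
As the scan() docstring above suggests, the dependency scanner can be swapped per task class; a minimal sketch, where myscanfunction is a placeholder you would supply:

	# hedged sketch of the override mentioned in tex.scan()
	from waflib.Task import classes
	def myscanfunction(task):
		# must return the usual (nodes, names) tuple
		return ([], [])
	classes['latex'].scan = myscanfunction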


+ 0  - 335  waflib/Tools/vala.py

@@ -1,335 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Ali Sabil, 2007
# Radosław Szkodziński, 2010

"""
At this point, vala is still unstable, so do not expect
this tool to be too stable either (apis, etc)
"""

import os.path, shutil, re
from waflib import Context, Task, Utils, Logs, Options, Errors
from waflib.TaskGen import extension, taskgen_method
from waflib.Configure import conf

class valac(Task.Task):
"""
Task to compile vala files.
"""
#run_str = "${VALAC} ${VALAFLAGS}" # ideally
#vars = ['VALAC_VERSION']
vars = ["VALAC", "VALAC_VERSION", "VALAFLAGS"]
ext_out = ['.h']

def run(self):
cmd = self.env.VALAC + self.env.VALAFLAGS
cmd.extend([a.abspath() for a in self.inputs])
ret = self.exec_command(cmd, cwd=self.outputs[0].parent.abspath())

if ret:
return ret

for x in self.outputs:
if id(x.parent) != id(self.outputs[0].parent):
shutil.move(self.outputs[0].parent.abspath() + os.sep + x.name, x.abspath())

if self.generator.dump_deps_node:
self.generator.dump_deps_node.write('\n'.join(self.generator.packages))

return ret

valac = Task.update_outputs(valac) # no decorators for python2 classes

@taskgen_method
def init_vala_task(self):
"""
Initializes the vala task with the relevant data (acts as a constructor)
"""
self.profile = getattr(self, 'profile', 'gobject')

if self.profile == 'gobject':
self.uselib = Utils.to_list(getattr(self, 'uselib', []))
if not 'GOBJECT' in self.uselib:
self.uselib.append('GOBJECT')

def addflags(flags):
self.env.append_value('VALAFLAGS', flags)

if self.profile:
addflags('--profile=%s' % self.profile)

if hasattr(self, 'threading'):
if self.profile == 'gobject':
if not 'GTHREAD' in self.uselib:
self.uselib.append('GTHREAD')
else:
				# Vala doesn't have threading support for the dova or posix profiles
Logs.warn("Profile %s means no threading support" % self.profile)
self.threading = False

if self.threading:
addflags('--threading')

valatask = self.valatask

self.is_lib = 'cprogram' not in self.features
if self.is_lib:
addflags('--library=%s' % self.target)

h_node = self.path.find_or_declare('%s.h' % self.target)
valatask.outputs.append(h_node)
addflags('--header=%s' % h_node.name)

valatask.outputs.append(self.path.find_or_declare('%s.vapi' % self.target))

if getattr(self, 'gir', None):
gir_node = self.path.find_or_declare('%s.gir' % self.gir)
addflags('--gir=%s' % gir_node.name)
valatask.outputs.append(gir_node)

self.vala_target_glib = getattr(self, 'vala_target_glib', getattr(Options.options, 'vala_target_glib', None))
if self.vala_target_glib:
addflags('--target-glib=%s' % self.vala_target_glib)

addflags(['--define=%s' % x for x in getattr(self, 'vala_defines', [])])


packages_private = Utils.to_list(getattr(self, 'packages_private', []))
addflags(['--pkg=%s' % x for x in packages_private])


def _get_api_version():
api_version = '1.0'
if hasattr(Context.g_module, 'API_VERSION'):
version = Context.g_module.API_VERSION.split(".")
if version[0] == "0":
api_version = "0." + version[1]
else:
api_version = version[0] + ".0"
return api_version

self.includes = Utils.to_list(getattr(self, 'includes', []))
self.uselib = self.to_list(getattr(self, 'uselib', []))
valatask.install_path = getattr(self, 'install_path', '')

valatask.vapi_path = getattr(self, 'vapi_path', '${DATAROOTDIR}/vala/vapi')
valatask.pkg_name = getattr(self, 'pkg_name', self.env['PACKAGE'])
valatask.header_path = getattr(self, 'header_path', '${INCLUDEDIR}/%s-%s' % (valatask.pkg_name, _get_api_version()))
valatask.install_binding = getattr(self, 'install_binding', True)

self.packages = packages = Utils.to_list(getattr(self, 'packages', []))
self.vapi_dirs = vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', []))
includes = []

if hasattr(self, 'use'):
local_packages = Utils.to_list(self.use)[:] # make sure to have a copy
seen = []
while len(local_packages) > 0:
package = local_packages.pop()
if package in seen:
continue
seen.append(package)

# check if the package exists
try:
package_obj = self.bld.get_tgen_by_name(package)
except Errors.WafError:
continue
package_name = package_obj.target
package_node = package_obj.path
package_dir = package_node.path_from(self.path)

for task in package_obj.tasks:
for output in task.outputs:
if output.name == package_name + ".vapi":
valatask.set_run_after(task)
if package_name not in packages:
packages.append(package_name)
if package_dir not in vapi_dirs:
vapi_dirs.append(package_dir)
if package_dir not in includes:
includes.append(package_dir)

if hasattr(package_obj, 'use'):
lst = self.to_list(package_obj.use)
lst.reverse()
local_packages = [pkg for pkg in lst if pkg not in seen] + local_packages

addflags(['--pkg=%s' % p for p in packages])

for vapi_dir in vapi_dirs:
v_node = self.path.find_dir(vapi_dir)
if not v_node:
Logs.warn('Unable to locate Vala API directory: %r' % vapi_dir)
else:
addflags('--vapidir=%s' % v_node.abspath())
addflags('--vapidir=%s' % v_node.get_bld().abspath())

self.dump_deps_node = None
if self.is_lib and self.packages:
self.dump_deps_node = self.path.find_or_declare('%s.deps' % self.target)
valatask.outputs.append(self.dump_deps_node)

self.includes.append(self.bld.srcnode.abspath())
self.includes.append(self.bld.bldnode.abspath())
for include in includes:
try:
self.includes.append(self.path.find_dir(include).abspath())
self.includes.append(self.path.find_dir(include).get_bld().abspath())
except AttributeError:
Logs.warn("Unable to locate include directory: '%s'" % include)


if self.is_lib and valatask.install_binding:
headers_list = [o for o in valatask.outputs if o.suffix() == ".h"]
try:
self.install_vheader.source = headers_list
except AttributeError:
self.install_vheader = self.bld.install_files(valatask.header_path, headers_list, self.env)

vapi_list = [o for o in valatask.outputs if (o.suffix() in (".vapi", ".deps"))]
try:
self.install_vapi.source = vapi_list
except AttributeError:
self.install_vapi = self.bld.install_files(valatask.vapi_path, vapi_list, self.env)

gir_list = [o for o in valatask.outputs if o.suffix() == '.gir']
try:
self.install_gir.source = gir_list
except AttributeError:
self.install_gir = self.bld.install_files(getattr(self, 'gir_path', '${DATAROOTDIR}/gir-1.0'), gir_list, self.env)

@extension('.vala', '.gs')
def vala_file(self, node):
"""
Compile a vala file and bind the task to *self.valatask*. If an existing vala task is already set, add the node
to its inputs. The typical example is::

def build(bld):
bld.program(
packages = 'gtk+-2.0',
target = 'vala-gtk-example',
uselib = 'GTK GLIB',
source = 'vala-gtk-example.vala foo.vala',
vala_defines = ['DEBUG'] # adds --define=<xyz> values to the command-line

# the following arguments are for libraries
#gir = 'hello-1.0',
#gir_path = '/tmp',
#vapi_path = '/tmp',
#pkg_name = 'hello'
# disable installing of gir, vapi and header
#install_binding = False

# profile = 'xyz' # adds --profile=<xyz> to enable profiling
# threading = True, # add --threading, except if profile is on or not on 'gobject'
# vala_target_glib = 'xyz' # adds --target-glib=<xyz>, can be given through the command-line option --vala-target-glib=<xyz>
)


:param node: vala file
:type node: :py:class:`waflib.Node.Node`
"""

try:
valatask = self.valatask
except AttributeError:
valatask = self.valatask = self.create_task('valac')
self.init_vala_task()

valatask.inputs.append(node)
c_node = node.change_ext('.c')
valatask.outputs.append(c_node)
self.source.append(c_node)

@conf
def find_valac(self, valac_name, min_version):
"""
Find the valac program, and execute it to store the version
number in *conf.env.VALAC_VERSION*

:param valac_name: program name
:type valac_name: string or list of string
:param min_version: minimum version acceptable
:type min_version: tuple of int
"""
valac = self.find_program(valac_name, var='VALAC')
try:
output = self.cmd_and_log(valac + ['--version'])
except Exception:
valac_version = None
else:
		ver = re.search(r'\d+\.\d+\.\d+', output).group(0).split('.')
valac_version = tuple([int(x) for x in ver])

self.msg('Checking for %s version >= %r' % (valac_name, min_version),
valac_version, valac_version and valac_version >= min_version)
if valac and valac_version < min_version:
self.fatal("%s version %r is too old, need >= %r" % (valac_name, valac_version, min_version))

self.env['VALAC_VERSION'] = valac_version
return valac

@conf
def check_vala(self, min_version=(0,8,0), branch=None):
"""
	Check that a vala compiler from a given branch exists and is at least
	the given version.

:param min_version: minimum version acceptable (0.8.0)
:type min_version: tuple
:param branch: first part of the version number, in case a snapshot is used (0, 8)
:type branch: tuple of int
"""
if not branch:
branch = min_version[:2]
try:
find_valac(self, 'valac-%d.%d' % (branch[0], branch[1]), min_version)
except self.errors.ConfigurationError:
find_valac(self, 'valac', min_version)

@conf
def check_vala_deps(self):
"""
Load the gobject and gthread packages if they are missing.
"""
if not self.env['HAVE_GOBJECT']:
pkg_args = {'package': 'gobject-2.0',
'uselib_store': 'GOBJECT',
'args': '--cflags --libs'}
if getattr(Options.options, 'vala_target_glib', None):
pkg_args['atleast_version'] = Options.options.vala_target_glib
self.check_cfg(**pkg_args)

if not self.env['HAVE_GTHREAD']:
pkg_args = {'package': 'gthread-2.0',
'uselib_store': 'GTHREAD',
'args': '--cflags --libs'}
if getattr(Options.options, 'vala_target_glib', None):
pkg_args['atleast_version'] = Options.options.vala_target_glib
self.check_cfg(**pkg_args)

def configure(self):
"""
Use the following to enforce minimum vala version::

def configure(conf):
conf.load('vala', funs='')
conf.check_vala(min_version=(0,10,0))
"""
self.load('gnu_dirs')
self.check_vala_deps()
self.check_vala()
self.env.VALAFLAGS = ['-C', '--quiet']

def options(opt):
"""
Load the :py:mod:`waflib.Tools.gnu_dirs` tool and add the ``--vala-target-glib`` command-line option
"""
opt.load('gnu_dirs')
valaopts = opt.add_option_group('Vala Compiler Options')
valaopts.add_option ('--vala-target-glib', default=None,
dest='vala_target_glib', metavar='MAJOR.MINOR',
help='Target version of glib for Vala GObject code generation')
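
A library build exercising the binding installation handled by init_vala_task() might look as follows; 'hello' and the file names are illustrative:

	# hedged sketch -- names are placeholders
	def build(bld):
		bld.shlib(
			packages = 'gtk+-2.0',
			target   = 'hello',
			uselib   = 'GTK',
			source   = 'hello.vala',
			gir      = 'Hello-1.0')  # also emits and installs a .gir binding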


+ 0  - 114  waflib/Tools/winres.py

@@ -1,114 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Brant Young, 2007

"Process *.rc* files for C/C++: X{.rc -> [.res|.rc.o]}"

import re, traceback
from waflib import Task, Logs, Utils
from waflib.TaskGen import extension
from waflib.Tools import c_preproc

@extension('.rc')
def rc_file(self, node):
"""
Bind the .rc extension to a winrc task
"""
obj_ext = '.rc.o'
if self.env['WINRC_TGT_F'] == '/fo':
obj_ext = '.res'
rctask = self.create_task('winrc', node, node.change_ext(obj_ext))
try:
self.compiled_tasks.append(rctask)
except AttributeError:
self.compiled_tasks = [rctask]

re_lines = re.compile(
'(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|'\
'(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)',
re.IGNORECASE | re.MULTILINE)

class rc_parser(c_preproc.c_parser):
def filter_comments(self, filepath):
code = Utils.readf(filepath)
if c_preproc.use_trigraphs:
for (a, b) in c_preproc.trig_def: code = code.split(a).join(b)
code = c_preproc.re_nl.sub('', code)
code = c_preproc.re_cpp.sub(c_preproc.repl, code)
ret = []
for m in re.finditer(re_lines, code):
if m.group(2):
ret.append((m.group(2), m.group(3)))
else:
ret.append(('include', m.group(5)))
return ret

def addlines(self, node):
self.currentnode_stack.append(node.parent)
filepath = node.abspath()

self.count_files += 1
if self.count_files > c_preproc.recursion_limit:
raise c_preproc.PreprocError("recursion limit exceeded")
pc = self.parse_cache
Logs.debug('preproc: reading file %r', filepath)
try:
lns = pc[filepath]
except KeyError:
pass
else:
self.lines.extend(lns)
return

try:
lines = self.filter_comments(filepath)
lines.append((c_preproc.POPFILE, ''))
lines.reverse()
pc[filepath] = lines
self.lines.extend(lines)
except IOError:
raise c_preproc.PreprocError("could not read the file %s" % filepath)
except Exception:
if Logs.verbose > 0:
Logs.error("parsing %s failed" % filepath)
traceback.print_exc()

class winrc(Task.Task):
"""
Task for compiling resource files
"""
run_str = '${WINRC} ${WINRCFLAGS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'
color = 'BLUE'

def scan(self):
tmp = rc_parser(self.generator.includes_nodes)
tmp.start(self.inputs[0], self.env)
nodes = tmp.nodes
names = tmp.names

if Logs.verbose:
Logs.debug('deps: deps for %s: %r; unresolved %r' % (str(self), nodes, names))

return (nodes, names)

def configure(conf):
"""
Detect the programs RC or windres, depending on the C/C++ compiler in use
"""
v = conf.env
v['WINRC_TGT_F'] = '-o'
v['WINRC_SRC_F'] = '-i'

# find rc.exe
if not conf.env.WINRC:
if v.CC_NAME == 'msvc':
conf.find_program('RC', var='WINRC', path_list = v['PATH'])
v['WINRC_TGT_F'] = '/fo'
v['WINRC_SRC_F'] = ''
else:
conf.find_program('windres', var='WINRC', path_list = v['PATH'])
if not conf.env.WINRC:
conf.fatal('winrc was not found!')

v['WINRCFLAGS'] = []
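
Using the tool amounts to listing .rc files among the sources; rc_file() above routes them to a winrc task. A hedged sketch with placeholder names:

	def build(bld):
		bld.program(
			source = 'main.c myapp.rc',
			target = 'myapp')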


+ 0  - 7  waflib/extras/add_objects.py

@@ -1,7 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011 (ita)

from waflib import Logs
Logs.warn('This tool has been merged into the main library, remove the references to "add_objects"')


+ 0  - 58  waflib/extras/biber.py

@@ -1,58 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011 (ita)

"""
Latex processing using "biber"
"""

import os
from waflib import Task, Logs

from waflib.Tools import tex as texmodule

class tex(texmodule.tex):
biber_fun, _ = Task.compile_fun('${BIBER} ${BIBERFLAGS} ${SRCFILE}',shell=False)
biber_fun.__doc__ = """
Execute the program **biber**
"""

def bibfile(self):
return None

def bibunits(self):
self.env.env = {}
self.env.env.update(os.environ)
self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
self.env.SRCFILE = self.aux_nodes[0].name[:-4]

if not self.env['PROMPT_LATEX']:
self.env.append_unique('BIBERFLAGS', '--quiet')

path = self.aux_nodes[0].abspath()[:-4] + '.bcf'
if os.path.isfile(path):
Logs.warn('calling biber')
self.check_status('error when calling biber, check %s.blg for errors' % (self.env.SRCFILE), self.biber_fun())
else:
super(tex, self).bibfile()
super(tex, self).bibunits()

class latex(tex):
texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
class pdflatex(tex):
texfun, vars = Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
class xelatex(tex):
texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False)

def configure(self):
"""
Almost the same as in tex.py, but try to detect 'biber'
"""
v = self.env
	for p in 'biber tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split():
try:
self.find_program(p, var=p.upper())
except self.errors.ConfigurationError:
pass
v['DVIPSFLAGS'] = '-Ppdf'
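
Loading this tool after 'tex' re-registers the latex/pdflatex/xelatex task classes with the biber-aware versions above; a minimal sketch, assuming the extras directory is available as a tool path:

	def configure(conf):
		conf.load('tex')
		conf.load('biber', tooldir='waflib/extras')  # tooldir is an assumption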


+ 0  - 131  waflib/extras/bjam.py

@@ -1,131 +0,0 @@
#! /usr/bin/env python
# per rosengren 2011

from os import sep, readlink
from os.path import abspath
from waflib import Logs
from waflib.TaskGen import feature, after_method
from waflib.Task import Task, always_run

def options(opt):
grp = opt.add_option_group('Bjam Options')
grp.add_option('--bjam_src', default=None, help='You can find it in <boost root>/tools/jam/src')
grp.add_option('--bjam_uname', default='linuxx86_64', help='bjam is built in <src>/bin.<uname>/bjam')
grp.add_option('--bjam_config', default=None)
grp.add_option('--bjam_toolset', default=None)

def configure(cnf):
if not cnf.env.BJAM_SRC:
cnf.env.BJAM_SRC = cnf.options.bjam_src
if not cnf.env.BJAM_UNAME:
cnf.env.BJAM_UNAME = cnf.options.bjam_uname
try:
cnf.find_program('bjam', path_list=[
cnf.env.BJAM_SRC + sep + 'bin.' + cnf.env.BJAM_UNAME
])
except Exception as e:
cnf.env.BJAM = None
if not cnf.env.BJAM_CONFIG:
cnf.env.BJAM_CONFIG = cnf.options.bjam_config
if not cnf.env.BJAM_TOOLSET:
cnf.env.BJAM_TOOLSET = cnf.options.bjam_toolset

@feature('bjam')
@after_method('process_rule')
def process_bjam(self):
if not self.bld.env.BJAM:
self.create_task('bjam_creator')
self.create_task('bjam_build')
self.create_task('bjam_installer')
if getattr(self, 'always', False):
always_run(bjam_creator)
always_run(bjam_build)
always_run(bjam_installer)

class bjam_creator(Task):
ext_out = 'bjam_exe'
vars=['BJAM_SRC', 'BJAM_UNAME']
def run(self):
env = self.env
gen = self.generator
path = gen.path
bld = gen.bld
bjam = gen.bld.root.find_dir(env.BJAM_SRC)
if not bjam:
Logs.error('Can not find bjam source')
return -1
bjam_exe_relpath = 'bin.' + env.BJAM_UNAME + '/bjam'
bjam_exe = bjam.find_resource(bjam_exe_relpath)
if bjam_exe:
env.BJAM = bjam_exe.srcpath()
return 0
bjam_cmd = ['./build.sh']
Logs.debug('runner: ' + bjam.srcpath() + '> ' + str(bjam_cmd))
result = self.exec_command(bjam_cmd, cwd=bjam.srcpath())
if not result == 0:
Logs.error('bjam failed')
return -1
bjam_exe = bjam.find_resource(bjam_exe_relpath)
if bjam_exe:
env.BJAM = bjam_exe.srcpath()
return 0
Logs.error('bjam failed')
return -1

class bjam_build(Task):
ext_in = 'bjam_exe'
ext_out = 'install'
vars = ['BJAM_TOOLSET']
def run(self):
env = self.env
gen = self.generator
path = gen.path
bld = gen.bld
if hasattr(gen, 'root'):
build_root = path.find_node(gen.root)
else:
build_root = path
jam = bld.srcnode.find_resource(env.BJAM_CONFIG)
if jam:
Logs.debug('bjam: Using jam configuration from ' + jam.srcpath())
jam_rel = jam.relpath_gen(build_root)
else:
Logs.warn('No build configuration in build_config/user-config.jam. Using default')
jam_rel = None
bjam_exe = bld.srcnode.find_node(env.BJAM)
if not bjam_exe:
Logs.error('env.BJAM is not set')
return -1
bjam_exe_rel = bjam_exe.relpath_gen(build_root)
cmd = ([bjam_exe_rel] +
(['--user-config=' + jam_rel] if jam_rel else []) +
['--stagedir=' + path.get_bld().path_from(build_root)] +
['--debug-configuration'] +
['--with-' + lib for lib in self.generator.target] +
(['toolset=' + env.BJAM_TOOLSET] if env.BJAM_TOOLSET else []) +
['link=' + 'shared'] +
['variant=' + 'release']
)
Logs.debug('runner: ' + build_root.srcpath() + '> ' + str(cmd))
ret = self.exec_command(cmd, cwd=build_root.srcpath())
if ret != 0:
return ret
self.set_outputs(path.get_bld().ant_glob('lib/*') + path.get_bld().ant_glob('bin/*'))
return 0

class bjam_installer(Task):
ext_in = 'install'
def run(self):
gen = self.generator
path = gen.path
for idir, pat in (('${LIBDIR}', 'lib/*'), ('${BINDIR}', 'bin/*')):
files = []
for n in path.get_bld().ant_glob(pat):
try:
t = readlink(n.srcpath())
gen.bld.symlink_as(sep.join([idir, n.name]), t, postpone=False)
except OSError:
files.append(n)
gen.bld.install_files(idir, files, postpone=False)
return 0
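
A task generator consumed by process_bjam() above could look like the following sketch; the root directory and library names are illustrative:

	# hedged sketch -- 'boost_root' and the libs are placeholders
	def build(bld):
		bld(
			features = 'bjam',
			root     = 'boost_root',              # where build.sh / the Jamroot live
			target   = ['system', 'filesystem'],  # becomes --with-<lib> flags
			always   = True)                      # force the tasks to run every build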


+ 0  - 111  waflib/extras/blender.py

@@ -1,111 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Michal Proszek, 2014 (poxip)

"""
Detect the version of Blender, path
and install the extension:

def options(opt):
opt.load('blender')
def configure(cnf):
cnf.load('blender')
def build(bld):
bld(name='io_mesh_raw',
feature='blender',
files=['file1.py', 'file2.py']
)
If the name variable is empty, the files are installed in scripts/addons, otherwise in scripts/addons/name.
Use ./waf configure --system to set the installation directory to the system path.
"""
import os
import re
from getpass import getuser

from waflib import Utils
from waflib.TaskGen import feature
from waflib.Configure import conf

def options(opt):
opt.add_option(
'-s', '--system',
dest='directory_system',
default=False,
action='store_true',
help='determines installation directory (default: user)'
)

@conf
def find_blender(ctx):
	'''Find blender, store its version in env.BLENDER_VERSION and return the program'''
blender = ctx.find_program('blender')
output = ctx.cmd_and_log(blender + ['--version'])
m = re.search(r'Blender\s*((\d+(\.|))*)', output)
if not m:
ctx.fatal('Could not retrieve blender version')

try:
blender_version = m.group(1)
except IndexError:
ctx.fatal('Could not retrieve blender version')

ctx.env['BLENDER_VERSION'] = blender_version
return blender

@conf
def configure_paths(ctx):
"""Setup blender paths"""
# Get the username
user = getuser()
_platform = Utils.unversioned_sys_platform()
config_path = {'user': '', 'system': ''}
if _platform.startswith('linux'):
config_path['user'] = '/home/%s/.config/blender/' % user
config_path['system'] = '/usr/share/blender/'
elif _platform == 'darwin':
# MAC OS X
config_path['user'] = \
'/Users/%s/Library/Application Support/Blender/' % user
config_path['system'] = '/Library/Application Support/Blender/'
elif Utils.is_win32:
# Windows
appdata_path = ctx.getenv('APPDATA').replace('\\', '/')
homedrive = ctx.getenv('HOMEDRIVE').replace('\\', '/')

config_path['user'] = '%s/Blender Foundation/Blender/' % appdata_path
config_path['system'] = \
'%sAll Users/AppData/Roaming/Blender Foundation/Blender/' % homedrive
else:
ctx.fatal(
'Unsupported platform. '
'Available platforms: Linux, OSX, MS-Windows.'
)

blender_version = ctx.env['BLENDER_VERSION']

config_path['user'] += blender_version + '/'
config_path['system'] += blender_version + '/'

ctx.env['BLENDER_CONFIG_DIR'] = os.path.abspath(config_path['user'])
if ctx.options.directory_system:
ctx.env['BLENDER_CONFIG_DIR'] = config_path['system']

ctx.env['BLENDER_ADDONS_DIR'] = os.path.join(
ctx.env['BLENDER_CONFIG_DIR'], 'scripts/addons'
)
Utils.check_dir(ctx.env['BLENDER_ADDONS_DIR'])

def configure(ctx):
ctx.find_blender()
ctx.configure_paths()

@feature('blender_list')
def blender(self):
# Two ways to install a blender extension: as a module or just .py files
dest_dir = os.path.join(self.env.BLENDER_ADDONS_DIR, self.get_name())
Utils.check_dir(dest_dir)
self.bld.install_files(
dest_dir,
getattr(self, 'files', '.')
)
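
Note that the decorator above is @feature('blender_list') while the module docstring shows feature='blender'; a build call matching the decorator as written would be:

	# hedged sketch matching the decorator above; names are illustrative
	def build(bld):
		bld(name='io_mesh_raw',
			features='blender_list',
			files=['file1.py', 'file2.py'])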

+ 0  - 81  waflib/extras/boo.py

@@ -1,81 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# Yannick LM 2011

"""
Support for the boo programming language, for example::

bld(features = "boo", # necessary feature
source = "src.boo", # list of boo files
gen = "world.dll", # target
type = "library", # library/exe ("-target:xyz" flag)
name = "world" # necessary if the target is referenced by 'use'
)
"""

from waflib import Task
from waflib.Configure import conf
from waflib.TaskGen import feature, after_method, before_method, extension

@extension('.boo')
def boo_hook(self, node):
# Nothing here yet ...
# TODO filter the non-boo source files in 'apply_booc' and remove this method
pass

@feature('boo')
@before_method('process_source')
def apply_booc(self):
"""Create a booc task """
src_nodes = self.to_nodes(self.source)
out_node = self.path.find_or_declare(self.gen)

self.boo_task = self.create_task('booc', src_nodes, [out_node])

# Set variables used by the 'booc' task
self.boo_task.env.OUT = '-o:%s' % out_node.abspath()

# type is "exe" by default
type = getattr(self, "type", "exe")
self.boo_task.env.BOO_TARGET_TYPE = "-target:%s" % type

@feature('boo')
@after_method('apply_booc')
def use_boo(self):
""""
boo applications honor the **use** keyword::
"""
dep_names = self.to_list(getattr(self, 'use', []))
for dep_name in dep_names:
dep_task_gen = self.bld.get_tgen_by_name(dep_name)
if not dep_task_gen:
continue
dep_task_gen.post()
dep_task = getattr(dep_task_gen, 'boo_task', None)
if not dep_task:
# Try a cs task:
dep_task = getattr(dep_task_gen, 'cs_task', None)
if not dep_task:
# Try a link task:
			dep_task = getattr(dep_task_gen, 'link_task', None)
if not dep_task:
# Abort ...
continue
self.boo_task.set_run_after(dep_task) # order
self.boo_task.dep_nodes.extend(dep_task.outputs) # dependency
self.boo_task.env.append_value('BOO_FLAGS', '-reference:%s' % dep_task.outputs[0].abspath())

class booc(Task.Task):
"""Compiles .boo files """
color = 'YELLOW'
run_str = '${BOOC} ${BOO_FLAGS} ${BOO_TARGET_TYPE} ${OUT} ${SRC}'

@conf
def check_booc(self):
self.find_program('booc', 'BOOC')
self.env.BOO_FLAGS = ['-nologo']

def configure(self):
"""Check that booc is available """
self.check_booc()
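
The 'use' chaining handled by use_boo() above orders the compilations and adds -reference flags; a hedged sketch with placeholder names:

	def build(bld):
		bld(features='boo', source='world.boo', gen='world.dll',
			type='library', name='world')
		bld(features='boo', source='hello.boo', gen='hello.exe',
			use='world')  # adds -reference:world.dll and runs after it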


+ 0  - 411  waflib/extras/boost.py

@@ -1,411 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
#
# partially based on boost.py written by Gernot Vormayr
# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008
# modified by Bjoern Michaelsen, 2008
# modified by Luca Fossati, 2008
# rewritten for waf 1.5.1, Thomas Nagy, 2008
# rewritten for waf 1.6.2, Sylvain Rouquette, 2011

'''

This is an extra tool, not bundled with the default waf binary.
To add the boost tool to the waf file:
$ ./waf-light --tools=compat15,boost
or, if you have waf >= 1.6.2
$ ./waf update --files=boost

When using this tool, the wscript will look like:

def options(opt):
opt.load('compiler_cxx boost')

def configure(conf):
conf.load('compiler_cxx boost')
conf.check_boost(lib='system filesystem')

def build(bld):
bld(source='main.cpp', target='app', use='BOOST')

Options are generated in order to specify the location of boost includes/libraries.
The `check_boost` configuration function allows specifying which boost libraries are used.
It can also provide default values for the --boost-* command-line arguments.
Everything will be packaged together in a BOOST component that you can use.

When using MSVC, a lot of compilation flags need to match your BOOST build configuration:
- you may have to add /EHsc to your CXXFLAGS or define boost::throw_exception if BOOST_NO_EXCEPTIONS is defined.
Errors: C4530
- boost libraries will try to be smart and use the (pretty but often not useful) auto-linking feature of MSVC,
so before calling `conf.check_boost` you might want to disable it by adding
conf.env.DEFINES_BOOST += ['BOOST_ALL_NO_LIB']
Errors: LNK1181
- boost might also be compiled with /MT, which links the runtime statically.
If you have problems with redefined symbols, try
self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
self.env['CXXFLAGS_%s' % var] += ['/MD', '/EHsc']
Passing `--boost-linkage_autodetect` might help to ensure correct linkage in some basic cases.

'''

import sys
import re
from waflib import Utils, Logs, Errors
from waflib.Configure import conf
from waflib.TaskGen import feature, after_method

BOOST_LIBS = ['/usr/lib/x86_64-linux-gnu', '/usr/lib/i386-linux-gnu',
'/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib']
BOOST_INCLUDES = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include']
BOOST_VERSION_FILE = 'boost/version.hpp'
BOOST_VERSION_CODE = '''
#include <iostream>
#include <boost/version.hpp>
int main() { std::cout << BOOST_LIB_VERSION << std::endl; }
'''

BOOST_ERROR_CODE = '''
#include <boost/system/error_code.hpp>
int main() { boost::system::error_code c; }
'''

BOOST_THREAD_CODE = '''
#include <boost/thread.hpp>
int main() { boost::thread t; }
'''

# toolsets from {boost_dir}/tools/build/v2/tools/common.jam
PLATFORM = Utils.unversioned_sys_platform()
detect_intel = lambda env: (PLATFORM == 'win32') and 'iw' or 'il'
detect_clang = lambda env: (PLATFORM == 'darwin') and 'clang-darwin' or 'clang'
detect_mingw = lambda env: (re.search('MinGW', env.CXX[0])) and 'mgw' or 'gcc'
BOOST_TOOLSETS = {
'borland': 'bcb',
'clang': detect_clang,
'como': 'como',
'cw': 'cw',
'darwin': 'xgcc',
'edg': 'edg',
'g++': detect_mingw,
'gcc': detect_mingw,
'icpc': detect_intel,
'intel': detect_intel,
'kcc': 'kcc',
'kylix': 'bck',
'mipspro': 'mp',
'mingw': 'mgw',
'msvc': 'vc',
'qcc': 'qcc',
'sun': 'sw',
'sunc++': 'sw',
'tru64cxx': 'tru',
'vacpp': 'xlc'
}


def options(opt):
opt.add_option('--boost-includes', type='string',
default='', dest='boost_includes',
help='''path to the boost includes root (~boost root)
e.g. /path/to/boost_1_47_0''')
opt.add_option('--boost-libs', type='string',
default='', dest='boost_libs',
help='''path to the directory where the boost libs are
e.g. /path/to/boost_1_47_0/stage/lib''')
opt.add_option('--boost-mt', action='store_true',
default=False, dest='boost_mt',
help='select multi-threaded libraries')
opt.add_option('--boost-abi', type='string', default='', dest='boost_abi',
help='''select libraries with tags (gd for debug, static is automatically added),
see doc Boost, Getting Started, chapter 6.1''')
opt.add_option('--boost-linkage_autodetect', action="store_true", dest='boost_linkage_autodetect',
help="auto-detect boost linkage options (don't get used to it / might break other stuff)")
opt.add_option('--boost-toolset', type='string',
default='', dest='boost_toolset',
help='force a toolset e.g. msvc, vc90, \
gcc, mingw, mgw45 (default: auto)')
py_version = '%d%d' % (sys.version_info[0], sys.version_info[1])
opt.add_option('--boost-python', type='string',
default=py_version, dest='boost_python',
help='select the lib python with this version \
(default: %s)' % py_version)


@conf
def __boost_get_version_file(self, d):
if not d:
return None
dnode = self.root.find_dir(d)
if dnode:
return dnode.find_node(BOOST_VERSION_FILE)
return None

@conf
def boost_get_version(self, d):
"""silently retrieve the boost version number"""
node = self.__boost_get_version_file(d)
if node:
try:
txt = node.read()
except EnvironmentError:
Logs.error("Could not read the file %r" % node.abspath())
else:
re_but = re.compile('^#define\\s+BOOST_LIB_VERSION\\s+"(.*)"', re.M)
m = re_but.search(txt)
if m:
return m.group(1)
return self.check_cxx(fragment=BOOST_VERSION_CODE, includes=[d], execute=True, define_ret=True)

@conf
def boost_get_includes(self, *k, **kw):
includes = k and k[0] or kw.get('includes', None)
if includes and self.__boost_get_version_file(includes):
return includes
for d in self.environ.get('INCLUDE', '').split(';') + BOOST_INCLUDES:
if self.__boost_get_version_file(d):
return d
if includes:
self.end_msg('headers not found in %s' % includes)
self.fatal('The configuration failed')
else:
self.end_msg('headers not found, please provide a --boost-includes argument (see help)')
self.fatal('The configuration failed')


@conf
def boost_get_toolset(self, cc):
toolset = cc
if not cc:
build_platform = Utils.unversioned_sys_platform()
if build_platform in BOOST_TOOLSETS:
cc = build_platform
else:
cc = self.env.CXX_NAME
if cc in BOOST_TOOLSETS:
toolset = BOOST_TOOLSETS[cc]
return isinstance(toolset, str) and toolset or toolset(self.env)


@conf
def __boost_get_libs_path(self, *k, **kw):
''' return the lib path and all the files in it '''
if 'files' in kw:
return self.root.find_dir('.'), Utils.to_list(kw['files'])
libs = k and k[0] or kw.get('libs', None)
if libs:
path = self.root.find_dir(libs)
files = path.ant_glob('*boost_*')
if not libs or not files:
for d in self.environ.get('LIB', '').split(';') + BOOST_LIBS:
if not d:
continue
path = self.root.find_dir(d)
if path:
files = path.ant_glob('*boost_*')
if files:
break
path = self.root.find_dir(d + '64')
if path:
files = path.ant_glob('*boost_*')
if files:
break
if not path:
if libs:
self.end_msg('libs not found in %s' % libs)
self.fatal('The configuration failed')
else:
self.end_msg('libs not found, please provide a --boost-libs argument (see help)')
self.fatal('The configuration failed')

self.to_log('Found the boost path in %r with the libraries:' % path)
for x in files:
self.to_log(' %r' % x)
return path, files

@conf
def boost_get_libs(self, *k, **kw):
'''
return the lib path and the required libs
according to the parameters
'''
path, files = self.__boost_get_libs_path(**kw)
files = sorted(files, key=lambda f: (len(f.name), f.name), reverse=True)
toolset = self.boost_get_toolset(kw.get('toolset', ''))
toolset_pat = '(-%s[0-9]{0,3})' % toolset
version = '-%s' % self.env.BOOST_VERSION

def find_lib(re_lib, files):
for file in files:
if re_lib.search(file.name):
self.to_log('Found boost lib %s' % file)
return file
return None

def format_lib_name(name):
if name.startswith('lib') and self.env.CC_NAME != 'msvc':
name = name[3:]
return name[:name.rfind('.')]

def match_libs(lib_names, is_static):
libs = []
lib_names = Utils.to_list(lib_names)
if not lib_names:
return libs
t = []
if kw.get('mt', False):
t.append('-mt')
if kw.get('abi', None):
t.append('%s%s' % (is_static and '-s' or '-', kw['abi']))
elif is_static:
t.append('-s')
tags_pat = t and ''.join(t) or ''
ext = is_static and self.env.cxxstlib_PATTERN or self.env.cxxshlib_PATTERN
ext = ext.partition('%s')[2] # remove '%s' or 'lib%s' from PATTERN

for lib in lib_names:
if lib == 'python':
# for instance, with python='27',
# accepts '-py27', '-py2', '27' and '2'
# but will reject '-py3', '-py26', '26' and '3'
tags = '({0})?((-py{2})|(-py{1}(?=[^0-9]))|({2})|({1}(?=[^0-9]))|(?=[^0-9])(?!-py))'.format(tags_pat, kw['python'][0], kw['python'])
else:
tags = tags_pat
# Trying libraries, from most strict match to least one
for pattern in ['boost_%s%s%s%s%s$' % (lib, toolset_pat, tags, version, ext),
'boost_%s%s%s%s$' % (lib, tags, version, ext),
# Give up trying to find the right version
'boost_%s%s%s%s$' % (lib, toolset_pat, tags, ext),
'boost_%s%s%s$' % (lib, tags, ext),
'boost_%s%s$' % (lib, ext),
'boost_%s' % lib]:
self.to_log('Trying pattern %s' % pattern)
file = find_lib(re.compile(pattern), files)
if file:
libs.append(format_lib_name(file.name))
break
else:
self.end_msg('lib %s not found in %s' % (lib, path.abspath()))
self.fatal('The configuration failed')
return libs

return path.abspath(), match_libs(kw.get('lib', None), False), match_libs(kw.get('stlib', None), True)


@conf
def check_boost(self, *k, **kw):
"""
Initialize boost libraries to be used.

Keywords: you can pass the same parameters as with the command line (without "--boost-").
	Note that the command line takes priority, and should preferably be used.
"""
if not self.env['CXX']:
self.fatal('load a c++ compiler first, conf.load("compiler_cxx")')

params = {
'lib': k and k[0] or kw.get('lib', None),
'stlib': kw.get('stlib', None)
}
for key, value in self.options.__dict__.items():
if not key.startswith('boost_'):
continue
key = key[len('boost_'):]
params[key] = value and value or kw.get(key, '')

var = kw.get('uselib_store', 'BOOST')

self.start_msg('Checking boost includes')
self.env['INCLUDES_%s' % var] = inc = self.boost_get_includes(**params)
self.env.BOOST_VERSION = self.boost_get_version(inc)
self.end_msg(self.env.BOOST_VERSION)
if Logs.verbose:
Logs.pprint('CYAN', ' path : %s' % self.env['INCLUDES_%s' % var])

if not params['lib'] and not params['stlib']:
return
if 'static' in kw or 'static' in params:
Logs.warn('boost: static parameter is deprecated, use stlib instead.')
self.start_msg('Checking boost libs')
path, libs, stlibs = self.boost_get_libs(**params)
self.env['LIBPATH_%s' % var] = [path]
self.env['STLIBPATH_%s' % var] = [path]
self.env['LIB_%s' % var] = libs
self.env['STLIB_%s' % var] = stlibs
self.end_msg('ok')
if Logs.verbose:
Logs.pprint('CYAN', ' path : %s' % path)
Logs.pprint('CYAN', ' shared libs : %s' % libs)
Logs.pprint('CYAN', ' static libs : %s' % stlibs)


def try_link():
if (params['lib'] and 'system' in params['lib']) or \
params['stlib'] and 'system' in params['stlib']:
self.check_cxx(fragment=BOOST_ERROR_CODE, use=var, execute=False)
if (params['lib'] and 'thread' in params['lib']) or \
params['stlib'] and 'thread' in params['stlib']:
self.check_cxx(fragment=BOOST_THREAD_CODE, use=var, execute=False)

if params.get('linkage_autodetect', False):
self.start_msg("Attempting to detect boost linkage flags")
toolset = self.boost_get_toolset(kw.get('toolset', ''))
if toolset in ('vc',):
# disable auto-linking feature, causing error LNK1181
# because the code wants to be linked against
self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']

# if no dlls are present, we guess the .lib files are not stubs
has_dlls = False
for x in Utils.listdir(path):
if x.endswith(self.env.cxxshlib_PATTERN % ''):
has_dlls = True
break
if not has_dlls:
self.env['STLIBPATH_%s' % var] = [path]
self.env['STLIB_%s' % var] = libs
del self.env['LIB_%s' % var]
del self.env['LIBPATH_%s' % var]

# we attempt to play with some known-to-work CXXFLAGS combinations
for cxxflags in (['/MD', '/EHsc'], []):
self.env.stash()
self.env["CXXFLAGS_%s" % var] += cxxflags
try:
try_link()
self.end_msg("ok: winning cxxflags combination: %s" % (self.env["CXXFLAGS_%s" % var]))
exc = None
break
except Errors.ConfigurationError as e:
self.env.revert()
exc = e

if exc is not None:
self.end_msg("Could not auto-detect boost linking flags combination, you may report it to boost.py author", ex=exc)
self.fatal('The configuration failed')
else:
self.end_msg("Boost linkage flags auto-detection not implemented (needed ?) for this toolchain")
self.fatal('The configuration failed')
else:
self.start_msg('Checking for boost linkage')
try:
try_link()
except Errors.ConfigurationError as e:
self.end_msg("Could not link against boost libraries using supplied options")
self.fatal('The configuration failed')
self.end_msg('ok')


@feature('cxx')
@after_method('apply_link')
def install_boost(self):
if install_boost.done or not Utils.is_win32 or not self.bld.cmd.startswith('install'):
return
install_boost.done = True
inst_to = getattr(self, 'install_path', '${BINDIR}')
for lib in self.env.LIB_BOOST:
try:
file = self.bld.find_file(self.env.cxxshlib_PATTERN % lib, self.env.LIBPATH_BOOST)
self.bld.install_files(inst_to, self.bld.root.find_node(file))
except:
continue
install_boost.done = False
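
Static and multi-threaded variants go through the same keyword handling as above; a hedged sketch, with illustrative library names:

	def configure(conf):
		conf.load('compiler_cxx boost')
		conf.check_boost(lib='system filesystem', stlib='regex', mt=True)
	def build(bld):
		bld(source='main.cpp', target='app', use='BOOST')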

+ 0  - 73  waflib/extras/c_dumbpreproc.py

@@ -1,73 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)

"""
Dumb C/C++ preprocessor for finding dependencies

It will look at all include files it can find after removing the comments, so the following
will always add the dependency on both "a.h" and "b.h"::

#include "a.h"
#ifdef B
#include "b.h"
#endif
int main() {
return 0;
}

To use::

def configure(conf):
conf.load('compiler_c')
conf.load('c_dumbpreproc')
"""

import re
from waflib.Tools import c_preproc

re_inc = re.compile(
'^[ \t]*(#|%:)[ \t]*(include)[ \t]*[<"](.*)[>"]\r*$',
re.IGNORECASE | re.MULTILINE)

def lines_includes(node):
code = node.read()
if c_preproc.use_trigraphs:
for (a, b) in c_preproc.trig_def: code = b.join(code.split(a))
code = c_preproc.re_nl.sub('', code)
code = c_preproc.re_cpp.sub(c_preproc.repl, code)
return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)]

parser = c_preproc.c_parser
class dumb_parser(parser):
def addlines(self, node):
if node in self.nodes[:-1]:
return
self.currentnode_stack.append(node.parent)

# Avoid reading the same files again
try:
lines = self.parse_cache[node]
except KeyError:
lines = self.parse_cache[node] = lines_includes(node)

self.lines = lines + [(c_preproc.POPFILE, '')] + self.lines

def start(self, node, env):
try:
self.parse_cache = node.ctx.parse_cache
except AttributeError:
self.parse_cache = node.ctx.parse_cache = {}

self.addlines(node)
while self.lines:
(x, y) = self.lines.pop(0)
if x == c_preproc.POPFILE:
self.currentnode_stack.pop()
continue
self.tryfind(y)

c_preproc.c_parser = dumb_parser
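
Because the scan is a plain regular-expression pass over the comment-stripped source, every #include is reported regardless of preprocessor conditionals. A standalone sketch of the same regex (comment stripping omitted here)::

    import re
    re_inc = re.compile(
        '^[ \t]*(#|%:)[ \t]*(include)[ \t]*[<"](.*)[>"]\r*$',
        re.IGNORECASE | re.MULTILINE)
    code = '#include "a.h"\n#ifdef B\n#include "b.h"\n#endif\n'
    print([m.group(3) for m in re_inc.finditer(code)])  # prints ['a.h', 'b.h']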


+0 -156 waflib/extras/cabal.py

@@ -1,156 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Anton Feldmann, 2012
# "Base for cabal"
import re
import time
from waflib import TaskGen, Task, Utils
from waflib.Configure import conf
from waflib.Task import always_run
from waflib.TaskGen import extension, feature, after, before, before_method
from waflib.Utils import threading
from shutil import rmtree
lock = threading.Lock()
registering = False
def configure(self):
self.find_program('cabal', var='CABAL')
self.find_program('ghc-pkg', var='GHCPKG')
pkgconfd = self.bldnode.abspath() + '/package.conf.d'
self.env.PREFIX = self.bldnode.abspath() + '/dist'
self.env.PKGCONFD = pkgconfd
if self.root.find_node(pkgconfd + '/package.cache'):
self.msg('Using existing package database', pkgconfd, color='CYAN')
else:
pkgdir = self.root.find_dir(pkgconfd)
if pkgdir:
self.msg('Deleting corrupt package database', pkgdir.abspath(), color='RED')
rmtree(pkgdir.abspath())
pkgdir = None
self.cmd_and_log(self.env.GHCPKG + ['init', pkgconfd])
self.msg('Created package database', pkgconfd, color = 'YELLOW' if pkgdir else 'GREEN')
@extension('.cabal')
def process_cabal(self, node):
out_dir_node = self.bld.root.find_dir(self.bld.out_dir)
package_node = node.change_ext('.package')
package_node = out_dir_node.find_or_declare(package_node.name)
build_node = node.parent.get_bld()
build_path = build_node.abspath()
config_node = build_node.find_or_declare('setup-config')
inplace_node = build_node.find_or_declare('package.conf.inplace')
config_task = self.create_task('cabal_configure', node)
config_task.cwd = node.parent.abspath()
config_task.depends_on = getattr(self, 'depends_on', '')
config_task.build_path = build_path
config_task.set_outputs(config_node)
build_task = self.create_task('cabal_build', config_node)
build_task.cwd = node.parent.abspath()
build_task.build_path = build_path
build_task.set_outputs(inplace_node)
copy_task = self.create_task('cabal_copy', inplace_node)
copy_task.cwd = node.parent.abspath()
copy_task.depends_on = getattr(self, 'depends_on', '')
copy_task.build_path = build_path
last_task = copy_task
task_list = [config_task, build_task, copy_task]
if (getattr(self, 'register', False)):
register_task = self.create_task('cabal_register', inplace_node)
register_task.cwd = node.parent.abspath()
register_task.set_run_after(copy_task)
register_task.build_path = build_path
pkgreg_task = self.create_task('ghcpkg_register', inplace_node)
pkgreg_task.cwd = node.parent.abspath()
pkgreg_task.set_run_after(register_task)
pkgreg_task.build_path = build_path
last_task = pkgreg_task
task_list += [register_task, pkgreg_task]
touch_task = self.create_task('cabal_touch', inplace_node)
touch_task.set_run_after(last_task)
touch_task.set_outputs(package_node)
touch_task.build_path = build_path
task_list += [touch_task]
return task_list
def get_all_src_deps(node):
hs_deps = node.ant_glob('**/*.hs')
hsc_deps = node.ant_glob('**/*.hsc')
lhs_deps = node.ant_glob('**/*.lhs')
c_deps = node.ant_glob('**/*.c')
cpp_deps = node.ant_glob('**/*.cpp')
proto_deps = node.ant_glob('**/*.proto')
return sum([hs_deps, hsc_deps, lhs_deps, c_deps, cpp_deps, proto_deps], [])
class Cabal(Task.Task):
def scan(self):
return (get_all_src_deps(self.generator.path), ())
class cabal_configure(Cabal):
run_str = '${CABAL} configure -v0 --prefix=${PREFIX} --global --user --package-db=${PKGCONFD} --builddir=${tsk.build_path}'
shell = True
def scan(self):
out_node = self.generator.bld.root.find_dir(self.generator.bld.out_dir)
deps = [out_node.find_or_declare(dep).change_ext('.package') for dep in Utils.to_list(self.depends_on)]
return (deps, ())
class cabal_build(Cabal):
run_str = '${CABAL} build -v1 --builddir=${tsk.build_path}/'
shell = True
class cabal_copy(Cabal):
run_str = '${CABAL} copy -v0 --builddir=${tsk.build_path}'
shell = True
class cabal_register(Cabal):
run_str = '${CABAL} register -v0 --gen-pkg-config=${tsk.build_path}/pkg.config --builddir=${tsk.build_path}'
shell = True
class ghcpkg_register(Cabal):
run_str = '${GHCPKG} update -v0 --global --user --package-conf=${PKGCONFD} ${tsk.build_path}/pkg.config'
shell = True
def runnable_status(self):
global lock, registering
val = False
lock.acquire()
val = registering
lock.release()
if val:
return Task.ASK_LATER
ret = Task.Task.runnable_status(self)
if ret == Task.RUN_ME:
lock.acquire()
registering = True
lock.release()
return ret
def post_run(self):
global lock, registering
lock.acquire()
registering = False
lock.release()
return Task.Task.post_run(self)
class cabal_touch(Cabal):
run_str = 'touch ${TGT}'
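
A task generator picks this tool up through the '.cabal' extension; a minimal, hypothetical wscript fragment (target and dependency names are made up)::

    def configure(conf):
        conf.load('cabal')

    def build(bld):
        # register=True appends the cabal_register/ghcpkg_register tasks;
        # depends_on names another .cabal target in the same build whose
        # .package output must exist before this one is configured
        bld(source='mylib.cabal', register=True, depends_on='base-library')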

+0 -110 waflib/extras/cfg_altoptions.py

@@ -1,110 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Tool to extend c_config.check_cfg()

__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
__copyright__ = "Jérôme Carretero, 2014"

"""

This tool makes it possible to work around the absence of ``*-config`` programs
on some systems, keeping the same clean configuration syntax while inferring
values or permitting their modification via the options interface.

Note that pkg-config also honours ``PKG_CONFIG_PATH``, so custom .pc files can
simply be placed in a folder added to that path.
This tool could also have been implemented by taking advantage of that fact.

Usage::

def options(opt):
opt.load('c_config_alt')
opt.add_package_option('package')

def configure(conf):
conf.load('c_config_alt')
conf.check_cfg(...)

Known issues:

- Behavior with different build contexts...

"""

import os
import functools
from waflib import Task, Utils, TaskGen, Configure, Options, Errors

def name_to_dest(x):
return x.lower().replace('-', '_')


def options(opt):
def x(opt, param):
dest = name_to_dest(param)
gr = opt.get_option_group("configure options")
gr.add_option('--%s-root' % dest,
help="path containing include and lib subfolders for %s" \
% param,
)

opt.add_package_option = functools.partial(x, opt)


check_cfg_old = getattr(Configure.ConfigurationContext, 'check_cfg')

@Configure.conf
def check_cfg(conf, *k, **kw):
if k:
lst = k[0].split()
kw['package'] = lst[0]
kw['args'] = ' '.join(lst[1:])

if not 'package' in kw:
return check_cfg_old(conf, **kw)

package = kw['package']

package_lo = name_to_dest(package)
package_hi = package.upper().replace('-', '_') # TODO FIXME
package_hi = kw.get('uselib_store', package_hi)

def check_folder(path, name):
try:
assert os.path.isdir(path)
except AssertionError:
raise Errors.ConfigurationError(
"%s_%s (%s) is not a folder!" \
% (package_lo, name, path))
return path

root = getattr(Options.options, '%s_root' % package_lo, None)

if root is None:
return check_cfg_old(conf, **kw)
else:
def add_manual_var(k, v):
conf.start_msg('Adding for %s a manual var' % (package))
conf.env["%s_%s" % (k, package_hi)] = v
conf.end_msg("%s = %s" % (k, v))


check_folder(root, 'root')

pkg_inc = check_folder(os.path.join(root, "include"), 'inc')
add_manual_var('INCLUDES', [pkg_inc])
pkg_lib = check_folder(os.path.join(root, "lib"), 'libpath')
add_manual_var('LIBPATH', [pkg_lib])
add_manual_var('LIB', [package])

for x in kw.get('manual_deps', []):
for k, v in sorted(conf.env.get_merged_dict().items()):
if k.endswith('_%s' % x):
k = k.replace('_%s' % x, '')
conf.start_msg('Adding for %s a manual dep' \
%(package))
conf.env["%s_%s" % (k, package_hi)] += v
conf.end_msg('%s += %s' % (k, v))

return True
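
With a package option declared as above, a configure run can redirect a dependency to a local prefix, from which INCLUDES_*, LIBPATH_* and LIB_* are derived. A hypothetical invocation::

    ./waf configure --package-root=/opt/package

is then roughly equivalent to setting::

    conf.env.INCLUDES_PACKAGE = ['/opt/package/include']
    conf.env.LIBPATH_PACKAGE = ['/opt/package/lib']
    conf.env.LIB_PACKAGE = ['package']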


+0 -147 waflib/extras/cfg_cross_gnu.py

@@ -1,147 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Tool to provide dedicated variables for cross-compilation

__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
__copyright__ = "Jérôme Carretero, 2014"

"""

This tool allows environment variables to be used to define cross-compilation
settings, which is mostly useful with build variants.

Usage:

- In your build script::

def configure(conf):
...
conf.load('cfg_cross_gnu')
for variant in x_variants:
conf.xcheck_host()
conf.xcheck_host_envar('POUET')
...

...

- Then::

CHOST=arm-hardfloat-linux-gnueabi waf configure

env arm-hardfloat-linux-gnueabi-CC="clang -..." waf configure

CFLAGS=... CHOST=arm-hardfloat-linux-gnueabi HOST_CFLAGS=-g waf configure

HOST_CC="clang -..." waf configure

"""

import os
from waflib import Utils, Configure

try:
from shlex import quote
except ImportError:
from pipes import quote

@Configure.conf
def xcheck_prog(conf, var, tool, cross=False):
value = os.environ.get(var, '')
value = Utils.to_list(value)

if not value:
return

conf.env[var] = value
if cross:
pretty = 'cross-compilation %s' % var
else:
pretty = var
conf.msg('Will use %s' % pretty,
" ".join(quote(x) for x in value))

@Configure.conf
def xcheck_envar(conf, name, wafname=None, cross=False):
wafname = wafname or name
value = os.environ.get(name, None)
value = Utils.to_list(value)

if not value:
return

conf.env[wafname] += value
if cross:
pretty = 'cross-compilation %s' % wafname
else:
pretty = wafname
conf.msg('Will use %s' % pretty,
" ".join(quote(x) for x in value))

@Configure.conf
def xcheck_host_prog(conf, name, tool, wafname=None):
wafname = wafname or name
host = conf.env.CHOST
specific = None
if host:
specific = os.environ.get('%s-%s' % (host[0], name), None)

if specific:
value = Utils.to_list(specific)
conf.env[wafname] += value
conf.msg('Will use cross-compilation %s' % name,
" ".join(quote(x) for x in value))
return

conf.xcheck_prog('HOST_%s' % name, tool, cross=True)

if conf.env[wafname]:
return

value = None
if host:
value = '%s-%s' % (host[0], tool)

if value:
conf.env[wafname] = value
conf.msg('Will use cross-compilation %s' % wafname, value)

@Configure.conf
def xcheck_host_envar(conf, name, wafname=None):
wafname = wafname or name

host = conf.env.CHOST
specific = None
if host:
specific = os.environ.get('%s-%s' % (host[0], name), None)

if specific:
value = Utils.to_list(specific)
conf.env[wafname] += value
conf.msg('Will use cross-compilation %s' % name,
" ".join(quote(x) for x in value))
return

conf.xcheck_envar('HOST_%s' % name, wafname, cross=True)


@Configure.conf
def xcheck_host(conf):
conf.xcheck_envar('CHOST', cross=True)
conf.xcheck_host_prog('CC', 'gcc')
conf.xcheck_host_prog('CXX', 'g++')
conf.xcheck_host_prog('LINK_CC', 'gcc')
conf.xcheck_host_prog('LINK_CXX', 'g++')
conf.xcheck_host_prog('AR', 'ar')
conf.xcheck_host_prog('AS', 'as')
conf.xcheck_host_prog('LD', 'ld')
conf.xcheck_host_envar('CFLAGS')
conf.xcheck_host_envar('CXXFLAGS')
conf.xcheck_host_envar('LDFLAGS', 'LINKFLAGS')
conf.xcheck_host_envar('LIB')
conf.xcheck_host_envar('PKG_CONFIG_PATH')
# TODO find a better solution than this ugliness
if conf.env.PKG_CONFIG_PATH:
conf.find_program('pkg-config', var='PKGCONFIG')
conf.env.PKGCONFIG = [
'env', 'PKG_CONFIG_PATH=%s' % (conf.env.PKG_CONFIG_PATH[0])
] + conf.env.PKGCONFIG
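
For each tool, xcheck_host_prog tries three sources in order: a host-prefixed environment variable, then the generic HOST_* variable, then a name derived from CHOST. Hypothetical invocations illustrating the fallbacks for CC::

    # 1. highest priority: the '<chost>-CC' variable
    CHOST=arm-hardfloat-linux-gnueabi arm-hardfloat-linux-gnueabi-CC=clang waf configure
    # 2. then the generic HOST_CC variable
    HOST_CC="clang --target=armv7a-linux-gnueabi" waf configure
    # 3. finally, CHOST alone implies 'arm-hardfloat-linux-gnueabi-gcc'
    CHOST=arm-hardfloat-linux-gnueabi waf configure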

+0 -66 waflib/extras/clang_compilation_database.py

@@ -1,66 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Christoph Koke, 2013

"""
Writes the c and cpp compile commands into build/compile_commands.json
see http://clang.llvm.org/docs/JSONCompilationDatabase.html

Usage:

def configure(conf):
conf.load('compiler_cxx')
...
conf.load('clang_compilation_database')
"""

import sys, os, json, shlex, pipes
from waflib import Logs, TaskGen
from waflib.Tools import c, cxx

if sys.hexversion >= 0x3030000:
quote = shlex.quote
else:
quote = pipes.quote

@TaskGen.feature('*')
@TaskGen.after_method('process_use')
def collect_compilation_db_tasks(self):
"Add a compilation database entry for compiled tasks"
try:
clang_db = self.bld.clang_compilation_database_tasks
except AttributeError:
clang_db = self.bld.clang_compilation_database_tasks = []
self.bld.add_post_fun(write_compilation_database)

for task in getattr(self, 'compiled_tasks', []):
if isinstance(task, (c.c, cxx.cxx)):
clang_db.append(task)

def write_compilation_database(ctx):
"Write the clang compilation database as JSON"
database_file = ctx.bldnode.make_node('compile_commands.json')
Logs.info("Build commands will be stored in %s" % database_file.path_from(ctx.path))
try:
root = json.loads(database_file.read())
except IOError:
root = []
clang_db = dict((x["file"], x) for x in root)
for task in getattr(ctx, 'clang_compilation_database_tasks', []):
try:
cmd = task.last_cmd
except AttributeError:
continue
directory = getattr(task, 'cwd', ctx.variant_dir)
f_node = task.inputs[0]
filename = os.path.relpath(f_node.abspath(), directory)
cmd = " ".join(map(quote, cmd))
entry = {
"directory": directory,
"command": cmd,
"file": filename,
}
clang_db[filename] = entry
root = list(clang_db.values())
database_file.write(json.dumps(root, indent=2))
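
Each entry records enough context for a clang-based tool to re-run the exact compilation. A representative entry (hypothetical paths) from the generated build/compile_commands.json::

    [
      {
        "directory": "/home/user/project/build",
        "command": "/usr/bin/gcc -I.. -c ../src/main.c",
        "file": "../src/main.c"
      }
    ]

Tools such as clang-tidy or clang-check can then be pointed at it with '-p build'.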


+0 -881 waflib/extras/codelite.py

@@ -1,881 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# CodeLite Project
# Christian Klein (chrikle@berlios.de)
# Created: Jan 2012
# As a template for this file I used msvs.py
# I hope this template will work properly

"""
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:

1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.

3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""

"""

To add this tool to your project:
def options(opt):
opt.load('codelite')

It can be a good idea to add the sync_exec tool too.

To generate solution files:
$ waf configure codelite

To customize the outputs, provide subclasses in your wscript files:

from waflib.extras import codelite
class vsnode_target(codelite.vsnode_target):
def get_build_command(self, props):
# likely to be required
return "waf.bat build"
def collect_source(self):
# likely to be required
...
class codelite_bar(codelite.codelite_generator):
def init(self):
codelite.codelite_generator.init(self)
self.vsnode_target = vsnode_target

The codelite class re-uses the same build() function for reading the targets (task
generators); you may therefore specify codelite settings on the context object:

def build(bld):
bld.codelite_solution_name = 'foo.workspace'
bld.waf_command = 'waf.bat'
bld.projects_dir = bld.srcnode.make_node('')
bld.projects_dir.mkdir()


ASSUMPTIONS:
* a project can be either a directory or a target; project files are written only for targets that have source files
* each project is a .project file, therefore the project uuid needs only to be a hash of the absolute path
"""

import os, re, sys
import uuid # requires python 2.5
from waflib.Build import BuildContext
from waflib import Utils, TaskGen, Logs, Task, Context, Node, Options

HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'

PROJECT_TEMPLATE = r'''<?xml version="1.0" encoding="utf-8"?>
<CodeLite_Project Name="${project.name}" InternalType="Library">
<Plugins>
<Plugin Name="qmake">
<![CDATA[00010001N0005Release000000000000]]>
</Plugin>
</Plugins>
<Description/>
<Dependencies/>
<VirtualDirectory Name="src">
${for x in project.source}
${if (project.get_key(x)=="sourcefile")}
<File Name="${x.abspath()}"/>
${endif}
${endfor}
</VirtualDirectory>
<VirtualDirectory Name="include">
${for x in project.source}
${if (project.get_key(x)=="headerfile")}
<File Name="${x.abspath()}"/>
${endif}
${endfor}
</VirtualDirectory>
<Settings Type="Dynamic Library">
<GlobalSettings>
<Compiler Options="" C_Options="">
<IncludePath Value="."/>
</Compiler>
<Linker Options="">
<LibraryPath Value="."/>
</Linker>
<ResourceCompiler Options=""/>
</GlobalSettings>
<Configuration Name="Release" CompilerType="gnu gcc" ReleasegerType="GNU gdb Releaseger" Type="Dynamic Library" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append">
<Compiler Options="" C_Options="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" UseDifferentPCHFlags="no" PCHFlags="">
<IncludePath Value="."/>
<IncludePath Value="."/>
</Compiler>
<Linker Options="" Required="yes">
<LibraryPath Value=""/>
</Linker>
<ResourceCompiler Options="" Required="no"/>
<General OutputFile="${xml:project.build_properties[0].output_file}" IntermediateDirectory="" Command="" CommandArguments="" PauseExecWhenProcTerminates="yes"/>
<Environment EnvVarSetName="&lt;Use Defaults&gt;" DbgSetName="&lt;Use Defaults&gt;">
<![CDATA[]]>
</Environment>
<Releaseger IsRemote="no" RemoteHostName="" RemoteHostPort="" ReleasegerPath="">
<PostConnectCommands/>
<StartupCommands/>
</Releaseger>
<PreBuild/>
<PostBuild/>
<CustomBuild Enabled="yes">
${py:b = project.build_properties[0]}
<RebuildCommand>${xml:project.get_rebuild_command(project.build_properties[0])}</RebuildCommand>
<CleanCommand>${xml:project.get_clean_command(project.build_properties[0])}</CleanCommand>
<BuildCommand>${xml:project.get_build_command(project.build_properties[0])}</BuildCommand>
<Target Name="Install">${xml:project.get_install_command(project.build_properties[0])}</Target>
<Target Name="Build and Install">${xml:project.get_build_and_install_command(project.build_properties[0])}</Target>
<Target Name="Build All">${xml:project.get_build_all_command(project.build_properties[0])}</Target>
<Target Name="Rebuild All">${xml:project.get_rebuild_all_command(project.build_properties[0])}</Target>
<Target Name="Clean All">${xml:project.get_clean_all_command(project.build_properties[0])}</Target>
<Target Name="Build and Install All">${xml:project.get_build_and_install_all_command(project.build_properties[0])}</Target>
<PreprocessFileCommand/>
<SingleFileCommand/>
<MakefileGenerationCommand/>
<ThirdPartyToolName>None</ThirdPartyToolName>
<WorkingDirectory/>
</CustomBuild>
<AdditionalRules>
<CustomPostBuild/>
<CustomPreBuild/>
</AdditionalRules>
<Completion>
<ClangCmpFlags/>
<ClangPP/>
<SearchPaths/>
</Completion>
</Configuration>
<Configuration Name="Release" CompilerType="gnu gcc" ReleasegerType="GNU gdb Releaseger" Type="" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append">
<Compiler Options="" C_Options="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" UseDifferentPCHFlags="no" PCHFlags="">
<IncludePath Value="."/>
</Compiler>
<Linker Options="" Required="yes"/>
<ResourceCompiler Options="" Required="no"/>
<General OutputFile="" IntermediateDirectory="./Release" Command="" CommandArguments="" UseSeparateReleaseArgs="no" ReleaseArguments="" WorkingDirectory="$(IntermediateDirectory)" PauseExecWhenProcTerminates="yes"/>
<Environment EnvVarSetName="&lt;Use Defaults&gt;" DbgSetName="&lt;Use Defaults&gt;">
<![CDATA[
]]>
</Environment>
<Releaseger IsRemote="no" RemoteHostName="" RemoteHostPort="" ReleasegerPath="">
<PostConnectCommands/>
<StartupCommands/>
</Releaseger>
<PreBuild/>
<PostBuild/>
<CustomBuild Enabled="no">
<RebuildCommand/>
<CleanCommand/>
<BuildCommand/>
<PreprocessFileCommand/>
<SingleFileCommand/>
<MakefileGenerationCommand/>
<ThirdPartyToolName/>
<WorkingDirectory/>
</CustomBuild>
<AdditionalRules>
<CustomPostBuild/>
<CustomPreBuild/>
</AdditionalRules>
<Completion>
<ClangCmpFlags/>
<ClangPP/>
<SearchPaths/>
</Completion>
</Configuration>
</Settings>
</CodeLite_Project>'''




SOLUTION_TEMPLATE = '''<?xml version="1.0" encoding="utf-8"?>
<CodeLite_Workspace Name="${getattr(project, 'codelite_solution_name', None)[:-10]}" Database="./${getattr(project, 'codelite_solution_name', None)[:-10]}.tags">
${for p in project.all_projects}
<Project Name = "${p.name}" Path = "${p.title}" Active="No"/>
${endfor}
<BuildMatrix>
<WorkspaceConfiguration Name="Release" Selected="yes">
${for p in project.all_projects}
<Project Name="${p.name}" ConfigName="Release"/>
${endfor}
</WorkspaceConfiguration>
</BuildMatrix>
</CodeLite_Workspace>'''



COMPILE_TEMPLATE = '''def f(project):
lst = []
def xml_escape(value):
return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")

%s

#f = open('cmd.txt', 'w')
#f.write(str(lst))
#f.close()
return ''.join(lst)
'''
reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)
def compile_template(line):
"""
Compile a template expression into a python function (like jsps, but way shorter)
"""
extr = []
def repl(match):
g = match.group
if g('dollar'): return "$"
elif g('backslash'):
return "\\"
elif g('subst'):
extr.append(g('code'))
return "<<|@|>>"
return None

line2 = reg_act.sub(repl, line)
params = line2.split('<<|@|>>')
assert(extr)


indent = 0
buf = []
app = buf.append

def app(txt):
buf.append(indent * '\t' + txt)

for x in range(len(extr)):
if params[x]:
app("lst.append(%r)" % params[x])

f = extr[x]
if f.startswith('if') or f.startswith('for'):
app(f + ':')
indent += 1
elif f.startswith('py:'):
app(f[3:])
elif f.startswith('endif') or f.startswith('endfor'):
indent -= 1
elif f.startswith('else') or f.startswith('elif'):
indent -= 1
app(f + ':')
indent += 1
elif f.startswith('xml:'):
app('lst.append(xml_escape(%s))' % f[4:])
else:
#app('lst.append((%s) or "cannot find %s")' % (f, f))
app('lst.append(%s)' % f)

if extr:
if params[-1]:
app("lst.append(%r)" % params[-1])

fun = COMPILE_TEMPLATE % "\n\t".join(buf)
#print(fun)
return Task.funex(fun)
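
The mini-language accepted by compile_template supports ${if}/${endif}, ${for}/${endfor}, ${py:...} statements and ${xml:...} escaped substitutions; everything else is copied verbatim. A small hypothetical illustration::

    tpl = 'Name: ${xml:project.name}${for x in project.source} <File Name="${x}"/>${endfor}'
    fn = compile_template(tpl)

    class P:
        name = 'a<b'
        source = ['x.c', 'y.c']

    print(fn(P))
    # Name: a&lt;b <File Name="x.c"/> <File Name="y.c"/>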


re_blank = re.compile('(\n|\r|\\s)*\n', re.M)
def rm_blank_lines(txt):
txt = re_blank.sub('\r\n', txt)
return txt

BOM = '\xef\xbb\xbf'
try:
BOM = bytes(BOM, 'iso8859-1') # python 3
except NameError:
pass

def stealth_write(self, data, flags='wb'):
try:
x = unicode
except NameError:
data = data.encode('utf-8') # python 3
else:
data = data.decode(sys.getfilesystemencoding(), 'replace')
data = data.encode('utf-8')

if self.name.endswith('.project'):
data = BOM + data

try:
txt = self.read(flags='rb')
if txt != data:
raise ValueError('must write')
except (IOError, ValueError):
self.write(data, flags=flags)
else:
Logs.debug('codelite: skipping %s' % self.abspath())
Node.Node.stealth_write = stealth_write
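
stealth_write only touches the file when its content actually changes, so project files keep their timestamps across repeated generator runs; conceptually::

    node.stealth_write(data)  # first call: file is (re)written
    node.stealth_write(data)  # same content: no write, 'codelite: skipping ...' is logged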

re_quote = re.compile("[^a-zA-Z0-9-]")
def quote(s):
return re_quote.sub("_", s)

def xml_escape(value):
return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")

def make_uuid(v, prefix = None):
"""
simple utility function
"""
if isinstance(v, dict):
keys = list(v.keys())
keys.sort()
tmp = str([(k, v[k]) for k in keys])
else:
tmp = str(v)
d = Utils.md5(tmp.encode()).hexdigest().upper()
if prefix:
d = '%s%s' % (prefix, d[8:])
gid = uuid.UUID(d, version = 4)
return str(gid).upper()

def diff(node, fromnode):
# difference between two nodes, but with "(..)" instead of ".."
c1 = node
c2 = fromnode

c1h = c1.height()
c2h = c2.height()

lst = []
up = 0

while c1h > c2h:
lst.append(c1.name)
c1 = c1.parent
c1h -= 1

while c2h > c1h:
up += 1
c2 = c2.parent
c2h -= 1

while id(c1) != id(c2):
lst.append(c1.name)
up += 1

c1 = c1.parent
c2 = c2.parent

for i in range(up):
lst.append('(..)')
lst.reverse()
return tuple(lst)

class build_property(object):
pass

class vsnode(object):
"""
Abstract class representing visual studio elements
We assume that all visual studio nodes have a uuid and a parent
"""
def __init__(self, ctx):
self.ctx = ctx # codelite context
self.name = '' # string, mandatory
self.vspath = '' # path in visual studio (name for dirs, absolute path for projects)
self.uuid = '' # string, mandatory
self.parent = None # parent node for visual studio nesting

def get_waf(self):
"""
Override in subclasses...
"""
return '%s/%s' % (self.ctx.srcnode.abspath(), getattr(self.ctx, 'waf_command', 'waf'))

def ptype(self):
"""
Return a special uuid for projects written in the solution file
"""
pass

def write(self):
"""
Write the project file, by default, do nothing
"""
pass

def make_uuid(self, val):
"""
Alias for creating uuid values easily (the templates cannot access global variables)
"""
return make_uuid(val)

class vsnode_vsdir(vsnode):
"""
Nodes representing visual studio folders (which do not match the filesystem tree!)
"""
VS_GUID_SOLUTIONFOLDER = "2150E333-8FDC-42A3-9474-1A3956D46DE8"
def __init__(self, ctx, uuid, name, vspath=''):
vsnode.__init__(self, ctx)
self.title = self.name = name
self.uuid = uuid
self.vspath = vspath or name

def ptype(self):
return self.VS_GUID_SOLUTIONFOLDER

class vsnode_project(vsnode):
"""
Abstract class representing visual studio project elements
A project is assumed to be writable, and has a node representing the file to write to
"""
VS_GUID_VCPROJ = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942"
def ptype(self):
return self.VS_GUID_VCPROJ

def __init__(self, ctx, node):
vsnode.__init__(self, ctx)
self.path = node
self.uuid = make_uuid(node.abspath())
self.name = node.name
self.title = self.path.abspath()
self.source = [] # list of node objects
self.build_properties = [] # list of properties (nmake commands, output dir, etc)

def dirs(self):
"""
Get the list of parent folders of the source files (header files included)
for writing the filters
"""
lst = []
def add(x):
if x.height() > self.tg.path.height() and x not in lst:
lst.append(x)
add(x.parent)
for x in self.source:
add(x.parent)
return lst

def write(self):
Logs.debug('codelite: creating %r' % self.path)
#print "self.name:",self.name

# first write the project file
template1 = compile_template(PROJECT_TEMPLATE)
proj_str = template1(self)
proj_str = rm_blank_lines(proj_str)
self.path.stealth_write(proj_str)

# then write the filter
#template2 = compile_template(FILTER_TEMPLATE)
#filter_str = template2(self)
#filter_str = rm_blank_lines(filter_str)
#tmp = self.path.parent.make_node(self.path.name + '.filters')
#tmp.stealth_write(filter_str)

def get_key(self, node):
"""
required for writing the source files
"""
name = node.name
if name.endswith('.cpp') or name.endswith('.c'):
return 'sourcefile'
return 'headerfile'

def collect_properties(self):
"""
Returns a list of triplet (configuration, platform, output_directory)
"""
ret = []
for c in self.ctx.configurations:
for p in self.ctx.platforms:
x = build_property()
x.outdir = ''

x.configuration = c
x.platform = p

x.preprocessor_definitions = ''
x.includes_search_path = ''

# can specify "deploy_dir" too
ret.append(x)
self.build_properties = ret

def get_build_params(self, props):
opt = ''
return (self.get_waf(), opt)

def get_build_command(self, props):
return "%s build %s" % self.get_build_params(props)

def get_clean_command(self, props):
return "%s clean %s" % self.get_build_params(props)

def get_rebuild_command(self, props):
return "%s clean build %s" % self.get_build_params(props)
def get_install_command(self, props):
return "%s install %s" % self.get_build_params(props)
def get_build_and_install_command(self, props):
return "%s build install %s" % self.get_build_params(props)
def get_build_and_install_all_command(self, props):
return "%s build install" % self.get_build_params(props)[0]
def get_clean_all_command(self, props):
return "%s clean" % self.get_build_params(props)[0]
def get_build_all_command(self, props):
return "%s build" % self.get_build_params(props)[0]
def get_rebuild_all_command(self, props):
return "%s clean build" % self.get_build_params(props)[0]

def get_filter_name(self, node):
lst = diff(node, self.tg.path)
return '\\'.join(lst) or '.'

class vsnode_alias(vsnode_project):
def __init__(self, ctx, node, name):
vsnode_project.__init__(self, ctx, node)
self.name = name
self.output_file = ''

class vsnode_build_all(vsnode_alias):
"""
Fake target used to emulate the behaviour of "make all" (starting one process by target is slow)
This is the only alias enabled by default
"""
def __init__(self, ctx, node, name='build_all_projects'):
vsnode_alias.__init__(self, ctx, node, name)
self.is_active = True

class vsnode_install_all(vsnode_alias):
"""
Fake target used to emulate the behaviour of "make install"
"""
def __init__(self, ctx, node, name='install_all_projects'):
vsnode_alias.__init__(self, ctx, node, name)

def get_build_command(self, props):
return "%s build install %s" % self.get_build_params(props)

def get_clean_command(self, props):
return "%s clean %s" % self.get_build_params(props)

def get_rebuild_command(self, props):
return "%s clean build install %s" % self.get_build_params(props)

class vsnode_project_view(vsnode_alias):
"""
Fake target used to emulate a file system view
"""
def __init__(self, ctx, node, name='project_view'):
vsnode_alias.__init__(self, ctx, node, name)
self.tg = self.ctx() # fake one, cannot remove
self.exclude_files = Node.exclude_regs + '''
waf-1.8.*
waf3-1.8.*/**
.waf-1.8.*
.waf3-1.8.*/**
**/*.sdf
**/*.suo
**/*.ncb
**/%s
''' % Options.lockfile

def collect_source(self):
# this is likely to be slow
self.source = self.ctx.srcnode.ant_glob('**', excl=self.exclude_files)

def get_build_command(self, props):
params = self.get_build_params(props) + (self.ctx.cmd,)
return "%s %s %s" % params

def get_clean_command(self, props):
return ""

def get_rebuild_command(self, props):
return self.get_build_command(props)

class vsnode_target(vsnode_project):
"""
CodeLite project representing a target (program, library, etc) and bound
to a task generator
"""
def __init__(self, ctx, tg):
"""
A project is more or less equivalent to a file/folder
"""
base = getattr(ctx, 'projects_dir', None) or tg.path
node = base.make_node(quote(tg.name) + ctx.project_extension) # the project file as a Node
vsnode_project.__init__(self, ctx, node)
self.name = quote(tg.name)
self.tg = tg # task generator

def get_build_params(self, props):
"""
Override the default to add the target name
"""
opt = ''
if getattr(self, 'tg', None):
opt += " --targets=%s" % self.tg.name
return (self.get_waf(), opt)

def collect_source(self):
tg = self.tg
source_files = tg.to_nodes(getattr(tg, 'source', []))
include_dirs = Utils.to_list(getattr(tg, 'codelite_includes', []))
include_files = []
for x in include_dirs:
if isinstance(x, str):
x = tg.path.find_node(x)
if x:
lst = [y for y in x.ant_glob(HEADERS_GLOB, flat=False)]
include_files.extend(lst)

# remove duplicates
self.source.extend(list(set(source_files + include_files)))
self.source.sort(key=lambda x: x.abspath())

def collect_properties(self):
"""
CodeLite projects are associated with platforms and configurations (for building especially)
"""
super(vsnode_target, self).collect_properties()
for x in self.build_properties:
x.outdir = self.path.parent.abspath()
x.preprocessor_definitions = ''
x.includes_search_path = ''

try:
tsk = self.tg.link_task
except AttributeError:
pass
else:
x.output_file = tsk.outputs[0].abspath()
x.preprocessor_definitions = ';'.join(tsk.env.DEFINES)
x.includes_search_path = ';'.join(self.tg.env.INCPATHS)

class codelite_generator(BuildContext):
'''generates a CodeLite workspace'''
cmd = 'codelite'
fun = 'build'

def init(self):
"""
Some data that needs to be present
"""
if not getattr(self, 'configurations', None):
self.configurations = ['Release'] # LocalRelease, RemoteDebug, etc
if not getattr(self, 'platforms', None):
self.platforms = ['Win32']
if not getattr(self, 'all_projects', None):
self.all_projects = []
if not getattr(self, 'project_extension', None):
self.project_extension = '.project'
if not getattr(self, 'projects_dir', None):
self.projects_dir = self.srcnode.make_node('')
self.projects_dir.mkdir()

# bind the classes to the object, so that subclass can provide custom generators
if not getattr(self, 'vsnode_vsdir', None):
self.vsnode_vsdir = vsnode_vsdir
if not getattr(self, 'vsnode_target', None):
self.vsnode_target = vsnode_target
if not getattr(self, 'vsnode_build_all', None):
self.vsnode_build_all = vsnode_build_all
if not getattr(self, 'vsnode_install_all', None):
self.vsnode_install_all = vsnode_install_all
if not getattr(self, 'vsnode_project_view', None):
self.vsnode_project_view = vsnode_project_view

self.numver = '11.00'
self.vsver = '2010'

def execute(self):
"""
Entry point
"""
self.restore()
if not self.all_envs:
self.load_envs()
self.recurse([self.run_dir])

# user initialization
self.init()

# two phases for creating the solution
self.collect_projects() # add project objects into "self.all_projects"
self.write_files() # write the corresponding project and solution files

def collect_projects(self):
"""
Fill the list self.all_projects with project objects
Fill the list of build targets
"""
self.collect_targets()
#self.add_aliases()
#self.collect_dirs()
default_project = getattr(self, 'default_project', None)
def sortfun(x):
if x.name == default_project:
return ''
return getattr(x, 'path', None) and x.path.abspath() or x.name
self.all_projects.sort(key=sortfun)

def write_files(self):
"""
Write the project and solution files from the data collected
so far. It is unlikely that you will want to change this
"""
for p in self.all_projects:
p.write()

# and finally write the solution file
node = self.get_solution_node()
node.parent.mkdir()
Logs.warn('Creating %r' % node)
#a = dir(self.root)
#for b in a:
# print b
#print self.group_names
#print "Hallo2: ",self.root.listdir()
#print getattr(self, 'codelite_solution_name', None)
template1 = compile_template(SOLUTION_TEMPLATE)
sln_str = template1(self)
sln_str = rm_blank_lines(sln_str)
node.stealth_write(sln_str)

def get_solution_node(self):
"""
The solution filename is required when writing the project files;
return self.solution_node, creating it if it does not exist
"""
try:
return self.solution_node
except:
pass

codelite_solution_name = getattr(self, 'codelite_solution_name', None)
if not codelite_solution_name:
codelite_solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '.workspace'
setattr(self, 'codelite_solution_name', codelite_solution_name)
if os.path.isabs(codelite_solution_name):
self.solution_node = self.root.make_node(codelite_solution_name)
else:
self.solution_node = self.srcnode.make_node(codelite_solution_name)
return self.solution_node

def project_configurations(self):
"""
Helper that returns all the pairs (config,platform)
"""
ret = []
for c in self.configurations:
for p in self.platforms:
ret.append((c, p))
return ret

def collect_targets(self):
"""
Process the list of task generators
"""
for g in self.groups:
for tg in g:
if not isinstance(tg, TaskGen.task_gen):
continue

if not hasattr(tg, 'codelite_includes'):
tg.codelite_includes = tg.to_list(getattr(tg, 'includes', [])) + tg.to_list(getattr(tg, 'export_includes', []))
tg.post()
if not getattr(tg, 'link_task', None):
continue

p = self.vsnode_target(self, tg)
p.collect_source() # delegate this processing
p.collect_properties()
self.all_projects.append(p)

def add_aliases(self):
"""
Add a specific target that emulates the "make all" necessary for Visual studio when pressing F7
We also add an alias for "make install" (disabled by default)
"""
base = getattr(self, 'projects_dir', None) or self.tg.path

node_project = base.make_node('build_all_projects' + self.project_extension) # Node
p_build = self.vsnode_build_all(self, node_project)
p_build.collect_properties()
self.all_projects.append(p_build)

node_project = base.make_node('install_all_projects' + self.project_extension) # Node
p_install = self.vsnode_install_all(self, node_project)
p_install.collect_properties()
self.all_projects.append(p_install)

node_project = base.make_node('project_view' + self.project_extension) # Node
p_view = self.vsnode_project_view(self, node_project)
p_view.collect_source()
p_view.collect_properties()
self.all_projects.append(p_view)

n = self.vsnode_vsdir(self, make_uuid(self.srcnode.abspath() + 'build_aliases'), "build_aliases")
p_build.parent = p_install.parent = p_view.parent = n
self.all_projects.append(n)

def collect_dirs(self):
"""
Create the folder structure in the CodeLite project view
"""
seen = {}
def make_parents(proj):
# look at a project, try to make a parent
if getattr(proj, 'parent', None):
# aliases already have parents
return
x = proj.iter_path
if x in seen:
proj.parent = seen[x]
return

# There is no vsnode_vsdir for x.
# So create a project representing the folder "x"
n = proj.parent = seen[x] = self.vsnode_vsdir(self, make_uuid(x.abspath()), x.name)
n.iter_path = x.parent
self.all_projects.append(n)

# recurse up to the project directory
if x.height() > self.srcnode.height() + 1:
make_parents(n)

for p in self.all_projects[:]: # iterate over a copy of all projects
if not getattr(p, 'tg', None):
# but only projects that have a task generator
continue

# make a folder for each task generator
p.iter_path = p.tg.path
make_parents(p)



def options(ctx):
pass


+0 -39 waflib/extras/color_gcc.py

@@ -1,39 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8

# Replaces the default formatter by one which understands GCC output and colorizes it.

__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
__copyright__ = "Jérôme Carretero, 2012"

import sys
from waflib import Logs

class ColorGCCFormatter(Logs.formatter):
def __init__(self, colors):
self.colors = colors
Logs.formatter.__init__(self)
def format(self, rec):
frame = sys._getframe()
while frame:
func = frame.f_code.co_name
if func == 'exec_command':
cmd = frame.f_locals['cmd']
if isinstance(cmd, list) and ('gcc' in cmd[0] or 'g++' in cmd[0]):
lines = []
for line in rec.msg.splitlines():
if 'warning: ' in line:
lines.append(self.colors.YELLOW + line)
elif 'error: ' in line:
lines.append(self.colors.RED + line)
elif 'note: ' in line:
lines.append(self.colors.CYAN + line)
else:
lines.append(line)
rec.msg = "\n".join(lines)
frame = frame.f_back
return Logs.formatter.format(self, rec)

def options(opt):
Logs.log.handlers[0].setFormatter(ColorGCCFormatter(Logs.colors))
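
Since the formatter is installed from the options() hook, loading the tool is all a wscript needs::

    def options(opt):
        opt.load('color_gcc')

gcc/g++ warnings, errors and notes are then shown in yellow, red and cyan respectively.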


+0 -51 waflib/extras/color_rvct.py

@@ -1,51 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8

# Replaces the default formatter by one which understands RVCT output and colorizes it.

__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
__copyright__ = "Jérôme Carretero, 2012"

import sys
import atexit
from waflib import Logs

errors = []

def show_errors():
for i, e in enumerate(errors):
if i > 5:
break
print("Error: %s" % e)

atexit.register(show_errors)

class RvctFormatter(Logs.formatter):
def __init__(self, colors):
Logs.formatter.__init__(self)
self.colors = colors
def format(self, rec):
frame = sys._getframe()
while frame:
func = frame.f_code.co_name
if func == 'exec_command':
cmd = frame.f_locals['cmd']
if isinstance(cmd, list) and ('armcc' in cmd[0] or 'armld' in cmd[0]):
lines = []
for line in rec.msg.splitlines():
if 'Warning: ' in line:
lines.append(self.colors.YELLOW + line)
elif 'Error: ' in line:
lines.append(self.colors.RED + line)
errors.append(line)
elif 'note: ' in line:
lines.append(self.colors.CYAN + line)
else:
lines.append(line)
rec.msg = "\n".join(lines)
frame = frame.f_back
return Logs.formatter.format(self, rec)

def options(opt):
Logs.log.handlers[0].setFormatter(RvctFormatter(Logs.colors))


+0 -370 waflib/extras/compat15.py

@@ -1,370 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2010 (ita)

"""
This file is provided to enable compatibility with waf 1.5
It was enabled by default in waf 1.6, but it is not used in waf 1.7
"""

import sys
from waflib import ConfigSet, Logs, Options, Scripting, Task, Build, Configure, Node, Runner, TaskGen, Utils, Errors, Context

# the following is to bring some compatibility with waf 1.5 "import waflib.Configure → import Configure"
sys.modules['Environment'] = ConfigSet
ConfigSet.Environment = ConfigSet.ConfigSet

sys.modules['Logs'] = Logs
sys.modules['Options'] = Options
sys.modules['Scripting'] = Scripting
sys.modules['Task'] = Task
sys.modules['Build'] = Build
sys.modules['Configure'] = Configure
sys.modules['Node'] = Node
sys.modules['Runner'] = Runner
sys.modules['TaskGen'] = TaskGen
sys.modules['Utils'] = Utils

from waflib.Tools import c_preproc
sys.modules['preproc'] = c_preproc

from waflib.Tools import c_config
sys.modules['config_c'] = c_config

ConfigSet.ConfigSet.copy = ConfigSet.ConfigSet.derive
ConfigSet.ConfigSet.set_variant = Utils.nada

Build.BuildContext.add_subdirs = Build.BuildContext.recurse
Build.BuildContext.new_task_gen = Build.BuildContext.__call__
Build.BuildContext.is_install = 0
Node.Node.relpath_gen = Node.Node.path_from

Utils.pproc = Utils.subprocess
Utils.get_term_cols = Logs.get_term_cols

def cmd_output(cmd, **kw):

silent = False
if 'silent' in kw:
silent = kw['silent']
del(kw['silent'])

if 'e' in kw:
tmp = kw['e']
del(kw['e'])
kw['env'] = tmp

kw['shell'] = isinstance(cmd, str)
kw['stdout'] = Utils.subprocess.PIPE
if silent:
kw['stderr'] = Utils.subprocess.PIPE

try:
p = Utils.subprocess.Popen(cmd, **kw)
output = p.communicate()[0]
except OSError as e:
raise ValueError(str(e))

if p.returncode:
if not silent:
msg = "command execution failed: %s -> %r" % (cmd, str(output))
raise ValueError(msg)
output = ''
return output
Utils.cmd_output = cmd_output

def name_to_obj(self, s, env=None):
if Logs.verbose:
Logs.warn('compat: replace "name_to_obj(name, env)" with "get_tgen_by_name(name)"')
return self.get_tgen_by_name(s)
Build.BuildContext.name_to_obj = name_to_obj

def env_of_name(self, name):
try:
return self.all_envs[name]
except KeyError:
Logs.error('no such environment: '+name)
return None
Build.BuildContext.env_of_name = env_of_name


def set_env_name(self, name, env):
self.all_envs[name] = env
return env
Configure.ConfigurationContext.set_env_name = set_env_name

def retrieve(self, name, fromenv=None):
try:
env = self.all_envs[name]
except KeyError:
env = ConfigSet.ConfigSet()
self.prepare_env(env)
self.all_envs[name] = env
else:
if fromenv:
Logs.warn("The environment %s may have been configured already" % name)
return env
Configure.ConfigurationContext.retrieve = retrieve

Configure.ConfigurationContext.sub_config = Configure.ConfigurationContext.recurse
Configure.ConfigurationContext.check_tool = Configure.ConfigurationContext.load
Configure.conftest = Configure.conf
Configure.ConfigurationError = Errors.ConfigurationError
Utils.WafError = Errors.WafError

Options.OptionsContext.sub_options = Options.OptionsContext.recurse
Options.OptionsContext.tool_options = Context.Context.load
Options.Handler = Options.OptionsContext

Task.simple_task_type = Task.task_type_from_func = Task.task_factory
Task.TaskBase.classes = Task.classes

def setitem(self, key, value):
if key.startswith('CCFLAGS'):
key = key[1:]
self.table[key] = value
ConfigSet.ConfigSet.__setitem__ = setitem

@TaskGen.feature('d')
@TaskGen.before('apply_incpaths')
def old_importpaths(self):
if getattr(self, 'importpaths', []):
self.includes = self.importpaths

from waflib import Context
eld = Context.load_tool
def load_tool(*k, **kw):
ret = eld(*k, **kw)
if 'set_options' in ret.__dict__:
if Logs.verbose:
Logs.warn('compat: rename "set_options" to "options"')
ret.options = ret.set_options
if 'detect' in ret.__dict__:
if Logs.verbose:
Logs.warn('compat: rename "detect" to "configure"')
ret.configure = ret.detect
return ret
Context.load_tool = load_tool

def get_curdir(self):
return self.path.abspath()
Context.Context.curdir = property(get_curdir, Utils.nada)


rev = Context.load_module
def load_module(path, encoding=None):
ret = rev(path, encoding)
if 'set_options' in ret.__dict__:
if Logs.verbose:
Logs.warn('compat: rename "set_options" to "options" (%r)' % path)
ret.options = ret.set_options
if 'srcdir' in ret.__dict__:
if Logs.verbose:
Logs.warn('compat: rename "srcdir" to "top" (%r)' % path)
ret.top = ret.srcdir
if 'blddir' in ret.__dict__:
if Logs.verbose:
Logs.warn('compat: rename "blddir" to "out" (%r)' % path)
ret.out = ret.blddir
return ret
Context.load_module = load_module

old_post = TaskGen.task_gen.post
def post(self):
self.features = self.to_list(self.features)
if 'cc' in self.features:
if Logs.verbose:
Logs.warn('compat: the feature cc does not exist anymore (use "c")')
self.features.remove('cc')
self.features.append('c')
if 'cstaticlib' in self.features:
if Logs.verbose:
Logs.warn('compat: the feature cstaticlib does not exist anymore (use "cstlib" or "cxxstlib")')
self.features.remove('cstaticlib')
self.features.append(('cxx' in self.features) and 'cxxstlib' or 'cstlib')
if getattr(self, 'ccflags', None):
if Logs.verbose:
Logs.warn('compat: "ccflags" was renamed to "cflags"')
self.cflags = self.ccflags
return old_post(self)
TaskGen.task_gen.post = post

def waf_version(*k, **kw):
Logs.warn('wrong version (waf_version was removed in waf 1.6)')
Utils.waf_version = waf_version


import os
@TaskGen.feature('c', 'cxx', 'd')
@TaskGen.before('apply_incpaths', 'propagate_uselib_vars')
@TaskGen.after('apply_link', 'process_source')
def apply_uselib_local(self):
"""
process the uselib_local attribute
execute after apply_link because of the execution order set on 'link_task'
"""
env = self.env
from waflib.Tools.ccroot import stlink_task

# 1. the case of the libs defined in the project (visit ancestors first)
# the ancestors external libraries (uselib) will be prepended
self.uselib = self.to_list(getattr(self, 'uselib', []))
self.includes = self.to_list(getattr(self, 'includes', []))
names = self.to_list(getattr(self, 'uselib_local', []))
get = self.bld.get_tgen_by_name
seen = set([])
seen_uselib = set([])
tmp = Utils.deque(names) # consume a copy of the list of names
if tmp:
if Logs.verbose:
Logs.warn('compat: "uselib_local" is deprecated, replace by "use"')
while tmp:
lib_name = tmp.popleft()
# visit dependencies only once
if lib_name in seen:
continue

y = get(lib_name)
y.post()
seen.add(lib_name)

# object has ancestors to process (shared libraries): add them to the end of the list
if getattr(y, 'uselib_local', None):
for x in self.to_list(getattr(y, 'uselib_local', [])):
obj = get(x)
obj.post()
if getattr(obj, 'link_task', None):
if not isinstance(obj.link_task, stlink_task):
tmp.append(x)

# link task and flags
if getattr(y, 'link_task', None):

link_name = y.target[y.target.rfind(os.sep) + 1:]
if isinstance(y.link_task, stlink_task):
env.append_value('STLIB', [link_name])
else:
# some linkers can link against programs
env.append_value('LIB', [link_name])

# the order
self.link_task.set_run_after(y.link_task)

# for the recompilation
self.link_task.dep_nodes += y.link_task.outputs

# add the link path too
tmp_path = y.link_task.outputs[0].parent.bldpath()
if not tmp_path in env['LIBPATH']:
env.prepend_value('LIBPATH', [tmp_path])

# add ancestors uselib too - but only propagate those that have no staticlib defined
for v in self.to_list(getattr(y, 'uselib', [])):
if v not in seen_uselib:
seen_uselib.add(v)
if not env['STLIB_' + v]:
if not v in self.uselib:
self.uselib.insert(0, v)

# if the library task generator provides 'export_includes', add to the include path
# the export_includes must be a list of paths relative to the other library
if getattr(y, 'export_includes', None):
self.includes.extend(y.to_incnodes(y.export_includes))
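
The net effect is that waf 1.5 style declarations keep working while emitting a deprecation warning; the two forms below are roughly equivalent (hypothetical target names)::

    # waf 1.5 style, handled by apply_uselib_local above
    bld(features='cxx cxxprogram', source='main.cpp', target='app', uselib_local='mylib')
    # waf 1.6+ style
    bld(features='cxx cxxprogram', source='main.cpp', target='app', use='mylib')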

@TaskGen.feature('cprogram', 'cxxprogram', 'cstlib', 'cxxstlib', 'cshlib', 'cxxshlib', 'dprogram', 'dstlib', 'dshlib')
@TaskGen.after('apply_link')
def apply_objdeps(self):
"add the .o files produced by some other object files in the same manner as uselib_local"
names = getattr(self, 'add_objects', [])
if not names:
return
names = self.to_list(names)

get = self.bld.get_tgen_by_name
seen = []
while names:
x = names[0]

# visit dependencies only once
if x in seen:
names = names[1:]
continue

# object does not exist ?
y = get(x)

# object has ancestors to process first ? update the list of names
if getattr(y, 'add_objects', None):
added = 0
lst = y.to_list(y.add_objects)
lst.reverse()
for u in lst:
if u in seen: continue
added = 1
names = [u]+names
if added: continue # list of names modified, loop

# safe to process the current object
y.post()
seen.append(x)

for t in getattr(y, 'compiled_tasks', []):
self.link_task.inputs.extend(t.outputs)

@TaskGen.after('apply_link')
def process_obj_files(self):
if not hasattr(self, 'obj_files'):
return
for x in self.obj_files:
node = self.path.find_resource(x)
self.link_task.inputs.append(node)

@TaskGen.taskgen_method
def add_obj_file(self, file):
"""Small example on how to link object files as if they were source
obj = bld.create_obj('cc')
obj.add_obj_file('foo.o')"""
if not hasattr(self, 'obj_files'): self.obj_files = []
if not 'process_obj_files' in self.meths: self.meths.append('process_obj_files')
self.obj_files.append(file)


old_define = Configure.ConfigurationContext.__dict__['define']

@Configure.conf
def define(self, key, val, quote=True):
old_define(self, key, val, quote)
if key.startswith('HAVE_'):
self.env[key] = 1

old_undefine = Configure.ConfigurationContext.__dict__['undefine']

@Configure.conf
def undefine(self, key):
old_undefine(self, key)
if key.startswith('HAVE_'):
self.env[key] = 0

# some people might want to use export_incdirs, but it was renamed
def set_incdirs(self, val):
Logs.warn('compat: replace "export_incdirs" with "export_includes"')
self.export_includes = val
TaskGen.task_gen.export_incdirs = property(None, set_incdirs)

def install_dir(self, path):
if not path:
return []

destpath = Utils.subst_vars(path, self.env)

if self.is_install > 0:
Logs.info('* creating %s' % destpath)
Utils.check_dir(destpath)
elif self.is_install < 0:
Logs.info('* removing %s' % destpath)
try:
os.remove(destpath)
except OSError:
pass
Build.BuildContext.install_dir = install_dir


+0 -547 waflib/extras/cppcheck.py

@@ -1,547 +0,0 @@
#! /usr/bin/env python
# -*- encoding: utf-8 -*-
# Michel Mooij, michel.mooij7@gmail.com

"""
Tool Description
================
This module provides a waf wrapper (i.e. waftool) around the C/C++ source code
checking tool 'cppcheck'.

See http://cppcheck.sourceforge.net/ for more information on the cppcheck tool
itself.
Note that many Linux distributions already provide a ready-to-install version
of cppcheck. On Fedora, for instance, it can be installed using yum:

'sudo yum install cppcheck'


Usage
=====
In order to use this waftool simply add it to the 'options' and 'configure'
functions of your main waf script as shown in the example below:

def options(opt):
opt.load('cppcheck', tooldir='./waftools')

def configure(conf):
conf.load('cppcheck')
Note that the example shown above assumes that the cppcheck waftool is located
in the subdirectory named 'waftools'.

When configured as shown in the example above, cppcheck will automatically
perform a source code analysis on all C/C++ build tasks that have been
defined in your waf build system.

In the example shown below, the sources of the C program will be used as input
for cppcheck when building the task.

def build(bld):
bld.program(name='foo', src='foobar.c')

The result of the source code analysis will be stored both as xml and html
files in the build location for the task. Should any error be detected by
cppcheck the build will be aborted and a link to the html report will be shown.

When needed, source code checking by cppcheck can be disabled per task, or per
detected error or warning for a particular task. It can also be disabled for
all tasks.

In order to exclude a task from source code checking add the skip option to the
task as shown below:

def build(bld):
bld.program(
name='foo',
src='foobar.c',
cppcheck_skip=True
)

When needed, problems detected by cppcheck may be suppressed using a file
containing a list of suppression rules. The relative or absolute path to this
file can be added to the build task as shown in the example below:

bld.program(
name='bar',
src='foobar.c',
cppcheck_suppress='bar.suppress'
)

A cppcheck suppress file should contain one suppress rule per line. Each of
these rules will be passed as an '--suppress=<rule>' argument to cppcheck.

Dependencies
================
This waftool depends on the Python pygments module, which is used for source
code syntax highlighting when creating the html reports. See http://pygments.org/
for more information on this package.

Remarks
================
The generation of the html report is originally based on the cppcheck-htmlreport.py
script that ships with the cppcheck tool.
"""

import os, sys
import xml.etree.ElementTree as ElementTree
from waflib import Task, TaskGen, Logs, Context

PYGMENTS_EXC_MSG= '''
The required module 'pygments' could not be found. Please install it using your
platform package manager (e.g. apt-get or yum), using 'pip' or 'easy_install',
see 'http://pygments.org/download/' for installation instructions.
'''

try:
import pygments
from pygments import formatters, lexers
except ImportError as e:
Logs.warn(PYGMENTS_EXC_MSG)
raise e


def options(opt):
opt.add_option('--cppcheck-skip', dest='cppcheck_skip',
default=False, action='store_true',
help='do not check C/C++ sources (default=False)')

opt.add_option('--cppcheck-err-resume', dest='cppcheck_err_resume',
default=False, action='store_true',
help='continue in case of errors (default=False)')

opt.add_option('--cppcheck-bin-enable', dest='cppcheck_bin_enable',
default='warning,performance,portability,style,unusedFunction', action='store',
help="cppcheck option '--enable=' for binaries (default=warning,performance,portability,style,unusedFunction)")

opt.add_option('--cppcheck-lib-enable', dest='cppcheck_lib_enable',
default='warning,performance,portability,style', action='store',
help="cppcheck option '--enable=' for libraries (default=warning,performance,portability,style)")

opt.add_option('--cppcheck-std-c', dest='cppcheck_std_c',
default='c99', action='store',
help='cppcheck standard to use when checking C (default=c99)')

opt.add_option('--cppcheck-std-cxx', dest='cppcheck_std_cxx',
default='c++03', action='store',
help='cppcheck standard to use when checking C++ (default=c++03)')

opt.add_option('--cppcheck-check-config', dest='cppcheck_check_config',
default=False, action='store_true',
help='forced check for missing built-in include files, e.g. stdio.h (default=False)')

opt.add_option('--cppcheck-max-configs', dest='cppcheck_max_configs',
default='20', action='store',
help='maximum preprocessor (--max-configs) define iterations (default=20)')


def configure(conf):
if conf.options.cppcheck_skip:
conf.env.CPPCHECK_SKIP = [True]
conf.env.CPPCHECK_STD_C = conf.options.cppcheck_std_c
conf.env.CPPCHECK_STD_CXX = conf.options.cppcheck_std_cxx
conf.env.CPPCHECK_MAX_CONFIGS = conf.options.cppcheck_max_configs
conf.env.CPPCHECK_BIN_ENABLE = conf.options.cppcheck_bin_enable
conf.env.CPPCHECK_LIB_ENABLE = conf.options.cppcheck_lib_enable
conf.find_program('cppcheck', var='CPPCHECK')


@TaskGen.feature('c')
@TaskGen.feature('cxx')
def cppcheck_execute(self):
if len(self.env.CPPCHECK_SKIP) or self.bld.options.cppcheck_skip:
return
if getattr(self, 'cppcheck_skip', False):
return
task = self.create_task('cppcheck')
task.cmd = _tgen_create_cmd(self)
task.fatal = []
if not self.bld.options.cppcheck_err_resume:
task.fatal.append('error')


def _tgen_create_cmd(self):
features = getattr(self, 'features', [])
std_c = self.env.CPPCHECK_STD_C
std_cxx = self.env.CPPCHECK_STD_CXX
max_configs = self.env.CPPCHECK_MAX_CONFIGS
bin_enable = self.env.CPPCHECK_BIN_ENABLE
lib_enable = self.env.CPPCHECK_LIB_ENABLE

cmd = '%s' % self.env.CPPCHECK
args = ['--inconclusive','--report-progress','--verbose','--xml','--xml-version=2']
args.append('--max-configs=%s' % max_configs)

if 'cxx' in features:
args.append('--language=c++')
args.append('--std=%s' % std_cxx)
else:
args.append('--language=c')
args.append('--std=%s' % std_c)

if self.bld.options.cppcheck_check_config:
args.append('--check-config')

if set(['cprogram','cxxprogram']) & set(features):
args.append('--enable=%s' % bin_enable)
else:
args.append('--enable=%s' % lib_enable)

for src in self.to_list(getattr(self, 'source', [])):
args.append('%r' % src)
for inc in self.to_incnodes(self.to_list(getattr(self, 'includes', []))):
args.append('-I%r' % inc)
for inc in self.to_incnodes(self.to_list(self.env.INCLUDES)):
args.append('-I%r' % inc)
return '%s %s' % (cmd, ' '.join(args))
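# For illustration, the generated command might look roughly like this
# (paths, flags and the source file name are hypothetical):
#   cppcheck --inconclusive --report-progress --verbose --xml --xml-version=2
#     --max-configs=20 --language=c++ --std=c++03
#     --enable=warning,performance,portability,style 'main.cpp' -I/path/to/include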


class cppcheck(Task.Task):
quiet = True

def run(self):
stderr = self.generator.bld.cmd_and_log(self.cmd, quiet=Context.STDERR, output=Context.STDERR)
self._save_xml_report(stderr)
defects = self._get_defects(stderr)
index = self._create_html_report(defects)
self._errors_evaluate(defects, index)
return 0

def _save_xml_report(self, s):
'''use the cppcheck XML result string, add the command string used to invoke
cppcheck, and save it as an XML file.
'''
header = '%s\n' % s.splitlines()[0]
root = ElementTree.fromstring(s)
cmd = ElementTree.SubElement(root.find('cppcheck'), 'cmd')
cmd.text = str(self.cmd)
body = ElementTree.tostring(root).decode('utf-8') # tostring() returns bytes on Python 3
node = self.generator.path.get_bld().find_or_declare('cppcheck.xml')
node.write(header + body)

def _get_defects(self, xml_string):
'''evaluate the XML string returned by cppcheck (on stderr) and use it to create
a list of defects.
'''
defects = []
for error in ElementTree.fromstring(xml_string).iter('error'):
defect = {}
defect['id'] = error.get('id')
defect['severity'] = error.get('severity')
defect['msg'] = str(error.get('msg')).replace('<','&lt;')
defect['verbose'] = error.get('verbose')
for location in error.findall('location'):
defect['file'] = location.get('file')
defect['line'] = str(int(location.get('line')) - 1)
defects.append(defect)
return defects

def _create_html_report(self, defects):
files, css_style_defs = self._create_html_files(defects)
index = self._create_html_index(files)
self._create_css_file(css_style_defs)
return index

def _create_html_files(self, defects):
sources = {}
defects = [defect for defect in defects if 'file' in defect]
for defect in defects:
name = defect['file']
if name not in sources:
sources[name] = [defect]
else:
sources[name].append(defect)
files = {}
css_style_defs = None
bpath = self.generator.path.get_bld().abspath()
for i, name in enumerate(sources.keys()):
htmlfile = 'cppcheck/%i.html' % (i)
errors = sources[name]
files[name] = { 'htmlfile': '%s/%s' % (bpath, htmlfile), 'errors': errors }
css_style_defs = self._create_html_file(name, htmlfile, errors)
return files, css_style_defs

def _create_html_file(self, sourcefile, htmlfile, errors):
name = self.generator.get_name()
root = ElementTree.fromstring(CPPCHECK_HTML_FILE)
title = root.find('head/title')
title.text = 'cppcheck - report - %s' % name

body = root.find('body')
for div in body.findall('div'):
if div.get('id') == 'page':
page = div
break
for div in page.findall('div'):
if div.get('id') == 'header':
h1 = div.find('h1')
h1.text = 'cppcheck report - %s' % name
if div.get('id') == 'content':
content = div
srcnode = self.generator.bld.root.find_node(sourcefile)
hl_lines = [e['line'] for e in errors if 'line' in e]
formatter = CppcheckHtmlFormatter(linenos=True, style='colorful', hl_lines=hl_lines, lineanchors='line')
formatter.errors = [e for e in errors if 'line' in e]
css_style_defs = formatter.get_style_defs('.highlight')
lexer = pygments.lexers.guess_lexer_for_filename(sourcefile, "")
s = pygments.highlight(srcnode.read(), lexer, formatter)
table = ElementTree.fromstring(s)
content.append(table)

s = ElementTree.tostring(root, method='html').decode('utf-8')
s = CPPCHECK_HTML_TYPE + s
node = self.generator.path.get_bld().find_or_declare(htmlfile)
node.write(s)
return css_style_defs

def _create_html_index(self, files):
name = self.generator.get_name()
root = ElementTree.fromstring(CPPCHECK_HTML_FILE)
title = root.find('head/title')
title.text = 'cppcheck - report - %s' % name

body = root.find('body')
for div in body.findall('div'):
if div.get('id') == 'page':
page = div
break
for div in page.findall('div'):
if div.get('id') == 'header':
h1 = div.find('h1')
h1.text = 'cppcheck report - %s' % name
if div.get('id') == 'content':
content = div
self._create_html_table(content, files)

s = ElementTree.tostring(root, method='html').decode('utf-8')
s = CPPCHECK_HTML_TYPE + s
node = self.generator.path.get_bld().find_or_declare('cppcheck/index.html')
node.write(s)
return node

def _create_html_table(self, content, files):
table = ElementTree.fromstring(CPPCHECK_HTML_TABLE)
for name, val in files.items():
f = val['htmlfile']
s = '<tr><td colspan="4"><a href="%s">%s</a></td></tr>\n' % (f,name)
row = ElementTree.fromstring(s)
table.append(row)

errors = sorted(val['errors'], key=lambda e: int(e['line']) if 'line' in e else sys.maxsize)
for e in errors:
if 'line' not in e:
s = '<tr><td></td><td>%s</td><td>%s</td><td>%s</td></tr>\n' % (e['id'], e['severity'], e['msg'])
else:
attr = ''
if e['severity'] == 'error':
attr = 'class="error"'
s = '<tr><td><a href="%s#line-%s">%s</a></td>' % (f, e['line'], e['line'])
s+= '<td>%s</td><td>%s</td><td %s>%s</td></tr>\n' % (e['id'], e['severity'], attr, e['msg'])
row = ElementTree.fromstring(s)
table.append(row)
content.append(table)

def _create_css_file(self, css_style_defs):
css = str(CPPCHECK_CSS_FILE)
if css_style_defs:
css = "%s\n%s\n" % (css, css_style_defs)
node = self.generator.path.get_bld().find_or_declare('cppcheck/style.css')
node.write(css)

def _errors_evaluate(self, errors, http_index):
name = self.generator.get_name()
fatal = self.fatal
severity = [err['severity'] for err in errors]
problems = [err for err in errors if err['severity'] != 'information']

if set(fatal) & set(severity):
exc = "\n"
exc += "\nccpcheck detected fatal error(s) in task '%s', see report for details:" % name
exc += "\n file://%r" % (http_index)
exc += "\n"
self.generator.bld.fatal(exc)

elif len(problems):
msg = "\nccpcheck detected (possible) problem(s) in task '%s', see report for details:" % name
msg += "\n file://%r" % http_index
msg += "\n"
Logs.error(msg)


class CppcheckHtmlFormatter(pygments.formatters.HtmlFormatter):
errors = []

def wrap(self, source, outfile):
line_no = 1
for i, t in super(CppcheckHtmlFormatter, self).wrap(source, outfile):
# If this is a source code line we want to add a span tag at the end.
if i == 1:
for error in self.errors:
if int(error['line']) == line_no:
t = t.replace('\n', CPPCHECK_HTML_ERROR % error['msg'])
line_no = line_no + 1
yield i, t


CPPCHECK_HTML_TYPE = \
'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">\n'

CPPCHECK_HTML_FILE = """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd" [<!ENTITY nbsp "&#160;">]>
<html>
<head>
<title>cppcheck - report - XXX</title>
<link href="style.css" rel="stylesheet" type="text/css" />
<style type="text/css">
</style>
</head>
<body class="body">
<div id="page-header">&nbsp;</div>
<div id="page">
<div id="header">
<h1>cppcheck report - XXX</h1>
</div>
<div id="menu">
<a href="index.html">Defect list</a>
</div>
<div id="content">
</div>
<div id="footer">
<div>cppcheck - a tool for static C/C++ code analysis</div>
<div>
Internet: <a href="http://cppcheck.sourceforge.net">http://cppcheck.sourceforge.net</a><br/>
Forum: <a href="http://apps.sourceforge.net/phpbb/cppcheck/">http://apps.sourceforge.net/phpbb/cppcheck/</a><br/>
IRC: #cppcheck at irc.freenode.net
</div>
&nbsp;
</div>
&nbsp;
</div>
<div id="page-footer">&nbsp;</div>
</body>
</html>
"""

CPPCHECK_HTML_TABLE = """
<table>
<tr>
<th>Line</th>
<th>Id</th>
<th>Severity</th>
<th>Message</th>
</tr>
</table>
"""

CPPCHECK_HTML_ERROR = \
'<span style="background: #ffaaaa;padding: 3px;">&lt;--- %s</span>\n'

CPPCHECK_CSS_FILE = """
body.body {
font-family: Arial;
font-size: 13px;
background-color: black;
padding: 0px;
margin: 0px;
}

.error {
font-family: Arial;
font-size: 13px;
background-color: #ffb7b7;
padding: 0px;
margin: 0px;
}

th, td {
min-width: 100px;
text-align: left;
}

#page-header {
clear: both;
width: 1200px;
margin: 20px auto 0px auto;
height: 10px;
border-bottom-width: 2px;
border-bottom-style: solid;
border-bottom-color: #aaaaaa;
}

#page {
width: 1160px;
margin: auto;
border-left-width: 2px;
border-left-style: solid;
border-left-color: #aaaaaa;
border-right-width: 2px;
border-right-style: solid;
border-right-color: #aaaaaa;
background-color: White;
padding: 20px;
}

#page-footer {
clear: both;
width: 1200px;
margin: auto;
height: 10px;
border-top-width: 2px;
border-top-style: solid;
border-top-color: #aaaaaa;
}

#header {
width: 100%;
height: 70px;
background-image: url(logo.png);
background-repeat: no-repeat;
background-position: left top;
border-bottom-style: solid;
border-bottom-width: thin;
border-bottom-color: #aaaaaa;
}

#menu {
margin-top: 5px;
text-align: left;
float: left;
width: 100px;
height: 300px;
}

#menu > a {
margin-left: 10px;
display: block;
}

#content {
float: left;
width: 1020px;
margin: 5px;
padding: 0px 10px 10px 10px;
border-left-style: solid;
border-left-width: thin;
border-left-color: #aaaaaa;
}

#footer {
padding-bottom: 5px;
padding-top: 5px;
border-top-style: solid;
border-top-width: thin;
border-top-color: #aaaaaa;
clear: both;
font-size: 10px;
}

#footer > div {
float: left;
width: 33%;
}

"""


+ 0
- 224
waflib/extras/cpplint.py

@@ -1,224 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
#
# written by Sylvain Rouquette, 2014

'''

This is an extra tool, not bundled with the default waf binary.
To add the cpplint tool to the waf file:
$ ./waf-light --tools=compat15,cpplint
or, if you have waf >= 1.6.2
$ ./waf update --files=cpplint

This tool also requires cpplint for Python.
If you have pip, you can install it like this: pip install cpplint

However, I'd recommend getting the latest version from SVN,
as the pip version is outdated.
https://code.google.com/p/google-styleguide/source/browse/trunk/cpplint/cpplint.py
Apply this patch if you want to run it with Python 3:
https://code.google.com/p/google-styleguide/issues/detail?id=19


When using this tool, the wscript will look like:

def options(opt):
opt.load('compiler_cxx cpplint')

def configure(conf):
conf.load('compiler_cxx cpplint')
# optional, you can also specify them on the command line
conf.env.CPPLINT_FILTERS = ','.join((
'-whitespace/newline', # c++11 lambda
'-readability/braces', # c++11 constructor
'-whitespace/braces', # c++11 constructor
'-build/storage_class', # c++11 for-range
'-whitespace/blank_line', # user pref
'-whitespace/labels' # user pref
))

def build(bld):
bld(features='cpplint', source='main.cpp', target='app')
# add include files, because they aren't usually built
bld(features='cpplint', source=bld.path.ant_glob('**/*.hpp'))
'''

import sys, re
import logging
import threading
from waflib import Task, Build, TaskGen, Logs, Utils
try:
from cpplint.cpplint import ProcessFile, _cpplint_state
except ImportError:
pass


critical_errors = 0
CPPLINT_FORMAT = '[CPPLINT] %(filename)s:\nline %(linenum)s, severity %(confidence)s, category: %(category)s\n%(message)s\n'
RE_EMACS = re.compile(r'(?P<filename>.*):(?P<linenum>\d+): (?P<message>.*) \[(?P<category>.*)\] \[(?P<confidence>\d+)\]')
CPPLINT_RE = {
'waf': RE_EMACS,
'emacs': RE_EMACS,
'vs7': re.compile(r'(?P<filename>.*)\((?P<linenum>\d+)\): (?P<message>.*) \[(?P<category>.*)\] \[(?P<confidence>\d+)\]'),
'eclipse': re.compile(r'(?P<filename>.*):(?P<linenum>\d+): warning: (?P<message>.*) \[(?P<category>.*)\] \[(?P<confidence>\d+)\]'),
}



def init_env_from_options(env):
from waflib.Options import options
for key, value in options.__dict__.items():
if not key.startswith('CPPLINT_') or env[key]:
continue
env[key] = value
if env.CPPLINT_OUTPUT != 'waf':
_cpplint_state.output_format = env.CPPLINT_OUTPUT


def options(opt):
opt.add_option('--cpplint-filters', type='string',
default='', dest='CPPLINT_FILTERS',
help='add filters to cpplint')
opt.add_option('--cpplint-level', default=1, type='int', dest='CPPLINT_LEVEL',
help='specify the log level (default: 1)')
opt.add_option('--cpplint-break', default=5, type='int', dest='CPPLINT_BREAK',
help='break the build if error >= level (default: 5)')
opt.add_option('--cpplint-skip', action='store_true',
default=False, dest='CPPLINT_SKIP',
help='skip cpplint during build')
opt.add_option('--cpplint-output', type='string',
default='waf', dest='CPPLINT_OUTPUT',
help='select output format (waf, emacs, vs7)')


def configure(conf):
conf.start_msg('Checking cpplint')
try:
import cpplint
conf.end_msg('ok')
except ImportError:
conf.env.CPPLINT_SKIP = True
conf.end_msg('not found, skipping it.')


class cpplint_formatter(Logs.formatter):
def __init__(self, fmt):
logging.Formatter.__init__(self, CPPLINT_FORMAT)
self.fmt = fmt

def format(self, rec):
if self.fmt == 'waf':
result = CPPLINT_RE[self.fmt].match(rec.msg).groupdict()
rec.msg = CPPLINT_FORMAT % result
if rec.levelno <= logging.INFO:
rec.c1 = Logs.colors.CYAN
return super(cpplint_formatter, self).format(rec)


class cpplint_handler(Logs.log_handler):
def __init__(self, stream=sys.stderr, **kw):
super(cpplint_handler, self).__init__(stream, **kw)
self.stream = stream

def emit(self, rec):
rec.stream = self.stream
self.emit_override(rec)
self.flush()


class cpplint_wrapper(object):
stream = None
tasks_count = 0
lock = threading.RLock()

def __init__(self, logger, threshold, fmt):
self.logger = logger
self.threshold = threshold
self.error_count = 0
self.fmt = fmt

def __enter__(self):
with cpplint_wrapper.lock:
cpplint_wrapper.tasks_count += 1
if cpplint_wrapper.tasks_count == 1:
sys.stderr.flush()
cpplint_wrapper.stream = sys.stderr
sys.stderr = self
return self

def __exit__(self, exc_type, exc_value, traceback):
with cpplint_wrapper.lock:
cpplint_wrapper.tasks_count -= 1
if cpplint_wrapper.tasks_count == 0:
sys.stderr = cpplint_wrapper.stream
sys.stderr.flush()

def isatty(self):
return True

def write(self, message):
global critical_errors
result = CPPLINT_RE[self.fmt].match(message)
if not result:
return
level = int(result.groupdict()['confidence'])
if level >= self.threshold:
critical_errors += 1
if level <= 2:
self.logger.info(message)
elif level <= 4:
self.logger.warning(message)
else:
self.logger.error(message)


cpplint_logger = None
def get_cpplint_logger(fmt):
global cpplint_logger
if cpplint_logger:
return cpplint_logger
cpplint_logger = logging.getLogger('cpplint')
hdlr = cpplint_handler()
hdlr.setFormatter(cpplint_formatter(fmt))
cpplint_logger.addHandler(hdlr)
cpplint_logger.setLevel(logging.DEBUG)
return cpplint_logger


class cpplint(Task.Task):
color = 'PINK'

def __init__(self, *k, **kw):
super(cpplint, self).__init__(*k, **kw)

def run(self):
global critical_errors
_cpplint_state.SetFilters(self.env.CPPLINT_FILTERS)
break_level = self.env.CPPLINT_BREAK
verbosity = self.env.CPPLINT_LEVEL
with cpplint_wrapper(get_cpplint_logger(self.env.CPPLINT_OUTPUT),
break_level, self.env.CPPLINT_OUTPUT):
ProcessFile(self.inputs[0].abspath(), verbosity)
return critical_errors


@TaskGen.extension('.h', '.hh', '.hpp', '.hxx')
def cpplint_includes(self, node):
pass

@TaskGen.feature('cpplint')
@TaskGen.before_method('process_source')
def run_cpplint(self):
if not self.env.CPPLINT_INITIALIZED:
self.env.CPPLINT_INITIALIZED = True
init_env_from_options(self.env)
if self.env.CPPLINT_SKIP:
return
if self.env.CPPLINT_OUTPUT not in CPPLINT_RE:
return
for src in self.to_list(getattr(self, 'source', [])):
if isinstance(src, str):
self.create_task('cpplint', self.path.find_or_declare(src))
else:
self.create_task('cpplint', src)

+ 0
- 149
waflib/extras/cython.py

@@ -1,149 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2010-2015

import os,re

import waflib
import waflib.Logs as _msg
from waflib import Task, Logs
from waflib.TaskGen import extension, feature, before_method, after_method

cy_api_pat = re.compile(r'\s*?cdef\s*?(public|api)\w*')
re_cyt = re.compile(r"""
(?:from\s+(\w+)\s+)? # optionally match "from foo" and capture foo
c?import\s(\w+|[*]) # require "import bar" and capture bar
""", re.M | re.VERBOSE)

@extension('.pyx')
def add_cython_file(self, node):
"""
Process a *.pyx* file given in the list of source files. No additional
feature is required::

def build(bld):
bld(features='c cshlib pyext', source='main.c foo.pyx', target='app')
"""
ext = '.c'
if 'cxx' in self.features:
self.env.append_unique('CYTHONFLAGS', '--cplus')
ext = '.cc'

for x in getattr(self, 'cython_includes', []):
# TODO re-use these nodes in "scan" below
d = self.path.find_dir(x)
if d:
self.env.append_unique('CYTHONFLAGS', '-I%s' % d.abspath())

tsk = self.create_task('cython', node, node.change_ext(ext))
self.source += tsk.outputs

class cython(Task.Task):
run_str = '${CYTHON} ${CYTHONFLAGS} -o ${TGT[0].abspath()} ${SRC}'
color = 'GREEN'

vars = ['INCLUDES']
"""
Rebuild whenever the INCLUDES change. The variables such as CYTHONFLAGS will be appended
by the metaclass.
"""

ext_out = ['.h']
"""
The creation of a .h file is known only after the build has begun, so it is not
possible to compute a build order just by looking at the task inputs/outputs.
"""

def runnable_status(self):
"""
Perform a double-check to add the headers created by cython
to the output nodes. The scanner is executed only when the cython task
must be executed (optimization).
"""
ret = super(cython, self).runnable_status()
if ret == Task.ASK_LATER:
return ret
for x in self.generator.bld.raw_deps[self.uid()]:
if x.startswith('header:'):
self.outputs.append(self.inputs[0].parent.find_or_declare(x.replace('header:', '')))
return super(cython, self).runnable_status()

def post_run(self):
for x in self.outputs:
if x.name.endswith('.h'):
if not os.path.exists(x.abspath()):
if Logs.verbose:
Logs.warn('Expected %r' % x.abspath())
x.write('')
return Task.Task.post_run(self)

def scan(self):
"""
Return the dependent files (.pxd) by looking in the include folders.
Put the headers to generate in the custom list "bld.raw_deps".
To inspect the scanner results use::

$ waf clean build --zones=deps
"""
node = self.inputs[0]
txt = node.read()

mods = []
for m in re_cyt.finditer(txt):
if m.group(1): # matches "from foo import bar"
mods.append(m.group(1))
else:
mods.append(m.group(2))

_msg.debug("cython: mods %r" % mods)
incs = getattr(self.generator, 'cython_includes', [])
incs = [self.generator.path.find_dir(x) for x in incs]
incs.append(node.parent)

found = []
missing = []
for x in mods:
for y in incs:
k = y.find_resource(x + '.pxd')
if k:
found.append(k)
break
else:
missing.append(x)

# the cython file implicitly depends on a pxd file that might be present
implicit = node.parent.find_resource(node.name[:-3] + 'pxd')
if implicit:
found.append(implicit)

_msg.debug("cython: found %r" % found)

# Now the .h files to be created - store them in bld.raw_deps for later use
has_api = False
has_public = False
for l in txt.splitlines():
if cy_api_pat.match(l):
if ' api ' in l:
has_api = True
if ' public ' in l:
has_public = True
name = node.name.replace('.pyx', '')
if has_api:
missing.append('header:%s_api.h' % name)
if has_public:
missing.append('header:%s.h' % name)

return (found, missing)

def options(ctx):
ctx.add_option('--cython-flags', action='store', default='', help='space separated list of flags to pass to cython')

def configure(ctx):
if not ctx.env.CC and not ctx.env.CXX:
ctx.fatal('Load a C/C++ compiler first')
if not ctx.env.PYTHON:
ctx.fatal('Load the python tool first!')
ctx.find_program('cython', var='CYTHON')
if ctx.options.cython_flags:
ctx.env.CYTHONFLAGS = ctx.options.cython_flags


+ 0
- 72
waflib/extras/dcc.py

@@ -1,72 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Jérôme Carretero, 2011 (zougloub)

from waflib import Configure, Options, Utils
from waflib.Tools import ccroot
from waflib.Configure import conf

@conf
def find_dcc(conf):
cc = conf.find_program(['dcc'], var='CC', path_list=getattr(Options.options, 'diabbindir', ""))
conf.env.CC_NAME = 'dcc'

@conf
def find_dld(conf):
ld = conf.find_program(['dld'], var='LINK_CC', path_list=getattr(Options.options, 'diabbindir', ""))
conf.env.LINK_CC_NAME = 'dld'

@conf
def find_dar(conf):
ar = conf.find_program(['dar'], var='DAR', path_list=getattr(Options.options, 'diabbindir', ""))
conf.env.AR = ar
conf.env.AR_NAME = 'dar'
conf.env.ARFLAGS = 'rcs'

@conf
def find_ddump(conf):
prg = conf.find_program(['ddump'], var='DDUMP', path_list=getattr(Options.options, 'diabbindir', ""))

@conf
def dcc_common_flags(conf):
v = conf.env
v['CC_SRC_F'] = []
v['CC_TGT_F'] = ['-c', '-o']

# linker
if not v['LINK_CC']: v['LINK_CC'] = v['CC']
v['CCLNK_SRC_F'] = []
v['CCLNK_TGT_F'] = ['-o']
v['CPPPATH_ST'] = '-I%s'
v['DEFINES_ST'] = '-D%s'

v['LIB_ST'] = '-l:%s' # template for adding libs
v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
v['STLIB_ST'] = '-l:%s'
v['STLIBPATH_ST'] = '-L%s'
v['RPATH_ST'] = '-Wl,-rpath,%s'
#v['STLIB_MARKER'] = '-Wl,-Bstatic'

# program
v['cprogram_PATTERN'] = '%s.elf'

# static lib
v['LINKFLAGS_cstlib'] = ['-Wl,-Bstatic']
v['cstlib_PATTERN'] = 'lib%s.a'

def configure(conf):
conf.find_dcc()
conf.find_dar()
conf.find_dld()
conf.find_ddump()
conf.dcc_common_flags()
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()

def options(opt):
"""
Add the ``--with-diab-bindir`` command-line option.
"""
opt.add_option('--with-diab-bindir', type='string', dest='diabbindir', help = 'Specify alternate diab bin folder', default="")


+ 0
- 432
waflib/extras/distnet.py

@@ -1,432 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8

"""
waf-powered distributed network builds, with a network cache.

Caching files from a server has advantages over an NFS/Samba shared folder:

- builds are much faster because they use local files
- builds just continue to work in case of a network glitch
- permissions are much simpler to manage
"""

import os, urllib, tarfile, re, shutil, tempfile, sys
from collections import OrderedDict
from waflib import Context, Utils, Logs

try:
from urllib.parse import urlencode
except ImportError:
urlencode = urllib.urlencode

def safe_urlencode(data):
x = urlencode(data)
try:
x = x.encode('utf-8')
except Exception:
pass
return x

try:
from urllib.error import URLError
except ImportError:
from urllib2 import URLError

try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen

DISTNETCACHE = os.environ.get('DISTNETCACHE', '/tmp/distnetcache')
DISTNETSERVER = os.environ.get('DISTNETSERVER', 'http://localhost:8000/cgi-bin/')
TARFORMAT = 'w:bz2'
TIMEOUT = 60
REQUIRES = 'requires.txt'

re_com = re.compile(r'\s*#.*', re.M)

def total_version_order(num):
lst = num.split('.')
template = '%10s' * len(lst)
ret = template % tuple(lst)
return ret
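# Example: each component is right-aligned in a fixed-width field so that plain
# string comparison matches numeric ordering:
#   total_version_order('1.10.2') == '         1        10         2'
# which sorts after total_version_order('1.9.9'), as intended.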

def get_distnet_cache():
return getattr(Context.g_module, 'DISTNETCACHE', DISTNETCACHE)

def get_server_url():
return getattr(Context.g_module, 'DISTNETSERVER', DISTNETSERVER)

def get_download_url():
return '%s/download.py' % get_server_url()

def get_upload_url():
return '%s/upload.py' % get_server_url()

def get_resolve_url():
return '%s/resolve.py' % get_server_url()

def send_package_name():
out = getattr(Context.g_module, 'out', 'build')
pkgfile = '%s/package_to_upload.tarfile' % out
return pkgfile

class package(Context.Context):
fun = 'package'
cmd = 'package'

def execute(self):
try:
files = self.files
except AttributeError:
files = self.files = []

Context.Context.execute(self)
pkgfile = send_package_name()
if pkgfile not in files:
if REQUIRES not in files:
files.append(REQUIRES)
self.make_tarfile(pkgfile, files, add_to_package=False)

def make_tarfile(self, filename, files, **kw):
if kw.get('add_to_package', True):
self.files.append(filename)

with tarfile.open(filename, TARFORMAT) as tar:
endname = os.path.split(filename)[-1]
endname = endname.split('.')[0] + '/'
for x in files:
tarinfo = tar.gettarinfo(x, x)
tarinfo.uid = tarinfo.gid = 0
tarinfo.uname = tarinfo.gname = 'root'
tarinfo.size = os.stat(x).st_size

# TODO - more archive creation options?
if kw.get('bare', True):
tarinfo.name = os.path.split(x)[1]
else:
tarinfo.name = endname + x # todo, if tuple, then..
Logs.debug("adding %r to %s" % (tarinfo.name, filename))
with open(x, 'rb') as f:
tar.addfile(tarinfo, f)
Logs.info('Created %s' % filename)

class publish(Context.Context):
fun = 'publish'
cmd = 'publish'
def execute(self):
if hasattr(Context.g_module, 'publish'):
Context.Context.execute(self)
mod = Context.g_module

rfile = getattr(self, 'rfile', send_package_name())
if not os.path.isfile(rfile):
self.fatal('Create the release file with "waf release" first! %r' % rfile)

fdata = Utils.readf(rfile, m='rb')
data = safe_urlencode([('pkgdata', fdata), ('pkgname', mod.APPNAME), ('pkgver', mod.VERSION)])

req = Request(get_upload_url(), data)
response = urlopen(req, timeout=TIMEOUT)
data = response.read().strip()

if sys.hexversion>0x300000f:
data = data.decode('utf-8')

if data != 'ok':
self.fatal('Could not publish the package %r' % data)

class constraint(object):
def __init__(self, line=''):
self.required_line = line
self.info = []

line = line.strip()
if not line:
return

lst = line.split(',')
if lst:
self.pkgname = lst[0]
self.required_version = lst[1]
for k in lst:
a, b, c = k.partition('=')
if a and c:
self.info.append((a, c))
def __str__(self):
buf = []
buf.append(self.pkgname)
buf.append(self.required_version)
for k in self.info:
buf.append('%s=%s' % k)
return ','.join(buf)

def __repr__(self):
return "requires %s-%s" % (self.pkgname, self.required_version)

def human_display(self, pkgname, pkgver):
return '%s-%s requires %s-%s' % (pkgname, pkgver, self.pkgname, self.required_version)

def why(self):
ret = []
for x in self.info:
if x[0] == 'reason':
ret.append(x[1])
return ret

def add_reason(self, reason):
self.info.append(('reason', reason))

def parse_constraints(text):
assert(text is not None)
constraints = []
text = re.sub(re_com, '', text)
lines = text.splitlines()
for line in lines:
line = line.strip()
if not line:
continue
constraints.append(constraint(line))
return constraints
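# An illustrative requires.txt accepted by this parser (package names,
# versions and reasons are hypothetical; '#' comments are stripped):
#   libfoo,1.2.*
#   libbar,3.0,reason=needed by the frobnicator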

def list_package_versions(cachedir, pkgname):
pkgdir = os.path.join(cachedir, pkgname)
try:
versions = os.listdir(pkgdir)
except OSError:
return []
versions.sort(key=total_version_order)
versions.reverse()
return versions

class package_reader(Context.Context):
cmd = 'solver'
fun = 'solver'

def __init__(self, **kw):
Context.Context.__init__(self, **kw)

self.myproject = getattr(Context.g_module, 'APPNAME', 'project')
self.myversion = getattr(Context.g_module, 'VERSION', '1.0')
self.cache_constraints = {}
self.constraints = []

def compute_dependencies(self, filename=REQUIRES):
text = Utils.readf(filename)
data = safe_urlencode([('text', text)])

if '--offline' in sys.argv:
self.constraints = self.local_resolve(text)
else:
req = Request(get_resolve_url(), data)
try:
response = urlopen(req, timeout=TIMEOUT)
except URLError as e:
Logs.warn('The package server is down! %r' % e)
self.constraints = self.local_resolve(text)
else:
ret = response.read()
try:
ret = ret.decode('utf-8')
except Exception:
pass
self.trace(ret)
self.constraints = parse_constraints(ret)
self.check_errors()

def check_errors(self):
errors = False
for c in self.constraints:
if not c.required_version:
errors = True

reasons = c.why()
if len(reasons) == 1:
Logs.error('%s but no matching package could be found in this repository' % reasons[0])
else:
Logs.error('Conflicts on package %r:' % c.pkgname)
for r in reasons:
Logs.error(' %s' % r)
if errors:
self.fatal('The package requirements cannot be satisfied!')

def load_constraints(self, pkgname, pkgver, requires=REQUIRES):
try:
return self.cache_constraints[(pkgname, pkgver)]
except KeyError:
#Logs.error("no key %r" % (pkgname, pkgver))
text = Utils.readf(os.path.join(get_distnet_cache(), pkgname, pkgver, requires))
ret = parse_constraints(text)
self.cache_constraints[(pkgname, pkgver)] = ret
return ret

def apply_constraint(self, domain, constraint):
vname = constraint.required_version.replace('*', '.*')
rev = re.compile(vname, re.M)
ret = [x for x in domain if rev.match(x)]
return ret

def trace(self, *k):
if getattr(self, 'debug', None):
Logs.error(*k)

def solve(self, packages_to_versions={}, packages_to_constraints={}, pkgname='', pkgver='', todo=[], done=[]):
# breadth first search
n_packages_to_versions = dict(packages_to_versions)
n_packages_to_constraints = dict(packages_to_constraints)

self.trace("calling solve with %r %r %r" % (packages_to_versions, todo, done))
done = done + [pkgname]

constraints = self.load_constraints(pkgname, pkgver)
self.trace("constraints %r" % constraints)

for k in constraints:
try:
domain = n_packages_to_versions[k.pkgname]
except KeyError:
domain = list_package_versions(get_distnet_cache(), k.pkgname)


self.trace("constraints?")
if k.pkgname not in done:
todo = todo + [k.pkgname]

self.trace("domain before %s -> %s, %r" % (pkgname, k.pkgname, domain))

# apply the constraint
domain = self.apply_constraint(domain, k)

self.trace("domain after %s -> %s, %r" % (pkgname, k.pkgname, domain))

n_packages_to_versions[k.pkgname] = domain

# then store the constraint applied
constraints = list(packages_to_constraints.get(k.pkgname, []))
constraints.append((pkgname, pkgver, k))
n_packages_to_constraints[k.pkgname] = constraints

if not domain:
self.trace("no domain while processing constraint %r from %r %r" % (domain, pkgname, pkgver))
return (n_packages_to_versions, n_packages_to_constraints)

# next package on the todo list
if not todo:
return (n_packages_to_versions, n_packages_to_constraints)

n_pkgname = todo[0]
n_pkgver = n_packages_to_versions[n_pkgname][0]
tmp = dict(n_packages_to_versions)
tmp[n_pkgname] = [n_pkgver]

self.trace("fixed point %s" % n_pkgname)

return self.solve(tmp, n_packages_to_constraints, n_pkgname, n_pkgver, todo[1:], done)

def get_results(self):
return '\n'.join([str(c) for c in self.constraints])

def solution_to_constraints(self, versions, constraints):
solution = []
for p in versions.keys():
c = constraint()
solution.append(c)

c.pkgname = p
if versions[p]:
c.required_version = versions[p][0]
else:
c.required_version = ''
for (from_pkgname, from_pkgver, c2) in constraints.get(p, ''):
c.add_reason(c2.human_display(from_pkgname, from_pkgver))
return solution

def local_resolve(self, text):
self.cache_constraints[(self.myproject, self.myversion)] = parse_constraints(text)
p2v = OrderedDict({self.myproject: [self.myversion]})
(versions, constraints) = self.solve(p2v, {}, self.myproject, self.myversion, [])
return self.solution_to_constraints(versions, constraints)

def download_to_file(self, pkgname, pkgver, subdir, tmp):
data = safe_urlencode([('pkgname', pkgname), ('pkgver', pkgver), ('pkgfile', subdir)])
req = urlopen(get_download_url(), data, timeout=TIMEOUT)
with open(tmp, 'wb') as f:
while True:
buf = req.read(8192)
if not buf:
break
f.write(buf)

def extract_tar(self, subdir, pkgdir, tmpfile):
with tarfile.open(tmpfile) as f:
temp = tempfile.mkdtemp(dir=pkgdir)
try:
f.extractall(temp)
os.rename(temp, os.path.join(pkgdir, subdir))
finally:
try:
shutil.rmtree(temp)
except Exception:
pass

def get_pkg_dir(self, pkgname, pkgver, subdir):
pkgdir = os.path.join(get_distnet_cache(), pkgname, pkgver)
if not os.path.isdir(pkgdir):
os.makedirs(pkgdir)

target = os.path.join(pkgdir, subdir)

if os.path.exists(target):
return target

(fd, tmp) = tempfile.mkstemp(dir=pkgdir)
try:
os.close(fd)
self.download_to_file(pkgname, pkgver, subdir, tmp)
if subdir == REQUIRES:
os.rename(tmp, target)
else:
self.extract_tar(subdir, pkgdir, tmp)
finally:
try:
os.remove(tmp)
except OSError:
pass

return target

def __iter__(self):
if not self.constraints:
self.compute_dependencies()
for x in self.constraints:
if x.pkgname == self.myproject:
continue
yield x
return # PEP 479: raising StopIteration inside a generator is an error on Python 3.7+

def execute(self):
self.compute_dependencies()

packages = package_reader()

def load_tools(ctx, extra):
global packages
for c in packages:
packages.get_pkg_dir(c.pkgname, c.required_version, extra)
noarchdir = packages.get_pkg_dir(c.pkgname, c.required_version, 'noarch')
for x in os.listdir(noarchdir):
if x.startswith('waf_') and x.endswith('.py'):
ctx.load([x[:-len('.py')]], tooldir=[noarchdir]) # rstrip('.py') would strip characters, not the suffix

def options(opt):
opt.add_option('--offline', action='store_true')
packages.execute()
load_tools(opt, REQUIRES)

def configure(conf):
load_tools(conf, conf.variant)

def build(bld):
load_tools(bld, bld.variant)


+ 0
- 219
waflib/extras/doxygen.py

@@ -1,219 +0,0 @@
#! /usr/bin/env python
# encoding: UTF-8
# Thomas Nagy 2008-2010 (ita)

"""

Doxygen support

Variables passed to bld():
* doxyfile -- the Doxyfile to use
* doxy_tar -- destination archive for generated documentation (if desired)
* install_path -- where to install the documentation
* pars -- dictionary overriding doxygen configuration settings

When using this tool, the wscript will look like:

def options(opt):
opt.load('doxygen')

def configure(conf):
conf.load('doxygen')
# check conf.env.DOXYGEN, if it is mandatory

def build(bld):
if bld.env.DOXYGEN:
bld(features="doxygen", doxyfile='Doxyfile', ...)
"""

from fnmatch import fnmatchcase
import os, os.path, re, stat
from waflib import Task, Utils, Node, Logs, Errors
from waflib.TaskGen import feature

DOXY_STR = '"${DOXYGEN}" - '
DOXY_FMTS = 'html latex man rtf xml'.split()
DOXY_FILE_PATTERNS = '*.' + ' *.'.join('''
c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx hpp h++ idl odl cs php php3
inc m mm py f90
'''.split())

re_rl = re.compile(r'\\\r*\n', re.MULTILINE)
re_nl = re.compile(r'\r*\n', re.M)
def parse_doxy(txt):
tbl = {}
txt = re_rl.sub('', txt)
lines = re_nl.split(txt)
for x in lines:
x = x.strip()
if not x or x.startswith('#') or '=' not in x:
continue
if '+=' in x:
tmp = x.split('+=')
key = tmp[0].strip()
if key in tbl:
tbl[key] += ' ' + '+='.join(tmp[1:]).strip()
else:
tbl[key] = '+='.join(tmp[1:]).strip()
else:
tmp = x.split('=')
tbl[tmp[0].strip()] = '='.join(tmp[1:]).strip()
return tbl
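# Illustrative example (hypothetical Doxyfile fragment):
#   parse_doxy('INPUT = src\nINPUT += lib\nGENERATE_HTML = YES\n')
# returns {'INPUT': 'src lib', 'GENERATE_HTML': 'YES'}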

class doxygen(Task.Task):
vars = ['DOXYGEN', 'DOXYFLAGS']
color = 'BLUE'

def runnable_status(self):
'''
self.pars is populated in runnable_status, because this function runs
*before* both of its "consumers", scan() and run().

It also sets output_dir (a node) for the output.
'''

for x in self.run_after:
if not x.hasrun:
return Task.ASK_LATER

if not getattr(self, 'pars', None):
txt = self.inputs[0].read()
self.pars = parse_doxy(txt)
if self.pars.get('OUTPUT_DIRECTORY'):
# Use the path parsed from the Doxyfile as an absolute path
output_node = self.inputs[0].parent.get_bld().make_node(self.pars['OUTPUT_DIRECTORY'])
else:
# If no OUTPUT_DIRECTORY was specified in the Doxyfile, build the path from the Doxyfile name + '.doxy'
output_node = self.inputs[0].parent.get_bld().make_node(self.inputs[0].name + '.doxy')
output_node.mkdir()
self.pars['OUTPUT_DIRECTORY'] = output_node.abspath()

# Override with any parameters passed to the task generator
if getattr(self.generator, 'pars', None):
for k, v in self.generator.pars.items():
self.pars[k] = v

self.doxy_inputs = getattr(self, 'doxy_inputs', [])
if not self.pars.get('INPUT'):
self.doxy_inputs.append(self.inputs[0].parent)
else:
for i in self.pars.get('INPUT').split():
if os.path.isabs(i):
node = self.generator.bld.root.find_node(i)
else:
node = self.inputs[0].parent.find_node(i)
if not node:
self.generator.bld.fatal('Could not find the doxygen input %r' % i)
self.doxy_inputs.append(node)

if not getattr(self, 'output_dir', None):
bld = self.generator.bld
# Output path is always an absolute path as it was transformed above.
self.output_dir = bld.root.find_dir(self.pars['OUTPUT_DIRECTORY'])

self.signature()
return Task.Task.runnable_status(self)

def scan(self):
exclude_patterns = self.pars.get('EXCLUDE_PATTERNS','').split()
file_patterns = self.pars.get('FILE_PATTERNS','').split()
if not file_patterns:
file_patterns = DOXY_FILE_PATTERNS
if self.pars.get('RECURSIVE') == 'YES':
file_patterns = ["**/%s" % pattern for pattern in file_patterns]
nodes = []
names = []
for node in self.doxy_inputs:
if os.path.isdir(node.abspath()):
for m in node.ant_glob(incl=file_patterns, excl=exclude_patterns):
nodes.append(m)
else:
nodes.append(node)
return (nodes, names)

def run(self):
dct = self.pars.copy()
code = '\n'.join(['%s = %s' % (x, dct[x]) for x in self.pars])
code = code.encode() # for python 3
#fmt = DOXY_STR % (self.inputs[0].parent.abspath())
cmd = Utils.subst_vars(DOXY_STR, self.env)
env = self.env.env or None
proc = Utils.subprocess.Popen(cmd, shell=True, stdin=Utils.subprocess.PIPE, env=env, cwd=self.inputs[0].parent.abspath())
proc.communicate(code)
return proc.returncode

def post_run(self):
nodes = self.output_dir.ant_glob('**/*', quiet=True)
for x in nodes:
x.sig = Utils.h_file(x.abspath())
self.outputs += nodes
if getattr(self.generator, 'install_path', None):
if not getattr(self.generator, 'doxy_tar', None):
self.generator.bld.install_files(self.generator.install_path,
self.outputs,
postpone=False,
cwd=self.output_dir,
relative_trick=True)
return Task.Task.post_run(self)

class tar(Task.Task):
"quick tar creation"
run_str = '${TAR} ${TAROPTS} ${TGT} ${SRC}'
color = 'RED'
after = ['doxygen']
def runnable_status(self):
for x in getattr(self, 'input_tasks', []):
if not x.hasrun:
return Task.ASK_LATER

if not getattr(self, 'tar_done_adding', None):
# execute this only once
self.tar_done_adding = True
for x in getattr(self, 'input_tasks', []):
self.set_inputs(x.outputs)
if not self.inputs:
return Task.SKIP_ME
return Task.Task.runnable_status(self)

def __str__(self):
tgt_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.outputs])
return '%s: %s\n' % (self.__class__.__name__, tgt_str)

@feature('doxygen')
def process_doxy(self):
if not getattr(self, 'doxyfile', None):
self.bld.fatal('no doxyfile given')

node = self.doxyfile
if not isinstance(node, Node.Node):
node = self.path.find_resource(node)
if not node:
raise ValueError('doxygen file not found')

# the task instance
dsk = self.create_task('doxygen', node)

if getattr(self, 'doxy_tar', None):
tsk = self.create_task('tar')
tsk.input_tasks = [dsk]
tsk.set_outputs(self.path.find_or_declare(self.doxy_tar))
if self.doxy_tar.endswith('bz2'):
tsk.env['TAROPTS'] = ['cjf']
elif self.doxy_tar.endswith('gz'):
tsk.env['TAROPTS'] = ['czf']
else:
tsk.env['TAROPTS'] = ['cf']
if getattr(self, 'install_path', None):
self.bld.install_files(self.install_path, tsk.outputs)

def configure(conf):
'''
Check if doxygen and tar commands are present in the system

If the commands are present, then conf.env.DOXYGEN and conf.env.TAR
variables will be set. Detection can be controlled by setting DOXYGEN and
TAR environmental variables.
'''

conf.find_program('doxygen', var='DOXYGEN', mandatory=False)
conf.find_program('tar', var='TAR', mandatory=False)

+ 0
- 87
waflib/extras/dpapi.py

@@ -1,87 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# Matt Clarkson, 2012

'''
DPAPI access library (http://msdn.microsoft.com/en-us/library/ms995355.aspx)
This file uses code originally created by Crusher Joe:
http://article.gmane.org/gmane.comp.python.ctypes/420
And modified by Wayne Koorts:
http://stackoverflow.com/questions/463832/using-dpapi-with-python
'''

from ctypes import windll, byref, cdll, Structure, POINTER, c_char, c_buffer
from ctypes.wintypes import DWORD
from waflib.Configure import conf

LocalFree = windll.kernel32.LocalFree
memcpy = cdll.msvcrt.memcpy
CryptProtectData = windll.crypt32.CryptProtectData
CryptUnprotectData = windll.crypt32.CryptUnprotectData
CRYPTPROTECT_UI_FORBIDDEN = 0x01
try:
extra_entropy = 'cl;ad13 \0al;323kjd #(adl;k$#ajsd'.encode('ascii')
except AttributeError:
extra_entropy = 'cl;ad13 \0al;323kjd #(adl;k$#ajsd'

class DATA_BLOB(Structure):
_fields_ = [
('cbData', DWORD),
('pbData', POINTER(c_char))
]

def get_data(blob_out):
cbData = int(blob_out.cbData)
pbData = blob_out.pbData
buffer = c_buffer(cbData)
memcpy(buffer, pbData, cbData)
LocalFree(pbData)
return buffer.raw

@conf
def dpapi_encrypt_data(self, input_bytes, entropy = extra_entropy):
'''
Encrypts data and returns byte string

:param input_bytes: The data to be encrypted
:type input_bytes: String or Bytes
:param entropy: Extra entropy to add to the encryption process (optional)
:type entropy: String or Bytes
'''
if not isinstance(input_bytes, bytes) or not isinstance(entropy, bytes):
self.fatal('The inputs to dpapi must be bytes')
buffer_in = c_buffer(input_bytes, len(input_bytes))
buffer_entropy = c_buffer(entropy, len(entropy))
blob_in = DATA_BLOB(len(input_bytes), buffer_in)
blob_entropy = DATA_BLOB(len(entropy), buffer_entropy)
blob_out = DATA_BLOB()

if CryptProtectData(byref(blob_in), 'python_data', byref(blob_entropy),
None, None, CRYPTPROTECT_UI_FORBIDDEN, byref(blob_out)):
return get_data(blob_out)
else:
self.fatal('Failed to encrypt data')

@conf
def dpapi_decrypt_data(self, encrypted_bytes, entropy = extra_entropy):
'''
Decrypts data and returns byte string

:param encrypted_bytes: The encrypted data
:type encrypted_bytes: Bytes
:param entropy: Extra entropy to add to the encryption process (optional)
:type entropy: String or Bytes
'''
if not isinstance(encrypted_bytes, bytes) or not isinstance(entropy, bytes):
self.fatal('The inputs to dpapi must be bytes')
buffer_in = c_buffer(encrypted_bytes, len(encrypted_bytes))
buffer_entropy = c_buffer(entropy, len(entropy))
blob_in = DATA_BLOB(len(encrypted_bytes), buffer_in)
blob_entropy = DATA_BLOB(len(entropy), buffer_entropy)
blob_out = DATA_BLOB()
if CryptUnprotectData(byref(blob_in), None, byref(blob_entropy), None,
None, CRYPTPROTECT_UI_FORBIDDEN, byref(blob_out)):
return get_data(blob_out)
else:
self.fatal('Failed to decrypt data')
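# Illustrative round-trip, assuming a Configure context 'conf' on Windows
# (the secret value is hypothetical):
#   token = conf.dpapi_encrypt_data(b'my secret')
#   assert conf.dpapi_decrypt_data(token) == b'my secret'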


+ 0
- 317
waflib/extras/eclipse.py

@@ -1,317 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# Eclipse CDT 5.0 generator for Waf
# Richard Quirk 2009-2011 (New BSD License)
# Thomas Nagy 2011 (ported to Waf 1.6)

"""
Usage:

def options(opt):
opt.load('eclipse')

$ waf configure eclipse
"""

import sys, os
from waflib import Utils, Logs, Context, Options, Build, TaskGen, Scripting
from xml.dom.minidom import Document

STANDARD_INCLUDES = [ '/usr/local/include', '/usr/include' ]

oe_cdt = 'org.eclipse.cdt'
cdt_mk = oe_cdt + '.make.core'
cdt_core = oe_cdt + '.core'
cdt_bld = oe_cdt + '.build.core'

class eclipse(Build.BuildContext):
cmd = 'eclipse'
fun = Scripting.default_cmd

def execute(self):
"""
Entry point
"""
self.restore()
if not self.all_envs:
self.load_envs()
self.recurse([self.run_dir])

appname = getattr(Context.g_module, Context.APPNAME, os.path.basename(self.srcnode.abspath()))
self.create_cproject(appname, pythonpath=self.env['ECLIPSE_PYTHON_PATH'])

def create_cproject(self, appname, workspace_includes=[], pythonpath=[]):
"""
Create the Eclipse CDT .project and .cproject files
@param appname The name that will appear in the Project Explorer
@param workspace_includes Optional project includes to prevent
"Unresolved Inclusion" errors in the Eclipse editor
@param pythonpath Optional project-specific Python paths
"""
source_dirs = []
cpppath = self.env['CPPPATH']
if sys.platform != 'win32':
cpppath += STANDARD_INCLUDES
Logs.warn('Generating Eclipse CDT project files')

for g in self.groups:
for tg in g:
if not isinstance(tg, TaskGen.task_gen):
continue

tg.post()
if not getattr(tg, 'link_task', None):
continue

l = Utils.to_list(getattr(tg, "includes", ''))
sources = Utils.to_list(getattr(tg, 'source', ''))
features = Utils.to_list(getattr(tg, 'features', ''))

is_cc = 'c' in features or 'cxx' in features

bldpath = tg.path.bldpath()

base = os.path.normpath(os.path.join(self.bldnode.name, tg.path.srcpath()))

if is_cc:
sources_dirs = set([src.parent for src in tg.to_nodes(sources)])

incnodes = tg.to_incnodes(tg.to_list(getattr(tg, 'includes', [])) + tg.env['INCLUDES'])
for p in incnodes:
path = p.path_from(self.srcnode)
workspace_includes.append(path)

if is_cc and path not in source_dirs:
source_dirs.append(path)

project = self.impl_create_project(sys.executable, appname)
self.srcnode.make_node('.project').write(project.toprettyxml())

waf = os.path.abspath(sys.argv[0])
project = self.impl_create_cproject(sys.executable, waf, appname, workspace_includes, cpppath, source_dirs)
self.srcnode.make_node('.cproject').write(project.toprettyxml())

project = self.impl_create_pydevproject(appname, sys.path, pythonpath)
self.srcnode.make_node('.pydevproject').write(project.toprettyxml())

def impl_create_project(self, executable, appname):
doc = Document()
projectDescription = doc.createElement('projectDescription')
self.add(doc, projectDescription, 'name', appname)
self.add(doc, projectDescription, 'comment')
self.add(doc, projectDescription, 'projects')
buildSpec = self.add(doc, projectDescription, 'buildSpec')
buildCommand = self.add(doc, buildSpec, 'buildCommand')
self.add(doc, buildCommand, 'name', oe_cdt + '.managedbuilder.core.genmakebuilder')
self.add(doc, buildCommand, 'triggers', 'clean,full,incremental,')
arguments = self.add(doc, buildCommand, 'arguments')
# the default make-style targets are overwritten by the .cproject values
dictionaries = {
cdt_mk + '.contents': cdt_mk + '.activeConfigSettings',
cdt_mk + '.enableAutoBuild': 'false',
cdt_mk + '.enableCleanBuild': 'true',
cdt_mk + '.enableFullBuild': 'true',
}
for k, v in dictionaries.items():
self.addDictionary(doc, arguments, k, v)

natures = self.add(doc, projectDescription, 'natures')
nature_list = """
core.ccnature
managedbuilder.core.ScannerConfigNature
managedbuilder.core.managedBuildNature
core.cnature
""".split()
for n in nature_list:
self.add(doc, natures, 'nature', oe_cdt + '.' + n)

self.add(doc, natures, 'nature', 'org.python.pydev.pythonNature')

doc.appendChild(projectDescription)
return doc

def impl_create_cproject(self, executable, waf, appname, workspace_includes, cpppath, source_dirs=[]):
doc = Document()
doc.appendChild(doc.createProcessingInstruction('fileVersion', '4.0.0'))
cconf_id = cdt_core + '.default.config.1'
cproject = doc.createElement('cproject')
storageModule = self.add(doc, cproject, 'storageModule',
{'moduleId': cdt_core + '.settings'})
cconf = self.add(doc, storageModule, 'cconfiguration', {'id':cconf_id})

storageModule = self.add(doc, cconf, 'storageModule',
{'buildSystemId': oe_cdt + '.managedbuilder.core.configurationDataProvider',
'id': cconf_id,
'moduleId': cdt_core + '.settings',
'name': 'Default'})

self.add(doc, storageModule, 'externalSettings')

extensions = self.add(doc, storageModule, 'extensions')
extension_list = """
VCErrorParser
MakeErrorParser
GCCErrorParser
GASErrorParser
GLDErrorParser
""".split()
ext = self.add(doc, extensions, 'extension',
{'id': cdt_core + '.ELF', 'point':cdt_core + '.BinaryParser'})
for e in extension_list:
ext = self.add(doc, extensions, 'extension',
{'id': cdt_core + '.' + e, 'point':cdt_core + '.ErrorParser'})

storageModule = self.add(doc, cconf, 'storageModule',
{'moduleId': 'cdtBuildSystem', 'version': '4.0.0'})
config = self.add(doc, storageModule, 'configuration',
{'artifactName': appname,
'id': cconf_id,
'name': 'Default',
'parent': cdt_bld + '.prefbase.cfg'})
folderInfo = self.add(doc, config, 'folderInfo',
{'id': cconf_id+'.', 'name': '/', 'resourcePath': ''})

toolChain = self.add(doc, folderInfo, 'toolChain',
{'id': cdt_bld + '.prefbase.toolchain.1',
'name': 'No ToolChain',
'resourceTypeBasedDiscovery': 'false',
'superClass': cdt_bld + '.prefbase.toolchain'})

targetPlatform = self.add(doc, toolChain, 'targetPlatform',
{ 'binaryParser': 'org.eclipse.cdt.core.ELF',
'id': cdt_bld + '.prefbase.toolchain.1', 'name': ''})

waf_build = '"%s" %s'%(waf, eclipse.fun)
waf_clean = '"%s" clean'%(waf)
builder = self.add(doc, toolChain, 'builder',
{'autoBuildTarget': waf_build,
'command': executable,
'enableAutoBuild': 'false',
'cleanBuildTarget': waf_clean,
'enableIncrementalBuild': 'true',
'id': cdt_bld + '.settings.default.builder.1',
'incrementalBuildTarget': waf_build,
'managedBuildOn': 'false',
'name': 'Gnu Make Builder',
'superClass': cdt_bld + '.settings.default.builder'})

for tool_name in ("Assembly", "GNU C++", "GNU C"):
tool = self.add(doc, toolChain, 'tool',
{'id': cdt_bld + '.settings.holder.1',
'name': tool_name,
'superClass': cdt_bld + '.settings.holder'})
if cpppath or workspace_includes:
incpaths = cdt_bld + '.settings.holder.incpaths'
option = self.add(doc, tool, 'option',
{'id': incpaths+'.1',
'name': 'Include Paths',
'superClass': incpaths,
'valueType': 'includePath'})
for i in workspace_includes:
self.add(doc, option, 'listOptionValue',
{'builtIn': 'false',
'value': '"${workspace_loc:/%s/%s}"'%(appname, i)})
for i in cpppath:
self.add(doc, option, 'listOptionValue',
{'builtIn': 'false',
'value': '"%s"'%(i)})
if tool_name == "GNU C++" or tool_name == "GNU C":
self.add(doc,tool,'inputType',{ 'id':'org.eclipse.cdt.build.core.settings.holder.inType.1', \
'languageId':'org.eclipse.cdt.core.gcc','languageName':tool_name, \
'sourceContentType':'org.eclipse.cdt.core.cSource,org.eclipse.cdt.core.cHeader', \
'superClass':'org.eclipse.cdt.build.core.settings.holder.inType' })
if source_dirs:
sourceEntries = self.add(doc, config, 'sourceEntries')
for i in source_dirs:
self.add(doc, sourceEntries, 'entry',
{'excluding': i,
'flags': 'VALUE_WORKSPACE_PATH|RESOLVED',
'kind': 'sourcePath',
'name': ''})
self.add(doc, sourceEntries, 'entry',
{
'flags': 'VALUE_WORKSPACE_PATH|RESOLVED',
'kind': 'sourcePath',
'name': i})

storageModule = self.add(doc, cconf, 'storageModule',
{'moduleId': cdt_mk + '.buildtargets'})
buildTargets = self.add(doc, storageModule, 'buildTargets')
def addTargetWrap(name, runAll):
return self.addTarget(doc, buildTargets, executable, name,
'"%s" %s'%(waf, name), runAll)
addTargetWrap('configure', True)
addTargetWrap('dist', False)
addTargetWrap('install', False)
addTargetWrap('check', False)

storageModule = self.add(doc, cproject, 'storageModule',
{'moduleId': 'cdtBuildSystem',
'version': '4.0.0'})

project = self.add(doc, storageModule, 'project',
{'id': '%s.null.1'%appname, 'name': appname})

doc.appendChild(cproject)
return doc

def impl_create_pydevproject(self, appname, system_path, user_path):
# create a pydevproject file
doc = Document()
doc.appendChild(doc.createProcessingInstruction('eclipse-pydev', 'version="1.0"'))
pydevproject = doc.createElement('pydev_project')
prop = self.add(doc, pydevproject,
'pydev_property',
'python %d.%d'%(sys.version_info[0], sys.version_info[1]))
prop.setAttribute('name', 'org.python.pydev.PYTHON_PROJECT_VERSION')
prop = self.add(doc, pydevproject, 'pydev_property', 'Default')
prop.setAttribute('name', 'org.python.pydev.PYTHON_PROJECT_INTERPRETER')
# add waf's paths
wafadmin = [p for p in system_path if p.find('wafadmin') != -1]
if wafadmin:
prop = self.add(doc, pydevproject, 'pydev_pathproperty',
{'name':'org.python.pydev.PROJECT_EXTERNAL_SOURCE_PATH'})
for i in wafadmin:
self.add(doc, prop, 'path', i)
if user_path:
prop = self.add(doc, pydevproject, 'pydev_pathproperty',
{'name':'org.python.pydev.PROJECT_SOURCE_PATH'})
for i in user_path:
self.add(doc, prop, 'path', '/'+appname+'/'+i)

doc.appendChild(pydevproject)
return doc

def addDictionary(self, doc, parent, k, v):
dictionary = self.add(doc, parent, 'dictionary')
self.add(doc, dictionary, 'key', k)
self.add(doc, dictionary, 'value', v)
return dictionary

def addTarget(self, doc, buildTargets, executable, name, buildTarget, runAllBuilders=True):
target = self.add(doc, buildTargets, 'target',
{'name': name,
'path': '',
'targetID': oe_cdt + '.build.MakeTargetBuilder'})
self.add(doc, target, 'buildCommand', executable)
self.add(doc, target, 'buildArguments', None)
self.add(doc, target, 'buildTarget', buildTarget)
self.add(doc, target, 'stopOnError', 'true')
self.add(doc, target, 'useDefaultCommand', 'false')
self.add(doc, target, 'runAllBuilders', str(runAllBuilders).lower())

def add(self, doc, parent, tag, value = None):
el = doc.createElement(tag)
if value:
if isinstance(value, str):
el.appendChild(doc.createTextNode(value))
elif isinstance(value, dict):
self.setAttributes(el, value)
parent.appendChild(el)
return el

def setAttributes(self, node, attrs):
for k, v in attrs.items():
node.setAttribute(k, v)


+ 0
- 19
waflib/extras/erlang.py

@@ -1,19 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2010 (ita)

"""
Erlang support
"""

from waflib import TaskGen

TaskGen.declare_chain(name = 'erlc',
rule = '${ERLC} ${ERLC_FLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}',
ext_in = '.erl',
ext_out = '.beam')

def configure(conf):
conf.find_program('erlc', var='ERLC')
conf.env.ERLC_FLAGS = []
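# Illustrative wscript usage (file name is hypothetical); the chain declared
# above turns each .erl source into a .beam file:
#   def build(bld):
#       bld(source='foo.erl')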


+ 0
- 33
waflib/extras/fc_bgxlf.py

@@ -1,33 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# harald at klimachs.de

import re
from waflib.Tools import fc, fc_config, fc_scan
from waflib.Configure import conf

from waflib.Tools.compiler_fc import fc_compiler
fc_compiler['linux'].insert(0, 'fc_bgxlf')

@conf
def find_bgxlf(conf):
fc = conf.find_program(['bgxlf2003_r','bgxlf2003'], var='FC')
conf.get_xlf_version(fc)
conf.env.FC_NAME = 'BGXLF'

@conf
def bg_flags(self):
self.env.SONAME_ST = ''
self.env.FCSHLIB_MARKER = ''
self.env.FCSTLIB_MARKER = ''
self.env.FCFLAGS_fcshlib = ['-fPIC']
self.env.LINKFLAGS_fcshlib = ['-G', '-Wl,-bexpfull']

def configure(conf):
conf.find_bgxlf()
conf.find_ar()
conf.fc_flags()
conf.fc_add_flags()
conf.xlf_flags()
conf.bg_flags()


+ 0
- 50
waflib/extras/fc_cray.py

@@ -1,50 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# harald at klimachs.de

import re
from waflib import Utils
from waflib.Tools import fc, fc_config, fc_scan
from waflib.Configure import conf

from waflib.Tools.compiler_fc import fc_compiler
fc_compiler['linux'].append('fc_cray')

@conf
def find_crayftn(conf):
"""Find the Cray fortran compiler (will look in the environment variable 'FC')"""
fc = conf.find_program(['crayftn'], var='FC')
conf.get_crayftn_version(fc)
conf.env.FC_NAME = 'CRAY'
conf.env.FC_MOD_CAPITALIZATION = 'UPPER.mod'

@conf
def crayftn_flags(conf):
v = conf.env
v['_FCMODOUTFLAGS'] = ['-em', '-J.'] # enable module files and put them in the current directory
v['FCFLAGS_DEBUG'] = ['-m1'] # more verbose compiler warnings
v['FCFLAGS_fcshlib'] = ['-h pic']
v['LINKFLAGS_fcshlib'] = ['-h shared']

v['FCSTLIB_MARKER'] = '-h static'
v['FCSHLIB_MARKER'] = '-h dynamic'

@conf
def get_crayftn_version(conf, fc):
version_re = re.compile(r"Cray Fortran\s*:\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
cmd = fc + ['-V']
out,err = fc_config.getoutput(conf, cmd, stdin=False)
if out: match = version_re(out)
else: match = version_re(err)
if not match:
conf.fatal('Could not determine the Cray Fortran compiler version.')
k = match.groupdict()
conf.env['FC_VERSION'] = (k['major'], k['minor'])
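# For reference, the pattern above matches '-V' output of the form
# (version numbers are hypothetical):
#   Cray Fortran : Version 8.3.4 ...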

def configure(conf):
conf.find_crayftn()
conf.find_ar()
conf.fc_flags()
conf.fc_add_flags()
conf.crayftn_flags()


+ 0
- 59
waflib/extras/fc_nag.py

@@ -1,59 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# harald at klimachs.de

import re
from waflib import Utils
from waflib.Tools import fc,fc_config,fc_scan
from waflib.Configure import conf

from waflib.Tools.compiler_fc import fc_compiler
fc_compiler['linux'].insert(0, 'fc_nag')

@conf
def find_nag(conf):
"""Find the NAG Fortran Compiler (will look in the environment variable 'FC')"""

fc = conf.find_program(['nagfor'], var='FC')
conf.get_nag_version(fc)
conf.env.FC_NAME = 'NAG'
conf.env.FC_MOD_CAPITALIZATION = 'lower'

@conf
def nag_flags(conf):
v = conf.env
v['FCFLAGS_DEBUG'] = ['-C=all']

@conf
def nag_modifier_platform(conf):
dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
nag_modifier_func = getattr(conf, 'nag_modifier_' + dest_os, None)
if nag_modifier_func:
nag_modifier_func()

@conf
def get_nag_version(conf, fc):
"""Get the NAG compiler version"""

version_re = re.compile(r"^NAG Fortran Compiler *Release *(?P<major>\d*)\.(?P<minor>\d*)", re.M).search
cmd = fc + ['-v']

out, err = fc_config.getoutput(conf,cmd,stdin=False)
if out:
match = version_re(out)
if not match:
match = version_re(err)
else: match = version_re(err)
if not match:
conf.fatal('Could not determine the NAG version.')
k = match.groupdict()
conf.env['FC_VERSION'] = (k['major'], k['minor'])

def configure(conf):
conf.find_nag()
conf.find_ar()
conf.fc_flags()
conf.fc_add_flags()
conf.nag_flags()
conf.nag_modifier_platform()


+ 0
- 57
waflib/extras/fc_nec.py

@@ -1,57 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# harald at klimachs.de

import re
from waflib import Utils
from waflib.Tools import fc, fc_config, fc_scan
from waflib.Configure import conf

from waflib.Tools.compiler_fc import fc_compiler
fc_compiler['linux'].append('fc_nec')

@conf
def find_sxfc(conf):
"""Find the NEC fortran compiler (will look in the environment variable 'FC')"""
fc = conf.find_program(['sxf90','sxf03'], var='FC')
conf.get_sxfc_version(fc)
conf.env.FC_NAME = 'NEC'
conf.env.FC_MOD_CAPITALIZATION = 'lower'

@conf
def sxfc_flags(conf):
v = conf.env
	v['_FCMODOUTFLAGS'] = [] # no extra flags needed to enable module files
	v['FCFLAGS_DEBUG'] = [] # no extra flags needed for verbose warnings
v['FCFLAGS_fcshlib'] = []
v['LINKFLAGS_fcshlib'] = []

v['FCSTLIB_MARKER'] = ''
v['FCSHLIB_MARKER'] = ''

@conf
def get_sxfc_version(conf, fc):
version_re = re.compile(r"FORTRAN90/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
cmd = fc + ['-V']
out,err = fc_config.getoutput(conf, cmd, stdin=False)
if out: match = version_re(out)
else: match = version_re(err)
if not match:
version_re=re.compile(r"NEC Fortran 2003 Compiler for\s*(?P<major>\S*)\s*\(c\)\s*(?P<minor>\d*)",re.I).search
if out: match = version_re(out)
else: match = version_re(err)
if not match:
conf.fatal('Could not determine the NEC Fortran compiler version.')
k = match.groupdict()
conf.env['FC_VERSION'] = (k['major'], k['minor'])

def configure(conf):
conf.find_sxfc()
conf.find_program('sxar',var='AR')
conf.add_os_flags('ARFLAGS')
if not conf.env.ARFLAGS:
conf.env.ARFLAGS=['rcs']

conf.fc_flags()
conf.fc_add_flags()
conf.sxfc_flags()

+ 0
- 56
waflib/extras/fc_open64.py View File

@@ -1,56 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# harald at klimachs.de

import re
from waflib import Utils
from waflib.Tools import fc,fc_config,fc_scan
from waflib.Configure import conf

from waflib.Tools.compiler_fc import fc_compiler
fc_compiler['linux'].insert(0, 'fc_open64')

@conf
def find_openf95(conf):
"""Find the Open64 Fortran Compiler (will look in the environment variable 'FC')"""

fc = conf.find_program(['openf95', 'openf90'], var='FC')
conf.get_open64_version(fc)
conf.env.FC_NAME = 'OPEN64'
conf.env.FC_MOD_CAPITALIZATION = 'UPPER.mod'

@conf
def openf95_flags(conf):
v = conf.env
v['FCFLAGS_DEBUG'] = ['-fullwarn']

@conf
def openf95_modifier_platform(conf):
dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
openf95_modifier_func = getattr(conf, 'openf95_modifier_' + dest_os, None)
if openf95_modifier_func:
openf95_modifier_func()

@conf
def get_open64_version(conf, fc):
"""Get the Open64 compiler version"""

version_re = re.compile(r"Open64 Compiler Suite: *Version *(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
cmd = fc + ['-version']

out, err = fc_config.getoutput(conf,cmd,stdin=False)
if out: match = version_re(out)
else: match = version_re(err)
if not match:
conf.fatal('Could not determine the Open64 version.')
k = match.groupdict()
conf.env['FC_VERSION'] = (k['major'], k['minor'])

def configure(conf):
conf.find_openf95()
conf.find_ar()
conf.fc_flags()
conf.fc_add_flags()
conf.openf95_flags()
conf.openf95_modifier_platform()


+ 0
- 65
waflib/extras/fc_pgfortran.py View File

@@ -1,65 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# harald at klimachs.de

import re
from waflib import Utils
from waflib.Tools import fc, fc_config, fc_scan
from waflib.Configure import conf

from waflib.Tools.compiler_fc import fc_compiler
fc_compiler['linux'].append('fc_pgfortran')

@conf
def find_pgfortran(conf):
"""Find the PGI fortran compiler (will look in the environment variable 'FC')"""
fc = conf.find_program(['pgfortran', 'pgf95', 'pgf90'], var='FC')
conf.get_pgfortran_version(fc)
conf.env.FC_NAME = 'PGFC'

@conf
def pgfortran_flags(conf):
v = conf.env
v['FCFLAGS_fcshlib'] = ['-shared']
	v['FCFLAGS_DEBUG'] = ['-Minform=inform', '-Mstandard'] # all informational messages and standard-conformance warnings
v['FCSTLIB_MARKER'] = '-Bstatic'
v['FCSHLIB_MARKER'] = '-Bdynamic'
v['SONAME_ST'] = '-soname %s'

@conf
def get_pgfortran_version(conf,fc):
version_re = re.compile(r"The Portland Group", re.I).search
cmd = fc + ['-V']
out,err = fc_config.getoutput(conf, cmd, stdin=False)
if out: match = version_re(out)
else: match = version_re(err)
if not match:
conf.fatal('Could not verify PGI signature')
cmd = fc + ['-help=variable']
out,err = fc_config.getoutput(conf, cmd, stdin=False)
if out.find('COMPVER')<0:
	conf.fatal('Could not determine the PGI Fortran compiler version')
k = {}
prevk = ''
out = out.splitlines()
for line in out:
lst = line.partition('=')
if lst[1] == '=':
key = lst[0].rstrip()
if key == '': key = prevk
val = lst[2].rstrip()
k[key] = val
else: prevk = line.partition(' ')[0]
	conf.env['FC_VERSION'] = tuple(k['COMPVER'].split('.'))

def configure(conf):
conf.find_pgfortran()
conf.find_ar()
conf.fc_flags()
conf.fc_add_flags()
conf.pgfortran_flags()


+ 0
- 60
waflib/extras/fc_solstudio.py View File

@@ -1,60 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# harald at klimachs.de

import re
from waflib import Utils
from waflib.Tools import fc,fc_config,fc_scan
from waflib.Configure import conf

from waflib.Tools.compiler_fc import fc_compiler
fc_compiler['linux'].append('fc_solstudio')

@conf
def find_solstudio(conf):
"""Find the Solaris Studio compiler (will look in the environment variable 'FC')"""

fc = conf.find_program(['sunf95', 'f95', 'sunf90', 'f90'], var='FC')
conf.get_solstudio_version(fc)
conf.env.FC_NAME = 'SOL'

@conf
def solstudio_flags(conf):
v = conf.env
v['FCFLAGS_fcshlib'] = ['-Kpic']
v['FCFLAGS_DEBUG'] = ['-w3']
v['LINKFLAGS_fcshlib'] = ['-G']
v['FCSTLIB_MARKER'] = '-Bstatic'
v['FCSHLIB_MARKER'] = '-Bdynamic'
v['SONAME_ST'] = '-h %s'

@conf
def solstudio_modifier_platform(conf):
dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
solstudio_modifier_func = getattr(conf, 'solstudio_modifier_' + dest_os, None)
if solstudio_modifier_func:
solstudio_modifier_func()

@conf
def get_solstudio_version(conf, fc):
"""Get the compiler version"""

version_re = re.compile(r"Sun Fortran 95 *(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
cmd = fc + ['-V']

out, err = fc_config.getoutput(conf,cmd,stdin=False)
if out: match = version_re(out)
else: match = version_re(err)
if not match:
conf.fatal('Could not determine the Sun Studio Fortran version.')
k = match.groupdict()
conf.env['FC_VERSION'] = (k['major'], k['minor'])

def configure(conf):
conf.find_solstudio()
conf.find_ar()
conf.fc_flags()
conf.fc_add_flags()
conf.solstudio_flags()
conf.solstudio_modifier_platform()


+ 0
- 63
waflib/extras/fc_xlf.py View File

@@ -1,63 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# harald at klimachs.de

import re
from waflib import Utils,Errors
from waflib.Tools import fc,fc_config,fc_scan
from waflib.Configure import conf

from waflib.Tools.compiler_fc import fc_compiler
fc_compiler['aix'].insert(0, 'fc_xlf')

@conf
def find_xlf(conf):
"""Find the xlf program (will look in the environment variable 'FC')"""

fc = conf.find_program(['xlf2003_r', 'xlf2003', 'xlf95_r', 'xlf95', 'xlf90_r', 'xlf90', 'xlf_r', 'xlf'], var='FC')
conf.get_xlf_version(fc)
conf.env.FC_NAME='XLF'

@conf
def xlf_flags(conf):
v = conf.env
v['FCDEFINES_ST'] = '-WF,-D%s'
v['FCFLAGS_fcshlib'] = ['-qpic=small']
v['FCFLAGS_DEBUG'] = ['-qhalt=w']
v['LINKFLAGS_fcshlib'] = ['-Wl,-shared']

@conf
def xlf_modifier_platform(conf):
dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
xlf_modifier_func = getattr(conf, 'xlf_modifier_' + dest_os, None)
if xlf_modifier_func:
xlf_modifier_func()

@conf
def get_xlf_version(conf, fc):
"""Get the compiler version"""

cmd = fc + ['-qversion']
try:
out, err = conf.cmd_and_log(cmd, output=0)
except Errors.WafError:
conf.fatal('Could not find xlf %r' % cmd)

for v in (r"IBM XL Fortran.* V(?P<major>\d*)\.(?P<minor>\d*)",):
version_re = re.compile(v, re.I).search
match = version_re(out or err)
if match:
k = match.groupdict()
conf.env['FC_VERSION'] = (k['major'], k['minor'])
break
else:
conf.fatal('Could not determine the XLF version.')

def configure(conf):
conf.find_xlf()
conf.find_ar()
conf.fc_flags()
conf.fc_add_flags()
conf.xlf_flags()
conf.xlf_modifier_platform()


+ 0
- 150
waflib/extras/file_to_object.py View File

@@ -1,150 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Tool to embed file into objects

__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
__copyright__ = "Jérôme Carretero, 2014"

"""

This tool embeds file contents in object files (.o).
It is not exactly portable, and the file contents are reachable
in various non-portable ways.
The goal here is to provide a functional interface to the embedding
of file data in objects.
See the ``playground/embedded_resources`` example for an example.

Usage::

bld(
name='pipeline',
# ^ Reference this in use="..." for things using the generated code
features='file_to_object',
source='some.file',
# ^ Name of the file to embed in binary section.
)
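
The generated object exposes start/end/size symbols; C code can reach the
embedded data through declarations like the following (a sketch; the symbol
names are derived from the source file name, here ``some.file``)::

	extern char const _binary_some_file_start[];
	extern char const _binary_some_file_end[];
	extern unsigned long _binary_some_file_size;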

Known issues:

- Currently only handles ELF files with GNU ld.

- Destination is named like source, with extension renamed to .o
eg. some.file -> some.o

"""

import os, binascii

from waflib import Task, Utils, TaskGen, Errors


def filename_c_escape(x):
return x.replace("\\", "\\\\")

class file_to_object_s(Task.Task):
color = 'CYAN'
dep_vars = ('DEST_CPU', 'DEST_BINFMT')

def run(self):
name = []
		for x in self.inputs[0].name:
if x.isalnum():
name.append(x)
else:
name.append('_')
file = self.inputs[0].abspath()
size = os.path.getsize(file)
if self.env.DEST_CPU in ('x86_64', 'ia', 'aarch64'):
unit = 'quad'
align = 8
elif self.env.DEST_CPU in ('x86','arm', 'thumb', 'm68k'):
unit = 'long'
align = 4
else:
raise Errors.WafError("Unsupported DEST_CPU, please report bug!")

file = filename_c_escape(file)
name = "_binary_" + "".join(name)
rodata = ".section .rodata"
if self.env.DEST_BINFMT == "mac-o":
name = "_" + name
rodata = ".section __TEXT,__const"

with open(self.outputs[0].abspath(), 'w') as f:
f.write(\
"""
.global %(name)s_start
.global %(name)s_end
.global %(name)s_size
%(rodata)s
%(name)s_start:
.incbin "%(file)s"
%(name)s_end:
.align %(align)d
%(name)s_size:
.%(unit)s 0x%(size)x
""" % locals())

class file_to_object_c(Task.Task):
color = 'CYAN'
def run(self):
name = []
		for x in self.inputs[0].name:
if x.isalnum():
name.append(x)
else:
name.append('_')
file = self.inputs[0].abspath()
size = os.path.getsize(file)

name = "_binary_" + "".join(name)

		# read the file as raw bytes and render a comma-separated list of hex byte literals
		data = binascii.hexlify(self.inputs[0].read('rb'))
		if not isinstance(data, str):
			data = data.decode('ascii')
		data = [ ("0x%s" % (data[i:i+2])) for i in range(0, len(data), 2) ]
		data = ",\n ".join(data)

		# write the generated C file once, defining the start/end/size symbols
		with open(self.outputs[0].abspath(), 'w') as f:
			f.write(\
"""
unsigned long %(name)s_size = %(size)dL;
char const %(name)s_start[] = {
 %(data)s
};
char const %(name)s_end[] = {
};
""" % locals())

@TaskGen.feature('file_to_object')
@TaskGen.before_method('process_source')
def tg_file_to_object(self):
bld = self.bld
sources = self.to_nodes(self.source)
targets = []
for src in sources:
if bld.env.F2O_METHOD == ["asm"]:
tgt = src.parent.find_or_declare(src.name + '.f2o.s')
task = self.create_task('file_to_object_s',
src, tgt, cwd=src.parent.abspath())
else:
tgt = src.parent.find_or_declare(src.name + '.f2o.c')
task = self.create_task('file_to_object_c',
src, tgt, cwd=src.parent.abspath())
targets.append(tgt)
self.source = targets

def configure(conf):
conf.load('gas')
conf.env.F2O_METHOD = ["asm"]


+ 0
- 30
waflib/extras/fluid.py View File

@@ -1,30 +0,0 @@
#!/usr/bin/python
# encoding: utf-8
# Grygoriy Fuchedzhy 2009

"""
Compile fluid files (fltk graphic library). Use the 'fluid' feature in conjunction with the 'cxx' feature.
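
A minimal wscript sketch (file names are hypothetical)::

	def configure(conf):
		conf.load('compiler_cxx fluid')

	def build(bld):
		bld(features='cxx cxxprogram', source='gui.fl main.cpp', target='app', use='FLTK')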
"""

from waflib import Task
from waflib.TaskGen import extension

class fluid(Task.Task):
color = 'BLUE'
ext_out = ['.h']
run_str = '${FLUID} -c -o ${TGT[0].abspath()} -h ${TGT[1].abspath()} ${SRC}'

@extension('.fl')
def fluid(self, node):
"""add the .fl to the source list; the cxx file generated will be compiled when possible"""
cpp = node.change_ext('.cpp')
hpp = node.change_ext('.hpp')
self.create_task('fluid', node, [cpp, hpp])

if 'cxx' in self.features:
self.source.append(cpp)

def configure(conf):
conf.find_program('fluid', var='FLUID')
conf.check_cfg(path='fltk-config', package='', args='--cxxflags --ldflags', uselib_store='FLTK', mandatory=True)


+ 0
- 74
waflib/extras/freeimage.py View File

@@ -1,74 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
#
# written by Sylvain Rouquette, 2011

'''
To add the freeimage tool to the waf file:
$ ./waf-light --tools=compat15,freeimage
or, if you have waf >= 1.6.2
$ ./waf update --files=freeimage

The wscript will look like:

def options(opt):
opt.load('compiler_cxx freeimage')

def configure(conf):
conf.load('compiler_cxx freeimage')

# you can call check_freeimage with some parameters.
# It's optional on Linux, it's 'mandatory' on Windows if
# you didn't use --fi-path on the command-line

# conf.check_freeimage(path='FreeImage/Dist', fip=True)

def build(bld):
bld(source='main.cpp', target='app', use='FREEIMAGE')
'''

from waflib import Utils
from waflib.Configure import conf


def options(opt):
opt.add_option('--fi-path', type='string', default='', dest='fi_path',
help='''path to the FreeImage directory \
where the files are e.g. /FreeImage/Dist''')
opt.add_option('--fip', action='store_true', default=False, dest='fip',
help='link with FreeImagePlus')
opt.add_option('--fi-static', action='store_true',
default=False, dest='fi_static',
help="link as shared libraries")


@conf
def check_freeimage(self, path=None, fip=False):
self.start_msg('Checking FreeImage')
if not self.env['CXX']:
self.fatal('you must load compiler_cxx before loading freeimage')
prefix = self.options.fi_static and 'ST' or ''
platform = Utils.unversioned_sys_platform()
if platform == 'win32':
if not path:
self.fatal('you must specify the path to FreeImage. \
use --fi-path=/FreeImage/Dist')
else:
self.env['INCLUDES_FREEIMAGE'] = path
self.env['%sLIBPATH_FREEIMAGE' % prefix] = path
libs = ['FreeImage']
if self.options.fip:
libs.append('FreeImagePlus')
if platform == 'win32':
self.env['%sLIB_FREEIMAGE' % prefix] = libs
else:
self.env['%sLIB_FREEIMAGE' % prefix] = [i.lower() for i in libs]
self.end_msg('ok')


def configure(conf):
platform = Utils.unversioned_sys_platform()
if platform == 'win32' and not conf.options.fi_path:
return
conf.check_freeimage(conf.options.fi_path, conf.options.fip)


+ 0
- 31
waflib/extras/fsb.py View File

@@ -1,31 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011 (ita)

"""
Fully sequential builds

The previous tasks from task generators are re-processed, and this may lead to speed issues.
Yet, if you are using this, speed is probably a minor concern
"""

from waflib import Build

def options(opt):
pass

def configure(conf):
pass

class FSBContext(Build.BuildContext):
def __call__(self, *k, **kw):
ret = Build.BuildContext.__call__(self, *k, **kw)

# evaluate the results immediately
Build.BuildContext.compile(self)

return ret

def compile(self):
pass


+ 0
- 65
waflib/extras/fsc.py View File

@@ -1,65 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011 (ita)

"""
Experimental F# stuff

FSC="mono /path/to/fsc.exe" waf configure build
"""

from waflib import Utils, Task, Options, Logs, Errors
from waflib.TaskGen import before_method, after_method, feature
from waflib.Tools import ccroot, cs
from waflib.Configure import conf

ccroot.USELIB_VARS['fsc'] = set(['CSFLAGS', 'ASSEMBLIES', 'RESOURCES'])

@feature('fs')
@before_method('process_source')
def apply_fsc(self):
cs_nodes = []
no_nodes = []
for x in self.to_nodes(self.source):
if x.name.endswith('.fs'):
cs_nodes.append(x)
else:
no_nodes.append(x)
self.source = no_nodes

bintype = getattr(self, 'type', self.gen.endswith('.dll') and 'library' or 'exe')
self.cs_task = tsk = self.create_task('fsc', cs_nodes, self.path.find_or_declare(self.gen))
tsk.env.CSTYPE = '/target:%s' % bintype
tsk.env.OUT = '/out:%s' % tsk.outputs[0].abspath()

inst_to = getattr(self, 'install_path', bintype=='exe' and '${BINDIR}' or '${LIBDIR}')
if inst_to:
# note: we are making a copy, so the files added to cs_task.outputs won't be installed automatically
mod = getattr(self, 'chmod', bintype=='exe' and Utils.O755 or Utils.O644)
self.install_task = self.bld.install_files(inst_to, self.cs_task.outputs[:], env=self.env, chmod=mod)

feature('fs')(cs.use_cs)
after_method('apply_fsc')(cs.use_cs)

feature('fs')(cs.debug_cs)
after_method('apply_fsc', 'use_cs')(cs.debug_cs)

class fsc(Task.Task):
"""
Compile F# files
"""
color = 'YELLOW'
run_str = '${FSC} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}'

def configure(conf):
"""
Find an F# compiler, set the variable FSC for the compiler and FS_NAME (mono or fsc)
"""
conf.find_program(['fsc.exe', 'fsharpc'], var='FSC')
conf.env.ASS_ST = '/r:%s'
conf.env.RES_ST = '/resource:%s'

conf.env.FS_NAME = 'fsc'
if str(conf.env.FSC).lower().find('fsharpc') > -1:
conf.env.FS_NAME = 'mono'


+ 0
- 206
waflib/extras/gccdeps.py View File

@@ -1,206 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2008-2010 (ita)

"""
Execute the tasks with gcc -MD, read the dependencies from the .d file
and prepare the dependency calculation for the next run.

Usage:
def configure(conf):
conf.load('gccdeps')
"""

import os, re, threading
from waflib import Task, Logs, Utils, Errors
from waflib.Tools import c_preproc
from waflib.TaskGen import before_method, feature

lock = threading.Lock()

gccdeps_flags = ['-MD']
if not c_preproc.go_absolute:
gccdeps_flags = ['-MMD']

# Third-party tools are allowed to add extra names in here with append()
supported_compilers = ['gcc', 'icc', 'clang']

def scan(self):
if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
if not self.env.GCCDEPS:
self.generator.bld.fatal('Load gccdeps in configure!')
return self.no_gccdeps_scan()
nodes = self.generator.bld.node_deps.get(self.uid(), [])
names = []
return (nodes, names)

re_o = re.compile(r"\.o$")
re_splitter = re.compile(r'(?<!\\)\s+') # split by space, except when spaces are escaped

def remove_makefile_rule_lhs(line):
# Splitting on a plain colon would accidentally match inside a
# Windows absolute-path filename, so we must search for a colon
# followed by whitespace to find the divider between LHS and RHS
# of the Makefile rule.
rulesep = ': '

sep_idx = line.find(rulesep)
if sep_idx >= 0:
return line[sep_idx + 2:]
else:
return line
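
# For instance (illustrative path), 'c:\path\to\file.o: c:\path\to\dep.h'
# becomes 'c:\path\to\dep.h': the drive-letter colon is not followed by a space,
# so only the ': ' after the object file splits the rule.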

def path_to_node(base_node, path, cached_nodes):
# Take the base node and the path and return a node
# Results are cached because searching the node tree is expensive
# The following code is executed by threads, it is not safe, so a lock is needed...
if getattr(path, '__hash__'):
node_lookup_key = (base_node, path)
else:
# Not hashable, assume it is a list and join into a string
node_lookup_key = (base_node, os.path.sep.join(path))
try:
lock.acquire()
node = cached_nodes[node_lookup_key]
except KeyError:
node = base_node.find_resource(path)
cached_nodes[node_lookup_key] = node
finally:
lock.release()
return node

def post_run(self):
# The following code is executed by threads, it is not safe, so a lock is needed...

if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
return self.no_gccdeps_post_run()

if getattr(self, 'cached', None):
return Task.Task.post_run(self)

name = self.outputs[0].abspath()
name = re_o.sub('.d', name)
txt = Utils.readf(name)
#os.remove(name)

# Compilers have the choice to either output the file's dependencies
# as one large Makefile rule:
#
# /path/to/file.o: /path/to/dep1.h \
# /path/to/dep2.h \
# /path/to/dep3.h \
# ...
#
# or as many individual rules:
#
# /path/to/file.o: /path/to/dep1.h
# /path/to/file.o: /path/to/dep2.h
# /path/to/file.o: /path/to/dep3.h
# ...
#
# So the first step is to sanitize the input by stripping out the left-
# hand side of all these lines. After that, whatever remains are the
# implicit dependencies of task.outputs[0]
txt = '\n'.join([remove_makefile_rule_lhs(line) for line in txt.splitlines()])

# Now join all the lines together
txt = txt.replace('\\\n', '')

val = txt.strip()
val = [x.replace('\\ ', ' ') for x in re_splitter.split(val) if x]

nodes = []
bld = self.generator.bld

# Dynamically bind to the cache
try:
cached_nodes = bld.cached_nodes
except AttributeError:
cached_nodes = bld.cached_nodes = {}

for x in val:

node = None
if os.path.isabs(x):
node = path_to_node(bld.root, x, cached_nodes)
else:
path = bld.bldnode
			# when calling find_resource, make sure the path does not begin with '..'
			x = [k for k in Utils.split_path(x) if k and k != '.']
			while x and x[0] == '..':
				x = x[1:]
				path = path.parent
node = path_to_node(path, x, cached_nodes)

if not node:
raise ValueError('could not find %r for %r' % (x, self))
if id(node) == id(self.inputs[0]):
# ignore the source file, it is already in the dependencies
# this way, successful config tests may be retrieved from the cache
continue
nodes.append(node)

Logs.debug('deps: gccdeps for %s returned %s' % (str(self), str(nodes)))

bld.node_deps[self.uid()] = nodes
bld.raw_deps[self.uid()] = []

try:
del self.cache_sig
	except AttributeError:
pass

Task.Task.post_run(self)

def sig_implicit_deps(self):
if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
return self.no_gccdeps_sig_implicit_deps()
try:
return Task.Task.sig_implicit_deps(self)
except Errors.WafError:
return Utils.SIG_NIL

for name in 'c cxx'.split():
try:
cls = Task.classes[name]
except KeyError:
pass
else:
cls.no_gccdeps_scan = cls.scan
cls.no_gccdeps_post_run = cls.post_run
cls.no_gccdeps_sig_implicit_deps = cls.sig_implicit_deps

cls.scan = scan
cls.post_run = post_run
cls.sig_implicit_deps = sig_implicit_deps

@before_method('process_source')
@feature('force_gccdeps')
def force_gccdeps(self):
self.env.ENABLE_GCCDEPS = ['c', 'cxx']

def configure(conf):
# record that the configuration was executed properly
conf.env.GCCDEPS = True

global gccdeps_flags
flags = conf.env.GCCDEPS_FLAGS or gccdeps_flags
if conf.env.CC_NAME in supported_compilers:
try:
			conf.check(fragment='int main() { return 0; }', features='c force_gccdeps', cflags=flags, msg='Checking for c flags %r' % ' '.join(flags))
except Errors.ConfigurationError:
pass
else:
			conf.env.append_value('CFLAGS', flags)
conf.env.append_unique('ENABLE_GCCDEPS', 'c')

if conf.env.CXX_NAME in supported_compilers:
try:
			conf.check(fragment='int main() { return 0; }', features='cxx force_gccdeps', cxxflags=flags, msg='Checking for cxx flags %r' % ' '.join(flags))
except Errors.ConfigurationError:
pass
else:
			conf.env.append_value('CXXFLAGS', flags)
conf.env.append_unique('ENABLE_GCCDEPS', 'cxx')


+ 0
- 257
waflib/extras/go.py View File

@@ -1,257 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Tom Wambold tom5760 gmail.com 2009
# Thomas Nagy 2010

"""
Go as a language may look nice, but its toolchain is one of the worst a developer
has ever seen. It keeps changing though, and I would like to believe that it will get
better eventually, but the crude reality is that this tool and the examples are
getting broken every few months.

If you have been lured into trying to use Go, you should stick to their Makefiles.
"""

import os, platform

from waflib import Utils, Task, TaskGen, Errors
from waflib.TaskGen import feature, extension, after_method, before_method
from waflib.Tools.ccroot import link_task, stlink_task, propagate_uselib_vars, process_use

class go(Task.Task):
run_str = '${GOC} ${GOCFLAGS} ${CPPPATH_ST:INCPATHS} -o ${TGT} ${SRC}'

class gopackage(stlink_task):
run_str = '${GOP} grc ${TGT} ${SRC}'

class goprogram(link_task):
run_str = '${GOL} ${GOLFLAGS} -o ${TGT} ${SRC}'
inst_to = '${BINDIR}'
chmod = Utils.O755

class cgopackage(stlink_task):
color = 'YELLOW'
inst_to = '${LIBDIR}'
ext_in = ['.go']
ext_out = ['.a']

def run(self):
src_dir = self.generator.bld.path
source = self.inputs
target = self.outputs[0].change_ext('')

#print ("--> %s" % self.outputs)
#print ('++> %s' % self.outputs[1])
bld_dir = self.outputs[1]
bld_dir.mkdir()
obj_dir = bld_dir.make_node('_obj')
obj_dir.mkdir()

bld_srcs = []
for s in source:
# FIXME: it seems gomake/cgo stumbles on filenames like a/b/c.go
# -> for the time being replace '/' with '_'...
#b = bld_dir.make_node(s.path_from(src_dir))
b = bld_dir.make_node(s.path_from(src_dir).replace(os.sep,'_'))
b.parent.mkdir()
#print ('++> %s' % (s.path_from(src_dir),))
try:
try:os.remove(b.abspath())
except Exception:pass
os.symlink(s.abspath(), b.abspath())
except Exception:
# if no support for symlinks, copy the file from src
b.write(s.read())
bld_srcs.append(b)
#print("--|> [%s]" % b.abspath())
b.sig = Utils.h_file(b.abspath())
pass
#self.set_inputs(bld_srcs)
#self.generator.bld.raw_deps[self.uid()] = [self.signature()] + bld_srcs
makefile_node = bld_dir.make_node("Makefile")
makefile_tmpl = '''\
# Copyright 2009 The Go Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.

include $(GOROOT)/src/Make.inc

TARG=%(target)s

GCIMPORTS= %(gcimports)s

CGOFILES=\\
\t%(source)s

CGO_CFLAGS= %(cgo_cflags)s

CGO_LDFLAGS= %(cgo_ldflags)s

include $(GOROOT)/src/Make.pkg

%%: install %%.go
$(GC) $*.go
$(LD) -o $@ $*.$O

''' % {
'gcimports': ' '.join(l for l in self.env['GOCFLAGS']),
'cgo_cflags' : ' '.join(l for l in self.env['GOCFLAGS']),
'cgo_ldflags': ' '.join(l for l in self.env['GOLFLAGS']),
'target': target.path_from(obj_dir),
'source': ' '.join([b.path_from(bld_dir) for b in bld_srcs])
}
makefile_node.write(makefile_tmpl)
#print ("::makefile: %s"%makefile_node.abspath())
cmd = Utils.subst_vars('gomake ${GOMAKE_FLAGS}', self.env).strip()
o = self.outputs[0].change_ext('.gomake.log')
fout_node = bld_dir.find_or_declare(o.name)
fout = open(fout_node.abspath(), 'w')
rc = self.generator.bld.exec_command(
cmd,
stdout=fout,
stderr=fout,
cwd=bld_dir.abspath(),
)
if rc != 0:
import waflib.Logs as msg
msg.error('** error running [%s] (cgo-%s)' % (cmd, target))
msg.error(fout_node.read())
return rc
self.generator.bld.read_stlib(
target,
paths=[obj_dir.abspath(),],
)
tgt = self.outputs[0]
if tgt.parent != obj_dir:
install_dir = os.path.join('${LIBDIR}',
tgt.parent.path_from(obj_dir))
else:
install_dir = '${LIBDIR}'
#print('===> %s (%s)' % (tgt.abspath(), install_dir))
self.generator.bld.install_files(
install_dir,
tgt.abspath(),
relative_trick=False,
postpone=False,
)
return rc

@extension('.go')
def compile_go(self, node):
#print('*'*80, self.name)
if not ('cgopackage' in self.features):
return self.create_compiled_task('go', node)
#print ('compile_go-cgo...')
bld_dir = node.parent.get_bld()
obj_dir = bld_dir.make_node('_obj')
target = obj_dir.make_node(node.change_ext('.a').name)
return self.create_task('cgopackage', node, node.change_ext('.a'))

@feature('gopackage', 'goprogram', 'cgopackage')
@before_method('process_source')
def go_compiler_is_foobar(self):
if self.env.GONAME == 'gcc':
return
self.source = self.to_nodes(self.source)
src = []
go = []
for node in self.source:
if node.name.endswith('.go'):
go.append(node)
else:
src.append(node)
self.source = src
if not ('cgopackage' in self.features):
#print('--> [%s]... (%s)' % (go[0], getattr(self, 'target', 'N/A')))
tsk = self.create_compiled_task('go', go[0])
tsk.inputs.extend(go[1:])
else:
#print ('+++ [%s] +++' % self.target)
bld_dir = self.path.get_bld().make_node('cgopackage--%s' % self.target.replace(os.sep,'_'))
obj_dir = bld_dir.make_node('_obj')
target = obj_dir.make_node(self.target+'.a')
tsk = self.create_task('cgopackage', go, [target, bld_dir])
self.link_task = tsk

@feature('gopackage', 'goprogram', 'cgopackage')
@after_method('process_source', 'apply_incpaths',)
def go_local_libs(self):
names = self.to_list(getattr(self, 'use', []))
#print ('== go-local-libs == [%s] == use: %s' % (self.name, names))
for name in names:
tg = self.bld.get_tgen_by_name(name)
if not tg:
		raise Errors.WafError('no target of name %r necessary for %r in go uselib local' % (name, self))
tg.post()
#print ("-- tg[%s]: %s" % (self.name,name))
lnk_task = getattr(tg, 'link_task', None)
if lnk_task:
for tsk in self.tasks:
if isinstance(tsk, (go, gopackage, cgopackage)):
tsk.set_run_after(lnk_task)
tsk.dep_nodes.extend(lnk_task.outputs)
path = lnk_task.outputs[0].parent.abspath()
if isinstance(lnk_task, (go, gopackage)):
# handle hierarchical packages
path = lnk_task.generator.path.get_bld().abspath()
elif isinstance(lnk_task, (cgopackage,)):
# handle hierarchical cgopackages
cgo_obj_dir = lnk_task.outputs[1].find_or_declare('_obj')
path = cgo_obj_dir.abspath()
# recursively add parent GOCFLAGS...
self.env.append_unique('GOCFLAGS',
getattr(lnk_task.env, 'GOCFLAGS',[]))
# ditto for GOLFLAGS...
self.env.append_unique('GOLFLAGS',
getattr(lnk_task.env, 'GOLFLAGS',[]))
self.env.append_unique('GOCFLAGS', ['-I%s' % path])
self.env.append_unique('GOLFLAGS', ['-L%s' % path])
for n in getattr(tg, 'includes_nodes', []):
self.env.append_unique('GOCFLAGS', ['-I%s' % n.abspath()])
pass
pass

def configure(conf):

def set_def(var, val):
if not conf.env[var]:
conf.env[var] = val

goarch = os.getenv('GOARCH')
if goarch == '386':
set_def('GO_PLATFORM', 'i386')
elif goarch == 'amd64':
set_def('GO_PLATFORM', 'x86_64')
elif goarch == 'arm':
set_def('GO_PLATFORM', 'arm')
else:
set_def('GO_PLATFORM', platform.machine())

if conf.env.GO_PLATFORM == 'x86_64':
set_def('GO_COMPILER', '6g')
set_def('GO_LINKER', '6l')
elif conf.env.GO_PLATFORM in ('i386', 'i486', 'i586', 'i686'):
set_def('GO_COMPILER', '8g')
set_def('GO_LINKER', '8l')
elif conf.env.GO_PLATFORM == 'arm':
set_def('GO_COMPILER', '5g')
set_def('GO_LINKER', '5l')
set_def('GO_EXTENSION', '.5')

if not (conf.env.GO_COMPILER or conf.env.GO_LINKER):
		conf.fatal('Unsupported platform ' + platform.machine())

set_def('GO_PACK', 'gopack')
set_def('gopackage_PATTERN', '%s.a')
set_def('CPPPATH_ST', '-I%s')

set_def('GOMAKE_FLAGS', ['--quiet'])
conf.find_program(conf.env.GO_COMPILER, var='GOC')
conf.find_program(conf.env.GO_LINKER, var='GOL')
conf.find_program(conf.env.GO_PACK, var='GOP')

conf.find_program('cgo', var='CGO')

TaskGen.feature('go')(process_use)
TaskGen.feature('go')(propagate_uselib_vars)


+ 0
- 17
waflib/extras/gob2.py View File

@@ -1,17 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Ali Sabil, 2007

from waflib import TaskGen

TaskGen.declare_chain(
name = 'gob2',
rule = '${GOB2} -o ${TGT[0].bld_dir()} ${GOB2FLAGS} ${SRC}',
ext_in = '.gob',
ext_out = '.c'
)
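
# A usage sketch (hypothetical file names): listing .gob sources in a C task
# generator lets the chain generate and compile the resulting .c files:
#
#	def build(bld):
#		bld(features='c cprogram', source='hello.gob main.c', target='hello')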

def configure(conf):
conf.find_program('gob2', var='GOB2')
conf.env['GOB2FLAGS'] = ''


+ 0
- 150
waflib/extras/halide.py View File

@@ -1,150 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Halide code generation tool

__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
__copyright__ = "Jérôme Carretero, 2014"

"""

Tool to run `Halide <http://halide-lang.org>`_ code generators.

Usage::

bld(
name='pipeline',
# ^ Reference this in use="..." for things using the generated code
#target=['pipeline.o', 'pipeline.h']
# ^ by default, name.{o,h} is added, but you can set the outputs here
features='halide',
halide_env="HL_TRACE=1 HL_TARGET=host-opencl-gpu_debug",
# ^ Environment passed to the generator,
# can be a dict, k/v list, or string.
args=[],
# ^ Command-line arguments to the generator (optional),
# eg. to give parameters to the scheduling
source='pipeline_gen',
# ^ Name of the source executable
)


Known issues:


- Currently only supports Linux (no ".exe")

- Doesn't rerun on input modification when input is part of a build
chain, and has been modified externally.

"""

import os
from waflib import Task, Utils, Options, TaskGen, Errors

class run_halide_gen(Task.Task):
color = 'CYAN'
vars = ['HALIDE_ENV', 'HALIDE_ARGS']
run_str = "${SRC[0].abspath()} ${HALIDE_ARGS}"
def __str__(self):
stuff = "halide"
stuff += ("[%s]" % (",".join(
('%s=%s' % (k,v)) for k, v in sorted(self.env.env.items()))))
return Task.Task.__str__(self).replace(self.__class__.__name__,
stuff)

@TaskGen.feature('halide')
@TaskGen.before_method('process_source')
def halide(self):
Utils.def_attrs(self,
args=[],
halide_env={},
)

bld = self.bld

env = self.halide_env
try:
if isinstance(env, str):
env = dict(x.split('=') for x in env.split())
elif isinstance(env, list):
env = dict(x.split('=') for x in env)
assert isinstance(env, dict)
except Exception as e:
if not isinstance(e, ValueError) \
and not isinstance(e, AssertionError):
raise
raise Errors.WafError(
"halide_env must be under the form" \
" {'HL_x':'a', 'HL_y':'b'}" \
" or ['HL_x=y', 'HL_y=b']" \
" or 'HL_x=y HL_y=b'")

src = self.to_nodes(self.source)
assert len(src) == 1, "Only one source expected"
src = src[0]

args = Utils.to_list(self.args)

def change_ext(src, ext):
# Return a node with a new extension, in an appropriate folder
name = src.name
xpos = src.name.rfind('.')
if xpos == -1: xpos = len(src.name)
newname = name[:xpos] + ext
if src.is_child_of(bld.bldnode):
node = src.get_src().parent.find_or_declare(newname)
else:
node = bld.bldnode.find_or_declare(newname)
return node

def to_nodes(self, lst, path=None):
tmp = []
path = path or self.path
find = path.find_or_declare

if isinstance(lst, self.path.__class__):
lst = [lst]

for x in Utils.to_list(lst):
if isinstance(x, str):
node = find(x)
else:
node = x
tmp.append(node)
return tmp

tgt = to_nodes(self, self.target)
if not tgt:
tgt = [change_ext(src, '.o'), change_ext(src, '.h')]
cwd = tgt[0].parent.abspath()
task = self.create_task('run_halide_gen', src, tgt, cwd=cwd)
task.env.append_unique('HALIDE_ARGS', args)
if task.env.env == []:
task.env.env = {}
task.env.env.update(env)
task.env.HALIDE_ENV = " ".join(("%s=%s" % (k,v)) for (k,v) in sorted(env.items()))
task.env.HALIDE_ARGS = args

try:
self.compiled_tasks.append(task)
except AttributeError:
self.compiled_tasks = [task]
self.source = []

def configure(conf):
if Options.options.halide_root is None:
conf.check_cfg(package='Halide', args='--cflags --libs')
else:
halide_root = Options.options.halide_root
conf.env.INCLUDES_HALIDE = [ os.path.join(halide_root, "include") ]
conf.env.LIBPATH_HALIDE = [ os.path.join(halide_root, "lib") ]
conf.env.LIB_HALIDE = ["Halide"]

	# You might want to add this until upstream fixes it
#conf.env.LIB_HALIDE += ['ncurses', 'dl', 'pthread']

def options(opt):
opt.add_option('--halide-root',
help="path to Halide include and lib files",
)


+ 0
- 19
waflib/extras/local_rpath.py View File

@@ -1,19 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011 (ita)

from waflib.TaskGen import after_method, feature

@after_method('propagate_uselib_vars')
@feature('cprogram', 'cshlib', 'cxxprogram', 'cxxshlib', 'fcprogram', 'fcshlib')
def add_rpath_stuff(self):
all = self.to_list(getattr(self, 'use', []))
while all:
name = all.pop()
try:
tg = self.bld.get_tgen_by_name(name)
		except Exception:
continue
self.env.append_value('RPATH', tg.link_task.outputs[0].parent.abspath())
all.extend(self.to_list(getattr(tg, 'use', [])))
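
# Illustration (hypothetical targets): with this tool loaded, 'app' below also
# receives an RPATH entry pointing at the build directory of 'mylib':
#
#	def build(bld):
#		bld.shlib(source='lib.c', target='mylib')
#		bld.program(source='main.c', target='app', use='mylib')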


+ 0
- 142
waflib/extras/make.py View File

@@ -1,142 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011 (ita)

"""
A make-like way of executing the build, following the relationships between inputs/outputs

This algorithm will lead to slower builds, will not be as flexible as "waf build", but
it might be useful for building data files (?)

It is likely to break in the following cases:
- files are created dynamically (no inputs or outputs)
- headers
- building two files from different groups
"""

import re
from waflib import Options, Task, Logs
from waflib.Build import BuildContext

class MakeContext(BuildContext):
'''executes tasks in a step-by-step manner, following dependencies between inputs/outputs'''
cmd = 'make'
fun = 'build'

def __init__(self, **kw):
super(MakeContext, self).__init__(**kw)
self.files = Options.options.files

def get_build_iterator(self):
		if not self.files:
			# delegate to the default build iterator, forwarding its task batches
			for batch in super(MakeContext, self).get_build_iterator():
				yield batch

for g in self.groups:
for tg in g:
try:
f = tg.post
except AttributeError:
pass
else:
f()

provides = {}
uses = {}
all_tasks = []
tasks = []
for pat in self.files.split(','):
matcher = self.get_matcher(pat)
for tg in g:
if isinstance(tg, Task.TaskBase):
lst = [tg]
else:
lst = tg.tasks
for tsk in lst:
all_tasks.append(tsk)

do_exec = False
for node in getattr(tsk, 'inputs', []):
try:
uses[node].append(tsk)
						except KeyError:
uses[node] = [tsk]

if matcher(node, output=False):
do_exec = True
break

for node in getattr(tsk, 'outputs', []):
try:
provides[node].append(tsk)
						except KeyError:
provides[node] = [tsk]

if matcher(node, output=True):
do_exec = True
break
if do_exec:
tasks.append(tsk)

# so we have the tasks that we need to process, the list of all tasks,
# the map of the tasks providing nodes, and the map of tasks using nodes

if not tasks:
# if there are no tasks matching, return everything in the current group
result = all_tasks
else:
# this is like a big filter...
result = set([])
seen = set([])
cur = set(tasks)
while cur:
result |= cur
tosee = set([])
for tsk in cur:
for node in getattr(tsk, 'inputs', []):
if node in seen:
continue
seen.add(node)
tosee |= set(provides.get(node, []))
cur = tosee
result = list(result)

Task.set_file_constraints(result)
Task.set_precedence_constraints(result)
yield result

while 1:
yield []

def get_matcher(self, pat):
# this returns a function
inn = True
out = True
if pat.startswith('in:'):
out = False
pat = pat.replace('in:', '')
elif pat.startswith('out:'):
inn = False
pat = pat.replace('out:', '')

anode = self.root.find_node(pat)
pattern = None
if not anode:
if not pat.startswith('^'):
pat = '^.+?%s' % pat
if not pat.endswith('$'):
pat = '%s$' % pat
pattern = re.compile(pat)

def match(node, output):
if output == True and not out:
return False
if output == False and not inn:
return False

if anode:
return anode == node
else:
return pattern.match(node.abspath())
return match
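
# Pattern examples (illustrative): 'in:src/main.c' selects tasks reading that
# file, 'out:app.o' selects tasks producing it, and a pattern naming no existing
# node is used as a regular expression over node paths.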


+ 0
- 70
waflib/extras/md5_tstamp.py View File

@@ -1,70 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8

"""
This module assumes that only one build context is running at a given time, which
is not the case if you want to execute configuration tests in parallel.

Store values on the build context mapping file paths to
stat values and md5 hashes (timestamp + md5); this way the md5 hashes are
recomputed only when the timestamps change (which can be faster).
There is usually little or no gain from enabling this, but it can be used to enable
the second-level cache with timestamps (WAFCACHE).

You may have to run distclean or remove the build directory before enabling
or disabling this hashing scheme.
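
Usage sketch (the module patches waf at import time, so loading it from
options() or configure() is sufficient)::

	def options(opt):
		opt.load('md5_tstamp')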
"""

import os, stat
from waflib import Utils, Build, Context

STRONGEST = True

try:
Build.BuildContext.store_real
except AttributeError:

Context.DBFILE += '_md5tstamp'

Build.hashes_md5_tstamp = {}
Build.SAVED_ATTRS.append('hashes_md5_tstamp')
def store(self):
# save the hash cache as part of the default pickle file
self.hashes_md5_tstamp = Build.hashes_md5_tstamp
self.store_real()
Build.BuildContext.store_real = Build.BuildContext.store
Build.BuildContext.store = store

def restore(self):
# we need a module variable for h_file below
self.restore_real()
try:
Build.hashes_md5_tstamp = self.hashes_md5_tstamp or {}
	except Exception:
Build.hashes_md5_tstamp = {}
Build.BuildContext.restore_real = Build.BuildContext.restore
Build.BuildContext.restore = restore

def h_file(filename):
st = os.stat(filename)
if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file')

if filename in Build.hashes_md5_tstamp:
if Build.hashes_md5_tstamp[filename][0] == str(st.st_mtime):
return Build.hashes_md5_tstamp[filename][1]
if STRONGEST:
ret = Utils.h_file_no_md5(filename)
Build.hashes_md5_tstamp[filename] = (str(st.st_mtime), ret)
return ret
else:
		m = Utils.md5()
		# hash the metadata rather than the contents (encode for Python 3)
		m.update(str(st.st_mtime).encode())
		m.update(str(st.st_size).encode())
		m.update(filename.encode())
Build.hashes_md5_tstamp[filename] = (str(st.st_mtime), m.digest())
return m.digest()
Utils.h_file_no_md5 = Utils.h_file
Utils.h_file = h_file


+ 0
- 111
waflib/extras/mem_reducer.py View File

@@ -1,111 +0,0 @@
#! /usr/bin/env python
# encoding: UTF-8

"""
This tool can help to reduce the memory usage in very large builds featuring many tasks with after/before attributes.
It may also improve the overall build time by decreasing the number of iterations over tasks.

Usage:
def options(opt):
opt.load('mem_reducer')
"""

import itertools
from waflib import Utils, Task, Runner

class SetOfTasks(object):
"""Wraps a set and a task which has a list of other sets.
The interface is meant to mimic the interface of set. Add missing functions as needed.
"""
def __init__(self, owner):
self._set = owner.run_after
self._owner = owner

def __iter__(self):
for g in self._owner.run_after_groups:
#print len(g)
for task in g:
yield task
for task in self._set:
yield task

def add(self, obj):
self._set.add(obj)

def update(self, obj):
self._set.update(obj)

def set_precedence_constraints(tasks):
cstr_groups = Utils.defaultdict(list)
for x in tasks:
x.run_after = SetOfTasks(x)
x.run_after_groups = []
x.waiting_sets = []

h = x.hash_constraints()
cstr_groups[h].append(x)

# create sets which can be reused for all tasks
for k in cstr_groups.keys():
cstr_groups[k] = set(cstr_groups[k])

# this list should be short
for key1, key2 in itertools.combinations(cstr_groups.keys(), 2):
group1 = cstr_groups[key1]
group2 = cstr_groups[key2]
# get the first entry of the set
t1 = next(iter(group1))
t2 = next(iter(group2))

# add the constraints based on the comparisons
if Task.is_before(t1, t2):
for x in group2:
x.run_after_groups.append(group1)
for k in group1:
k.waiting_sets.append(group1)
elif Task.is_before(t2, t1):
for x in group1:
x.run_after_groups.append(group2)
for k in group2:
k.waiting_sets.append(group2)

Task.set_precedence_constraints = set_precedence_constraints

def get_out(self):
tsk = self.out.get()
if not self.stop:
self.add_more_tasks(tsk)
self.count -= 1
self.dirty = True

# shrinking sets
try:
ws = tsk.waiting_sets
except AttributeError:
pass
else:
for k in ws:
try:
k.remove(tsk)
except KeyError:
pass

return tsk
Runner.Parallel.get_out = get_out

def skip(self, tsk):
tsk.hasrun = Task.SKIPPED

# shrinking sets
try:
ws = tsk.waiting_sets
except AttributeError:
pass
else:
for k in ws:
try:
k.remove(tsk)
except KeyError:
pass
Runner.Parallel.skip = skip


+ 0
- 85
waflib/extras/midl.py View File

@@ -1,85 +0,0 @@
#!/usr/bin/env python
# Issue 1185 ultrix gmail com

"""
Microsoft Interface Definition Language support. Given ComObject.idl, this tool
will generate ComObject.tlb, ComObject_i.h, ComObject_i.c, ComObject_p.c and dlldata.c

To declare targets using midl::

def configure(conf):
conf.load('msvc')
conf.load('midl')

def build(bld):
bld(
features='c cshlib',
# Note: ComObject_i.c is generated from ComObject.idl
source = 'main.c ComObject.idl ComObject_i.c',
target = 'ComObject.dll')
"""

from waflib import Task, Utils
from waflib.TaskGen import feature, before_method
import os

def configure(conf):
conf.find_program(['midl'], var='MIDL')

conf.env.MIDLFLAGS = [
'/nologo',
'/D',
'_DEBUG',
'/W1',
'/char',
'signed',
'/Oicf',
]

@feature('c', 'cxx')
@before_method('process_source')
def idl_file(self):
# Do this before process_source so that the generated header can be resolved
# when scanning source dependencies.
idl_nodes = []
src_nodes = []
for node in Utils.to_list(self.source):
if str(node).endswith('.idl'):
idl_nodes.append(node)
else:
src_nodes.append(node)

for node in self.to_nodes(idl_nodes):
t = node.change_ext('.tlb')
h = node.change_ext('_i.h')
c = node.change_ext('_i.c')
p = node.change_ext('_p.c')
d = node.parent.find_or_declare('dlldata.c')
tsk = self.create_task('midl', node, [t, h, c, p, d])

self.source = src_nodes

class midl(Task.Task):
"""
Compile idl files
"""
color = 'YELLOW'
run_str = '${MIDL} ${MIDLFLAGS} ${CPPPATH_ST:INCLUDES} /tlb ${TGT[0].bldpath()} /header ${TGT[1].bldpath()} /iid ${TGT[2].bldpath()} /proxy ${TGT[3].bldpath()} /dlldata ${TGT[4].bldpath()} ${SRC}'
before = ['winrc']

def exec_command(self, *k, **kw):
if self.env['PATH']:
env = self.env.env or dict(os.environ)
env.update(PATH = ';'.join(self.env['PATH']))
kw['env'] = env
bld = self.generator.bld
try:
if not kw.get('cwd', None):
kw['cwd'] = bld.cwd
except AttributeError:
bld.cwd = kw['cwd'] = bld.variant_dir
return bld.exec_command(k[0], **kw)


+ 0
- 413
waflib/extras/misc.py View File

@@ -1,413 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)

"""
This tool is totally deprecated

Try using:
.pc.in files for .pc files
the feature intltool_in - see demos/intltool
make-like rules
"""

import shutil, re, os
from waflib import TaskGen, Node, Task, Utils, Build, Errors
from waflib.TaskGen import feature, after_method, before_method
from waflib.Logs import debug

def copy_attrs(orig, dest, names, only_if_set=False):
"""
copy class attributes from an object to another
"""
for a in Utils.to_list(names):
u = getattr(orig, a, ())
if u or not only_if_set:
setattr(dest, a, u)

def copy_func(tsk):
"Make a file copy. This might be used to make other kinds of file processing (even calling a compiler is possible)"
env = tsk.env
infile = tsk.inputs[0].abspath()
outfile = tsk.outputs[0].abspath()
try:
shutil.copy2(infile, outfile)
except EnvironmentError:
return 1
else:
if tsk.chmod: os.chmod(outfile, tsk.chmod)
return 0

def action_process_file_func(tsk):
"Ask the function attached to the task to process it"
if not tsk.fun: raise Errors.WafError('task must have a function attached to it for copy_func to work!')
return tsk.fun(tsk)

@feature('cmd')
def apply_cmd(self):
"call a command everytime"
if not self.fun: raise Errors.WafError('cmdobj needs a function!')
tsk = Task.TaskBase()
tsk.fun = self.fun
tsk.env = self.env
self.tasks.append(tsk)
tsk.install_path = self.install_path

@feature('copy')
@before_method('process_source')
def apply_copy(self):
Utils.def_attrs(self, fun=copy_func)
self.default_install_path = 0

lst = self.to_list(self.source)
self.meths.remove('process_source')

for filename in lst:
node = self.path.find_resource(filename)
if not node: raise Errors.WafError('cannot find input file %s for processing' % filename)

target = self.target
if not target or len(lst)>1: target = node.name

# TODO the file path may be incorrect
newnode = self.path.find_or_declare(target)

tsk = self.create_task('copy', node, newnode)
tsk.fun = self.fun
tsk.chmod = getattr(self, 'chmod', Utils.O644)

if not tsk.env:
tsk.debug()
raise Errors.WafError('task without an environment')

def subst_func(tsk):
"Substitutes variables in a .in file"

	m4_re = re.compile(r'@(\w+)@', re.M)

code = tsk.inputs[0].read() #Utils.readf(infile)

	# replace all % by %% to prevent errors from % signs in the input file during string formatting
code = code.replace('%', '%%')

s = m4_re.sub(r'%(\1)s', code)

env = tsk.env
di = getattr(tsk, 'dict', {}) or getattr(tsk.generator, 'dict', {})
if not di:
names = m4_re.findall(code)
for i in names:
di[i] = env.get_flat(i) or env.get_flat(i.upper())

tsk.outputs[0].write(s % di)
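
# Illustration (hypothetical files): with config.h.in containing '@VERSION@',
# the fragment below produces config.h with the value substituted:
#
#	def build(bld):
#		bld(features='subst', source='config.h.in', target='config.h', dict={'VERSION': '1.0'})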

@feature('subst')
@before_method('process_source')
def apply_subst(self):
Utils.def_attrs(self, fun=subst_func)
lst = self.to_list(self.source)
self.meths.remove('process_source')

self.dict = getattr(self, 'dict', {})

for filename in lst:
node = self.path.find_resource(filename)
if not node: raise Errors.WafError('cannot find input file %s for processing' % filename)

if self.target:
newnode = self.path.find_or_declare(self.target)
else:
newnode = node.change_ext('')

try:
self.dict = self.dict.get_merged_dict()
except AttributeError:
pass

if self.dict and not self.env['DICT_HASH']:
self.env = self.env.derive()
keys = list(self.dict.keys())
keys.sort()
lst = [self.dict[x] for x in keys]
self.env['DICT_HASH'] = str(Utils.h_list(lst))

tsk = self.create_task('copy', node, newnode)
tsk.fun = self.fun
tsk.dict = self.dict
tsk.dep_vars = ['DICT_HASH']
tsk.chmod = getattr(self, 'chmod', Utils.O644)

if not tsk.env:
tsk.debug()
raise Errors.WafError('task without an environment')

####################
## command-output ####
####################

class cmd_arg(object):
"""command-output arguments for representing files or folders"""
def __init__(self, name, template='%s'):
self.name = name
self.template = template
self.node = None

class input_file(cmd_arg):
def find_node(self, base_path):
assert isinstance(base_path, Node.Node)
self.node = base_path.find_resource(self.name)
if self.node is None:
raise Errors.WafError("Input file %s not found in " % (self.name, base_path))

def get_path(self, env, absolute):
if absolute:
return self.template % self.node.abspath()
else:
return self.template % self.node.srcpath()

class output_file(cmd_arg):
def find_node(self, base_path):
assert isinstance(base_path, Node.Node)
self.node = base_path.find_or_declare(self.name)
if self.node is None:
raise Errors.WafError("Output file %s not found in " % (self.name, base_path))

def get_path(self, env, absolute):
if absolute:
return self.template % self.node.abspath()
else:
return self.template % self.node.bldpath()

class cmd_dir_arg(cmd_arg):
def find_node(self, base_path):
assert isinstance(base_path, Node.Node)
self.node = base_path.find_dir(self.name)
if self.node is None:
raise Errors.WafError("Directory %s not found in " % (self.name, base_path))

class input_dir(cmd_dir_arg):
def get_path(self, dummy_env, dummy_absolute):
return self.template % self.node.abspath()

class output_dir(cmd_dir_arg):
def get_path(self, env, dummy_absolute):
return self.template % self.node.abspath()


class command_output(Task.Task):
color = "BLUE"
def __init__(self, env, command, command_node, command_args, stdin, stdout, cwd, os_env, stderr):
Task.Task.__init__(self, env=env)
assert isinstance(command, (str, Node.Node))
self.command = command
self.command_args = command_args
self.stdin = stdin
self.stdout = stdout
self.cwd = cwd
self.os_env = os_env
self.stderr = stderr

if command_node is not None: self.dep_nodes = [command_node]
		self.dep_vars = [] # additional environment variables to track

def run(self):
task = self
#assert len(task.inputs) > 0

def input_path(node, template):
if task.cwd is None:
return template % node.bldpath()
else:
return template % node.abspath()
def output_path(node, template):
fun = node.abspath
if task.cwd is None: fun = node.bldpath
return template % fun()

if isinstance(task.command, Node.Node):
argv = [input_path(task.command, '%s')]
else:
argv = [task.command]

for arg in task.command_args:
if isinstance(arg, str):
argv.append(arg)
else:
assert isinstance(arg, cmd_arg)
argv.append(arg.get_path(task.env, (task.cwd is not None)))

if task.stdin:
stdin = open(input_path(task.stdin, '%s'))
else:
stdin = None

if task.stdout:
stdout = open(output_path(task.stdout, '%s'), "w")
else:
stdout = None

if task.stderr:
stderr = open(output_path(task.stderr, '%s'), "w")
else:
stderr = None

if task.cwd is None:
cwd = ('None (actually %r)' % os.getcwd())
else:
cwd = repr(task.cwd)
debug("command-output: cwd=%s, stdin=%r, stdout=%r, argv=%r" %
(cwd, stdin, stdout, argv))

if task.os_env is None:
os_env = os.environ
else:
os_env = task.os_env
command = Utils.subprocess.Popen(argv, stdin=stdin, stdout=stdout, stderr=stderr, cwd=task.cwd, env=os_env)
return command.wait()

@feature('command-output')
def init_cmd_output(self):
Utils.def_attrs(self,
stdin = None,
stdout = None,
stderr = None,
# the command to execute
command = None,

# whether it is an external command; otherwise it is assumed
# to be an executable binary or script that lives in the
# source or build tree.
command_is_external = False,

# extra parameters (argv) to pass to the command (excluding
# the command itself)
argv = [],

# dependencies to other objects -> this is probably not what you want (ita)
# values must be 'task_gen' instances (not names!)
dependencies = [],

# dependencies on env variable contents
dep_vars = [],

# input files that are implicit, i.e. they are not
# stdin, nor are they mentioned explicitly in argv
hidden_inputs = [],

# output files that are implicit, i.e. they are not
# stdout, nor are they mentioned explicitly in argv
hidden_outputs = [],

# change the subprocess to this cwd (must use obj.input_dir() or output_dir() here)
cwd = None,

# OS environment variables to pass to the subprocess
# if None, use the default environment variables unchanged
os_env = None)

@feature('command-output')
@after_method('init_cmd_output')
def apply_cmd_output(self):
if self.command is None:
raise Errors.WafError("command-output missing command")
if self.command_is_external:
cmd = self.command
cmd_node = None
else:
cmd_node = self.path.find_resource(self.command)
assert cmd_node is not None, ('''Could not find command '%s' in source tree.
Hint: if this is an external command,
use command_is_external=True''') % (self.command,)
cmd = cmd_node


args = []
inputs = []
outputs = []

for arg in self.argv:
if isinstance(arg, cmd_arg):
arg.find_node(self.path)
if isinstance(arg, input_file):
inputs.append(arg.node)
if isinstance(arg, output_file):
outputs.append(arg.node)

if self.stdout is None:
stdout = None
else:
assert isinstance(self.stdout, str)
stdout = self.path.find_or_declare(self.stdout)
if stdout is None:
raise Errors.WafError("File %s not found" % (self.stdout,))
outputs.append(stdout)

if self.stderr is None:
stderr = None
else:
assert isinstance(self.stderr, str)
stderr = self.path.find_or_declare(self.stderr)
if stderr is None:
raise Errors.WafError("File %s not found" % (self.stderr,))
outputs.append(stderr)

if self.stdin is None:
stdin = None
else:
assert isinstance(self.stdin, str)
stdin = self.path.find_resource(self.stdin)
if stdin is None:
raise Errors.WafError("File %s not found" % (self.stdin,))
inputs.append(stdin)

for hidden_input in self.to_list(self.hidden_inputs):
node = self.path.find_resource(hidden_input)
if node is None:
raise Errors.WafError("File %s not found in dir %s" % (hidden_input, self.path))
inputs.append(node)

for hidden_output in self.to_list(self.hidden_outputs):
node = self.path.find_or_declare(hidden_output)
if node is None:
raise Errors.WafError("File %s not found in dir %s" % (hidden_output, self.path))
outputs.append(node)

if not (inputs or getattr(self, 'no_inputs', None)):
raise Errors.WafError('command-output objects must have at least one input file or give self.no_inputs')
if not (outputs or getattr(self, 'no_outputs', None)):
raise Errors.WafError('command-output objects must have at least one output file or give self.no_outputs')

cwd = self.bld.variant_dir
task = command_output(self.env, cmd, cmd_node, self.argv, stdin, stdout, cwd, self.os_env, stderr)
task.generator = self
copy_attrs(self, task, 'before after ext_in ext_out', only_if_set=True)
self.tasks.append(task)

task.inputs = inputs
task.outputs = outputs
task.dep_vars = self.to_list(self.dep_vars)

for dep in self.dependencies:
assert dep is not self
dep.post()
for dep_task in dep.tasks:
task.set_run_after(dep_task)

if not task.inputs:
# the case for svnversion, always run, and update the output nodes
task.runnable_status = type(Task.TaskBase.run)(runnable_status, task, task.__class__) # always run
task.post_run = type(Task.TaskBase.run)(post_run, task, task.__class__)

# TODO the case with no outputs?

def post_run(self):
for x in self.outputs:
x.sig = Utils.h_file(x.abspath())

def runnable_status(self):
return self.RUN_ME

Task.task_factory('copy', vars=[], func=action_process_file_func)


+ 0
- 262
waflib/extras/msvcdeps.py View File

@@ -1,262 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Copyright Garmin International or its subsidiaries, 2012-2013

'''
Off-load dependency scanning from Python code to MSVC compiler

This tool is safe to load in any environment; it will only activate the
MSVC exploits when it finds that a particular taskgen uses MSVC to
compile.

Empirical testing shows about a 10% execution time savings from using
this tool as compared to c_preproc.

The technique of gutting scan() and pushing the dependency calculation
down to post_run() is cribbed from gccdeps.py.
'''

import os
import sys
import tempfile
import threading

from waflib import Context, Errors, Logs, Task, Utils
from waflib.Tools import c_preproc, c, cxx, msvc
from waflib.TaskGen import feature, before_method

lock = threading.Lock()
nodes = {} # Cache the path -> Node lookup

PREPROCESSOR_FLAG = '/showIncludes'
INCLUDE_PATTERN = 'Note: including file:'
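
# A sample line emitted by cl.exe under /showIncludes (path illustrative):
#   Note: including file: C:\project\include\foo.h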

# Extensible by outside tools
supported_compilers = ['msvc']

@feature('c', 'cxx')
@before_method('process_source')
def apply_msvcdeps_flags(taskgen):
if taskgen.env.CC_NAME not in supported_compilers:
return

for flag in ('CFLAGS', 'CXXFLAGS'):
if taskgen.env.get_flat(flag).find(PREPROCESSOR_FLAG) < 0:
taskgen.env.append_value(flag, PREPROCESSOR_FLAG)

# Figure out what casing conventions the user's shell used when
# launching Waf
(drive, _) = os.path.splitdrive(taskgen.bld.srcnode.abspath())
taskgen.msvcdeps_drive_lowercase = drive == drive.lower()

def path_to_node(base_node, path, cached_nodes):
# Take the base node and the path and return a node
# Results are cached because searching the node tree is expensive
# This code runs in multiple threads and is not thread-safe by itself, so a lock is needed
if getattr(path, '__hash__'):
node_lookup_key = (base_node, path)
else:
# Not hashable, assume it is a list and join into a string
node_lookup_key = (base_node, os.path.sep.join(path))
try:
lock.acquire()
node = cached_nodes[node_lookup_key]
except KeyError:
node = base_node.find_resource(path)
cached_nodes[node_lookup_key] = node
finally:
lock.release()
return node

'''
Register a task subclass that has hooks for running our custom
dependency calculations rather than the C/C++ stock c_preproc
method.
'''
def wrap_compiled_task(classname):
derived_class = type(classname, (Task.classes[classname],), {})

def post_run(self):
if self.env.CC_NAME not in supported_compilers:
return super(derived_class, self).post_run()

if getattr(self, 'cached', None):
return Task.Task.post_run(self)

bld = self.generator.bld
unresolved_names = []
resolved_nodes = []

lowercase = self.generator.msvcdeps_drive_lowercase
correct_case_path = bld.path.abspath()
correct_case_path_len = len(correct_case_path)
correct_case_path_norm = os.path.normcase(correct_case_path)

# Dynamically bind to the cache
try:
cached_nodes = bld.cached_nodes
except AttributeError:
cached_nodes = bld.cached_nodes = {}

for path in self.msvcdeps_paths:
node = None
if os.path.isabs(path):
# Force drive letter to match conventions of main source tree
drive, tail = os.path.splitdrive(path)

if os.path.normcase(path[:correct_case_path_len]) == correct_case_path_norm:
# Path is in the sandbox, force it to be correct. MSVC sometimes returns a lowercase path.
path = correct_case_path + path[correct_case_path_len:]
else:
# Check the drive letter
if lowercase and (drive != drive.lower()):
path = drive.lower() + tail
elif (not lowercase) and (drive != drive.upper()):
path = drive.upper() + tail
node = path_to_node(bld.root, path, cached_nodes)
else:
base_node = bld.bldnode
# when calling find_resource, make sure the path does not begin by '..'
path = [k for k in Utils.split_path(path) if k and k != '.']
while path[0] == '..':
path = path[1:]
base_node = base_node.parent

node = path_to_node(base_node, path, cached_nodes)

if not node:
raise ValueError('could not find %r for %r' % (path, self))
else:
if not c_preproc.go_absolute:
if not (node.is_child_of(bld.srcnode) or node.is_child_of(bld.bldnode)):
# System library
Logs.debug('msvcdeps: Ignoring system include %r' % node)
continue

if id(node) == id(self.inputs[0]):
# Self-dependency
continue

resolved_nodes.append(node)

bld.node_deps[self.uid()] = resolved_nodes
bld.raw_deps[self.uid()] = unresolved_names

try:
del self.cache_sig
except AttributeError:
pass

Task.Task.post_run(self)

def scan(self):
if self.env.CC_NAME not in supported_compilers:
return super(derived_class, self).scan()

resolved_nodes = self.generator.bld.node_deps.get(self.uid(), [])
unresolved_names = []
return (resolved_nodes, unresolved_names)

def sig_implicit_deps(self):
if self.env.CC_NAME not in supported_compilers:
return super(derived_class, self).sig_implicit_deps()

try:
return Task.Task.sig_implicit_deps(self)
except Errors.WafError:
return Utils.SIG_NIL

def exec_response_command(self, cmd, **kw):
# exec_response_command() is only called from inside msvc.py anyway
assert self.env.CC_NAME in supported_compilers

# Only bother adding '/showIncludes' to compile tasks
if isinstance(self, (c.c, cxx.cxx)):
try:
# The Visual Studio IDE adds an environment variable that causes
# the MS compiler to send its textual output directly to the
# debugging window rather than normal stdout/stderr.
#
# This is unrecoverably bad for this tool because it will cause
# all the dependency scanning to see an empty stdout stream and
# assume that the file being compiled uses no headers.
#
# See http://blogs.msdn.com/b/freik/archive/2006/04/05/569025.aspx
#
# Attempting to repair the situation by deleting the offending
# envvar at this point in tool execution will not be good enough--
# its presence poisons the 'waf configure' step earlier. We just
# want to put a sanity check here in order to help developers
# quickly diagnose the issue if an otherwise-good Waf tree
# is then executed inside the MSVS IDE.
assert 'VS_UNICODE_OUTPUT' not in kw['env']

tmp = None

# This block duplicated from Waflib's msvc.py
if sys.platform.startswith('win') and isinstance(cmd, list) and len(' '.join(cmd)) >= 8192:
program = cmd[0]
cmd = [self.quote_response_command(x) for x in cmd]
(fd, tmp) = tempfile.mkstemp()
os.write(fd, '\r\n'.join(i.replace('\\', '\\\\') for i in cmd[1:]).encode())
os.close(fd)
cmd = [program, '@' + tmp]
# ... end duplication

self.msvcdeps_paths = []

kw['env'] = kw.get('env', os.environ.copy())
kw['cwd'] = kw.get('cwd', os.getcwd())
kw['quiet'] = Context.STDOUT
kw['output'] = Context.STDOUT

out = []

try:
raw_out = self.generator.bld.cmd_and_log(cmd, **kw)
ret = 0
except Errors.WafError as e:
raw_out = e.stdout
ret = e.returncode

for line in raw_out.splitlines():
if line.startswith(INCLUDE_PATTERN):
inc_path = line[len(INCLUDE_PATTERN):].strip()
Logs.debug('msvcdeps: found include %s' % inc_path)
self.msvcdeps_paths.append(inc_path)
else:
out.append(line)

# Pipe through the remaining stdout content (not related to /showIncludes)
if self.generator.bld.logger:
self.generator.bld.logger.debug('out: %s' % os.linesep.join(out))
else:
sys.stdout.write(os.linesep.join(out) + os.linesep)

finally:
if tmp:
try:
os.remove(tmp)
except OSError:
pass

return ret
else:
# Use base class's version of this method for linker tasks
return super(derived_class, self).exec_response_command(cmd, **kw)

def can_retrieve_cache(self):
# msvcdeps and netcaching are incompatible, so disable the cache
if self.env.CC_NAME not in supported_compilers:
return super(derived_class, self).can_retrieve_cache()
self.nocache = True # Disable sending the file to the cache
return False

derived_class.post_run = post_run
derived_class.scan = scan
derived_class.sig_implicit_deps = sig_implicit_deps
derived_class.exec_response_command = exec_response_command
derived_class.can_retrieve_cache = can_retrieve_cache

for k in ('c', 'cxx'):
wrap_compiled_task(k)

+ 0
- 1025
waflib/extras/msvs.py
File diff suppressed because it is too large


+ 0
- 383
waflib/extras/netcache_client.py

@@ -1,383 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011-2015 (ita)

"""
A client for the network cache (playground/netcache/). Launch the server with:
./netcache_server, then use it for the builds by adding the following:

def build(bld):
bld.load('netcache_client')

The parameters should be present in the environment in the form:
NETCACHE=host:port waf configure build

Or in a more detailed way:
NETCACHE_PUSH=host:port NETCACHE_PULL=host:port waf configure build

where:
host: host where the server resides, by default localhost
port: by default push on 11001 and pull on 12001

Use the server provided in playground/netcache/Netcache.java
"""

import os, socket, time, atexit, sys
from waflib import Task, Logs, Utils, Build, Runner
from waflib.Configure import conf

BUF = 8192 * 16
HEADER_SIZE = 128
MODES = ['PUSH', 'PULL', 'PUSH_PULL']
STALE_TIME = 30 # seconds

GET = 'GET'
PUT = 'PUT'
LST = 'LST'
BYE = 'BYE'
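# Illustrative wire format (see put_data/read_header below): every message
# starts with a HEADER_SIZE-byte header of comma-separated fields padded
# with spaces, e.g. uploading the first output file (4096 bytes) of a task:
#   'PUT,<hex task signature>,0,4096'.ljust(HEADER_SIZE)
# followed by the raw file contents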

all_sigs_in_cache = (0.0, [])

def put_data(conn, data):
if sys.hexversion > 0x3000000:
data = data.encode('iso8859-1')
cnt = 0
while cnt < len(data):
sent = conn.send(data[cnt:])
if sent == 0:
raise RuntimeError('connection ended')
cnt += sent

push_connections = Runner.Queue(0)
pull_connections = Runner.Queue(0)
def get_connection(push=False):
# return a new connection... do not forget to release it!
try:
if push:
ret = push_connections.get(block=False)
else:
ret = pull_connections.get(block=False)
except Exception:
ret = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if push:
ret.connect(Task.push_addr)
else:
ret.connect(Task.pull_addr)
return ret

def release_connection(conn, msg='', push=False):
if conn:
if push:
push_connections.put(conn)
else:
pull_connections.put(conn)

def close_connection(conn, msg=''):
if conn:
data = '%s,%s' % (BYE, msg)
try:
put_data(conn, data.ljust(HEADER_SIZE))
except:
pass
try:
conn.close()
except:
pass

def close_all():
for q in (push_connections, pull_connections):
while q.qsize():
conn = q.get()
try:
close_connection(conn)
except:
# ignore errors when cleaning up
pass
atexit.register(close_all)

def read_header(conn):
cnt = 0
buf = []
while cnt < HEADER_SIZE:
data = conn.recv(HEADER_SIZE - cnt)
if not data:
#import traceback
#traceback.print_stack()
raise ValueError('connection ended when reading a header %r' % buf)
buf.append(data)
cnt += len(data)
if sys.hexversion > 0x3000000:
ret = ''.encode('iso8859-1').join(buf)
ret = ret.decode('iso8859-1')
else:
ret = ''.join(buf)
return ret

def check_cache(conn, ssig):
"""
List the files on the server; this is an optimization that assumes
concurrent builds are rare
"""
global all_sigs_in_cache
if not STALE_TIME:
return
if time.time() - all_sigs_in_cache[0] > STALE_TIME:

params = (LST,'')
put_data(conn, ','.join(params).ljust(HEADER_SIZE))

# read what is coming back
ret = read_header(conn)
size = int(ret.split(',')[0])

buf = []
cnt = 0
while cnt < size:
data = conn.recv(min(BUF, size-cnt))
if not data:
raise ValueError('connection ended %r %r' % (cnt, size))
buf.append(data)
cnt += len(data)

if sys.hexversion > 0x3000000:
ret = ''.encode('iso8859-1').join(buf)
ret = ret.decode('iso8859-1')
else:
ret = ''.join(buf)

all_sigs_in_cache = (time.time(), ret.splitlines())
Logs.debug('netcache: server cache has %r entries' % len(all_sigs_in_cache[1]))

if not ssig in all_sigs_in_cache[1]:
raise ValueError('no file %s in cache' % ssig)

class MissingFile(Exception):
pass

def recv_file(conn, ssig, count, p):
check_cache(conn, ssig)

params = (GET, ssig, str(count))
put_data(conn, ','.join(params).ljust(HEADER_SIZE))
data = read_header(conn)

size = int(data.split(',')[0])

if size == -1:
raise MissingFile('no file %s - %s in cache' % (ssig, count))

# get the file, writing immediately
# TODO a tmp file would be better
f = open(p, 'wb')
cnt = 0
while cnt < size:
data = conn.recv(min(BUF, size-cnt))
if not data:
raise ValueError('connection ended %r %r' % (cnt, size))
f.write(data)
cnt += len(data)
f.close()

def sock_send(conn, ssig, cnt, p):
#print "pushing %r %r %r" % (ssig, cnt, p)
size = os.stat(p).st_size
params = (PUT, ssig, str(cnt), str(size))
put_data(conn, ','.join(params).ljust(HEADER_SIZE))
f = open(p, 'rb')
cnt = 0
while cnt < size:
r = f.read(min(BUF, size-cnt))
while r:
k = conn.send(r)
if not k:
raise ValueError('connection ended')
cnt += k
r = r[k:]

def can_retrieve_cache(self):
if not Task.pull_addr:
return False
if not self.outputs:
return False
self.cached = False

cnt = 0
sig = self.signature()
ssig = Utils.to_hex(self.uid() + sig)

conn = None
err = False
try:
try:
conn = get_connection()
for node in self.outputs:
p = node.abspath()
recv_file(conn, ssig, cnt, p)
cnt += 1
except MissingFile as e:
Logs.debug('netcache: file is not in the cache %r' % e)
err = True

except Exception as e:
Logs.debug('netcache: could not get the files %r' % e)
err = True

# broken connection? remove this one
close_connection(conn)
conn = None
finally:
release_connection(conn)
if err:
return False

for node in self.outputs:
node.sig = sig
#if self.generator.bld.progress_bar < 1:
# self.generator.bld.to_log('restoring from cache %r\n' % node.abspath())

self.cached = True
return True

@Utils.run_once
def put_files_cache(self):
if not Task.push_addr:
return
if not self.outputs:
return
if getattr(self, 'cached', None):
return

#print "called put_files_cache", id(self)
bld = self.generator.bld
sig = self.signature()
ssig = Utils.to_hex(self.uid() + sig)

conn = None
cnt = 0
try:
for node in self.outputs:
# We could re-create the signature of the task with the signature of the outputs
# in practice, this means hashing the output files
# this is unnecessary
try:
if not conn:
conn = get_connection(push=True)
sock_send(conn, ssig, cnt, node.abspath())
except Exception as e:
Logs.debug("netcache: could not push the files %r" % e)

# broken connection? remove this one
close_connection(conn)
conn = None
cnt += 1
finally:
release_connection(conn, push=True)

bld.task_sigs[self.uid()] = self.cache_sig

def hash_env_vars(self, env, vars_lst):
# reimplement so that the resulting hash does not depend on local paths
if not env.table:
env = env.parent
if not env:
return Utils.SIG_NIL

idx = str(id(env)) + str(vars_lst)
try:
cache = self.cache_env
except AttributeError:
cache = self.cache_env = {}
else:
try:
return self.cache_env[idx]
except KeyError:
pass

v = str([env[a] for a in vars_lst])
v = v.replace(self.srcnode.abspath().__repr__()[:-1], '')
m = Utils.md5()
m.update(v.encode())
ret = m.digest()

Logs.debug('envhash: %r %r', ret, v)

cache[idx] = ret

return ret

def uid(self):
# reimplement so that the signature does not depend on local paths
try:
return self.uid_
except AttributeError:
m = Utils.md5()
src = self.generator.bld.srcnode
up = m.update
up(self.__class__.__name__.encode())
for x in self.inputs + self.outputs:
up(x.path_from(src).encode())
self.uid_ = m.digest()
return self.uid_


def make_cached(cls):
if getattr(cls, 'nocache', None):
return

m1 = cls.run
def run(self):
if self.can_retrieve_cache():
return 0
return m1(self)
cls.run = run

m2 = cls.post_run
def post_run(self):
bld = self.generator.bld
ret = m2(self)
if bld.cache_global:
self.put_files_cache()
return ret
cls.post_run = post_run

@conf
def setup_netcache(ctx, push_addr, pull_addr):
Task.Task.can_retrieve_cache = can_retrieve_cache
Task.Task.put_files_cache = put_files_cache
Task.Task.uid = uid
Task.push_addr = push_addr
Task.pull_addr = pull_addr
Build.BuildContext.hash_env_vars = hash_env_vars
ctx.cache_global = True

for x in Task.classes.values():
make_cached(x)

def build(bld):
if not 'NETCACHE' in os.environ and not 'NETCACHE_PULL' in os.environ and not 'NETCACHE_PUSH' in os.environ:
Logs.warn('Setting NETCACHE_PULL=127.0.0.1:12001 and NETCACHE_PUSH=127.0.0.1:11001')
os.environ['NETCACHE_PULL'] = '127.0.0.1:12001'
os.environ['NETCACHE_PUSH'] = '127.0.0.1:11001'

if 'NETCACHE' in os.environ:
if not 'NETCACHE_PUSH' in os.environ:
os.environ['NETCACHE_PUSH'] = os.environ['NETCACHE']
if not 'NETCACHE_PULL' in os.environ:
os.environ['NETCACHE_PULL'] = os.environ['NETCACHE']

v = os.environ['NETCACHE_PULL']
if v:
h, p = v.split(':')
pull_addr = (h, int(p))
else:
pull_addr = None

v = os.environ['NETCACHE_PUSH']
if v:
h, p = v.split(':')
push_addr = (h, int(p))
else:
push_addr = None

setup_netcache(bld, push_addr, pull_addr)


+ 0
- 24
waflib/extras/nobuild.py

@@ -1,24 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2015 (ita)

"""
Override the build commands to write empty files.
This is useful for profiling and evaluating the Python overhead.

To use::

def build(bld):
...
bld.load('nobuild')

"""

from waflib import Task
def build(bld):
def run(self):
for x in self.outputs:
x.write('')
for (name, cls) in Task.classes.items():
cls.run = run


+ 0
- 54
waflib/extras/objcopy.py

@@ -1,54 +0,0 @@
#!/usr/bin/python
# Grygoriy Fuchedzhy 2010

"""
Support for converting linked targets to ihex, srec or binary files using
objcopy. Use the 'objcopy' feature in conjunction with the 'cc' or 'cxx'
feature. The 'objcopy' feature uses the following attributes:

objcopy_bfdname Target object format name (eg. ihex, srec, binary).
Defaults to ihex.
objcopy_target File name used for objcopy output. This defaults to the
target name with objcopy_bfdname as extension.
objcopy_install_path Install path for objcopy_target file. Defaults to ${PREFIX}/firmware.
objcopy_flags Additional flags passed to objcopy.
"""

from waflib.Utils import def_attrs
from waflib import Task
from waflib.TaskGen import feature, after_method

class objcopy(Task.Task):
run_str = '${OBJCOPY} -O ${TARGET_BFDNAME} ${OBJCOPYFLAGS} ${SRC} ${TGT}'
color = 'CYAN'

@feature('objcopy')
@after_method('apply_link')
def objcopy(self):
def_attrs(self,
objcopy_bfdname = 'ihex',
objcopy_target = None,
objcopy_install_path = "${PREFIX}/firmware",
objcopy_flags = '')

link_output = self.link_task.outputs[0]
if not self.objcopy_target:
self.objcopy_target = link_output.change_ext('.' + self.objcopy_bfdname).name
task = self.create_task('objcopy',
src=link_output,
tgt=self.path.find_or_declare(self.objcopy_target))

task.env.append_unique('TARGET_BFDNAME', self.objcopy_bfdname)
# def_attrs above guarantees the attribute exists, so no try/except is needed
task.env.append_unique('OBJCOPYFLAGS', self.objcopy_flags)

if self.objcopy_install_path:
self.bld.install_files(self.objcopy_install_path,
task.outputs[0],
env=task.env.derive())

def configure(ctx):
ctx.find_program('objcopy', var='OBJCOPY', mandatory=True)


+ 0
- 331
waflib/extras/ocaml.py

@@ -1,331 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)

"ocaml support"

import os, re
from waflib import Utils, Task
from waflib.Logs import error
from waflib.TaskGen import feature, before_method, after_method, extension

EXT_MLL = ['.mll']
EXT_MLY = ['.mly']
EXT_MLI = ['.mli']
EXT_MLC = ['.c']
EXT_ML = ['.ml']

open_re = re.compile(r'^\s*open\s+([a-zA-Z]+)(;;){0,1}$', re.M)
foo = re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""", re.M)
def filter_comments(txt):
meh = [0]
def repl(m):
if m.group(1): meh[0] += 1
elif m.group(2): meh[0] -= 1
elif not meh[0]: return m.group(0)
return ''
return foo.sub(repl, txt)
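# Example: in "let x = 1 (* open Fake *)" followed by a line "open Foo;;",
# the commented-out open is stripped first, so only 'open Foo' matches
# open_re and scan() below records a dependency on foo.ml (or Foo.ml)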

def scan(self):
node = self.inputs[0]
code = filter_comments(node.read())

global open_re
names = []
import_iterator = open_re.finditer(code)
if import_iterator:
for import_match in import_iterator:
names.append(import_match.group(1))
found_lst = []
raw_lst = []
for name in names:
nd = None
for x in self.incpaths:
nd = x.find_resource(name.lower()+'.ml')
if not nd: nd = x.find_resource(name+'.ml')
if nd:
found_lst.append(nd)
break
else:
raw_lst.append(name)

return (found_lst, raw_lst)

native_lst=['native', 'all', 'c_object']
bytecode_lst=['bytecode', 'all']

@feature('ocaml')
def init_ml(self):
Utils.def_attrs(self,
type = 'all',
incpaths_lst = [],
bld_incpaths_lst = [],
mlltasks = [],
mlytasks = [],
mlitasks = [],
native_tasks = [],
bytecode_tasks = [],
linktasks = [],
bytecode_env = None,
native_env = None,
compiled_tasks = [],
includes = '',
uselib = '',
are_deps_set = 0)

@feature('ocaml')
@after_method('init_ml')
def init_envs_ml(self):

self.islibrary = getattr(self, 'islibrary', False)

global native_lst, bytecode_lst
self.native_env = None
if self.type in native_lst:
self.native_env = self.env.derive()
if self.islibrary: self.native_env['OCALINKFLAGS'] = '-a'

self.bytecode_env = None
if self.type in bytecode_lst:
self.bytecode_env = self.env.derive()
if self.islibrary: self.bytecode_env['OCALINKFLAGS'] = '-a'

if self.type == 'c_object':
self.native_env.append_unique('OCALINKFLAGS_OPT', '-output-obj')

@feature('ocaml')
@before_method('apply_vars_ml')
@after_method('init_envs_ml')
def apply_incpaths_ml(self):
inc_lst = self.includes.split()
lst = self.incpaths_lst
for dir in inc_lst:
node = self.path.find_dir(dir)
if not node:
error("node not found: " + str(dir))
continue
if not node in lst:
lst.append(node)
self.bld_incpaths_lst.append(node)
# now the nodes are added to self.incpaths_lst

@feature('ocaml')
@before_method('process_source')
def apply_vars_ml(self):
for i in self.incpaths_lst:
if self.bytecode_env:
app = self.bytecode_env.append_value
app('OCAMLPATH', ['-I', i.bldpath(), '-I', i.srcpath()])

if self.native_env:
app = self.native_env.append_value
app('OCAMLPATH', ['-I', i.bldpath(), '-I', i.srcpath()])

varnames = ['INCLUDES', 'OCAMLFLAGS', 'OCALINKFLAGS', 'OCALINKFLAGS_OPT']
for name in self.uselib.split():
for vname in varnames:
cnt = self.env[vname+'_'+name]
if cnt:
if self.bytecode_env: self.bytecode_env.append_value(vname, cnt)
if self.native_env: self.native_env.append_value(vname, cnt)

@feature('ocaml')
@after_method('process_source')
def apply_link_ml(self):

if self.bytecode_env:
ext = self.islibrary and '.cma' or '.run'

linktask = self.create_task('ocalink')
linktask.bytecode = 1
linktask.set_outputs(self.path.find_or_declare(self.target + ext))
linktask.env = self.bytecode_env
self.linktasks.append(linktask)

if self.native_env:
if self.type == 'c_object': ext = '.o'
elif self.islibrary: ext = '.cmxa'
else: ext = ''

linktask = self.create_task('ocalinkx')
linktask.set_outputs(self.path.find_or_declare(self.target + ext))
linktask.env = self.native_env
self.linktasks.append(linktask)

# we produce a .o file to be used by gcc
self.compiled_tasks.append(linktask)

@extension(*EXT_MLL)
def mll_hook(self, node):
mll_task = self.create_task('ocamllex', node, node.change_ext('.ml'))
mll_task.env = self.native_env.derive()
self.mlltasks.append(mll_task)

self.source.append(mll_task.outputs[0])

@extension(*EXT_MLY)
def mly_hook(self, node):
mly_task = self.create_task('ocamlyacc', node, [node.change_ext('.ml'), node.change_ext('.mli')])
mly_task.env = self.native_env.derive()
self.mlytasks.append(mly_task)
self.source.append(mly_task.outputs[0])

task = self.create_task('ocamlcmi', mly_task.outputs[1], mly_task.outputs[1].change_ext('.cmi'))
task.env = self.native_env.derive()

@extension(*EXT_MLI)
def mli_hook(self, node):
task = self.create_task('ocamlcmi', node, node.change_ext('.cmi'))
task.env = self.native_env.derive()
self.mlitasks.append(task)

@extension(*EXT_MLC)
def mlc_hook(self, node):
task = self.create_task('ocamlcc', node, node.change_ext('.o'))
task.env = self.native_env.derive()
self.compiled_tasks.append(task)

@extension(*EXT_ML)
def ml_hook(self, node):
if self.native_env:
task = self.create_task('ocamlx', node, node.change_ext('.cmx'))
task.env = self.native_env.derive()
task.incpaths = self.bld_incpaths_lst
self.native_tasks.append(task)

if self.bytecode_env:
task = self.create_task('ocaml', node, node.change_ext('.cmo'))
task.env = self.bytecode_env.derive()
task.bytecode = 1
task.incpaths = self.bld_incpaths_lst
self.bytecode_tasks.append(task)

def compile_may_start(self):

if not getattr(self, 'flag_deps', ''):
self.flag_deps = 1

# the evil part is that we can only compute the dependencies after the
# source files can be read (this means actually producing the source files)
if getattr(self, 'bytecode', ''): alltasks = self.generator.bytecode_tasks
else: alltasks = self.generator.native_tasks

self.signature() # ensure that files are scanned - unfortunately
tree = self.generator.bld
for node in self.inputs:
lst = tree.node_deps[self.uid()]
for depnode in lst:
for t in alltasks:
if t == self: continue
if depnode in t.inputs:
self.set_run_after(t)

# TODO necessary to get the signature right - for now
delattr(self, 'cache_sig')
self.signature()

return Task.Task.runnable_status(self)

class ocamlx(Task.Task):
"""native caml compilation"""
color = 'GREEN'
run_str = '${OCAMLOPT} ${OCAMLPATH} ${OCAMLFLAGS} ${OCAMLINCLUDES} -c -o ${TGT} ${SRC}'
scan = scan
runnable_status = compile_may_start

class ocaml(Task.Task):
"""bytecode caml compilation"""
color = 'GREEN'
run_str = '${OCAMLC} ${OCAMLPATH} ${OCAMLFLAGS} ${OCAMLINCLUDES} -c -o ${TGT} ${SRC}'
scan = scan
runnable_status = compile_may_start

class ocamlcmi(Task.Task):
"""interface generator (the .i files?)"""
color = 'BLUE'
run_str = '${OCAMLC} ${OCAMLPATH} ${OCAMLINCLUDES} -o ${TGT} -c ${SRC}'
before = ['ocamlcc', 'ocaml']

class ocamlcc(Task.Task):
"""ocaml to c interfaces"""
color = 'GREEN'
run_str = 'cd ${TGT[0].bld_dir()} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${OCAMLINCLUDES} -c ${SRC[0].abspath()}'

class ocamllex(Task.Task):
"""lexical generator"""
color = 'BLUE'
run_str = '${OCAMLLEX} ${SRC} -o ${TGT}'
before = ['ocamlcmi', 'ocaml', 'ocamlcc']

class ocamlyacc(Task.Task):
"""parser generator"""
color = 'BLUE'
run_str = '${OCAMLYACC} -b ${tsk.base()} ${SRC}'
before = ['ocamlcmi', 'ocaml', 'ocamlcc']

def base(self):
node = self.outputs[0]
s = os.path.splitext(node.name)[0]
return node.bld_dir() + os.sep + s

def link_may_start(self):

if getattr(self, 'bytecode', 0): alltasks = self.generator.bytecode_tasks
else: alltasks = self.generator.native_tasks

for x in alltasks:
if not x.hasrun:
return Task.ASK_LATER

if not getattr(self, 'order', ''):

# now reorder the inputs given the task dependencies
# this part is difficult, we do not have a total order on the tasks
# if the dependencies are wrong, this may not stop
seen = []
pendant = []+alltasks
while pendant:
task = pendant.pop(0)
if task in seen: continue
for x in task.run_after:
if not x in seen:
pendant.append(task)
break
else:
seen.append(task)
self.inputs = [x.outputs[0] for x in seen]
self.order = 1
return Task.Task.runnable_status(self)

class ocalink(Task.Task):
"""bytecode caml link"""
color = 'YELLOW'
run_str = '${OCAMLC} -o ${TGT} ${OCAMLINCLUDES} ${OCALINKFLAGS} ${SRC}'
runnable_status = link_may_start
after = ['ocaml', 'ocamlcc']

class ocalinkx(Task.Task):
"""native caml link"""
color = 'YELLOW'
run_str = '${OCAMLOPT} -o ${TGT} ${OCAMLINCLUDES} ${OCALINKFLAGS_OPT} ${SRC}'
runnable_status = link_may_start
after = ['ocamlx', 'ocamlcc']

def configure(conf):
opt = conf.find_program('ocamlopt', var='OCAMLOPT', mandatory=False)
occ = conf.find_program('ocamlc', var='OCAMLC', mandatory=False)
if (not opt) or (not occ):
conf.fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH')

v = conf.env
v['OCAMLC'] = occ
v['OCAMLOPT'] = opt
v['OCAMLLEX'] = conf.find_program('ocamllex', var='OCAMLLEX', mandatory=False)
v['OCAMLYACC'] = conf.find_program('ocamlyacc', var='OCAMLYACC', mandatory=False)
v['OCAMLFLAGS'] = ''
where = conf.cmd_and_log(conf.env.OCAMLC + ['-where']).strip()+os.sep
v['OCAMLLIB'] = where
v['LIBPATH_OCAML'] = where
v['INCLUDES_OCAML'] = where
v['LIB_OCAML'] = 'camlrun'


+ 0
- 76
waflib/extras/package.py

@@ -1,76 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011

"""
Obtain packages, unpack them in a location, and add associated uselib variables
(CFLAGS_pkgname, LIBPATH_pkgname, etc).

The default is to use a Dependencies.txt file in the source directory.

This is a work in progress.

Usage:

def options(opt):
opt.load('package')

def configure(conf):
conf.load_packages()
"""

from waflib import Logs
from waflib.Configure import conf

try:
from urllib import request
except ImportError:
from urllib import urlopen
else:
urlopen = request.urlopen


CACHEVAR = 'WAFCACHE_PACKAGE'

@conf
def get_package_cache_dir(self):
cache = None
if CACHEVAR in self.environ:
cache = self.environ[CACHEVAR]
cache = self.root.make_node(cache)
elif self.env[CACHEVAR]:
cache = self.env[CACHEVAR]
cache = self.root.make_node(cache)
else:
cache = self.srcnode.make_node('.wafcache_package')
cache.mkdir()
return cache

@conf
def download_archive(self, src, dst):
for x in self.env.PACKAGE_REPO:
url = '/'.join((x, src))
try:
web = urlopen(url)
try:
if web.getcode() != 200:
continue
except AttributeError:
pass
except Exception:
# on python3 urlopen throws an exception
# python 2.3 does not have getcode and throws an exception to fail
continue
else:
tmp = self.root.make_node(dst)
tmp.write(web.read())
Logs.warn('Downloaded %s from %s' % (tmp.abspath(), url))
break
else:
self.fatal('Could not get the package %s' % src)
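# note: self.env.PACKAGE_REPO is assumed to be a list of base URLs defined
# by the user, e.g. (hypothetical):
#   conf.env.PACKAGE_REPO = ['http://mirror-a.example.org/packages']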

@conf
def load_packages(self):
cache = self.get_package_cache_dir()
# read the dependencies, get the archives, ..


+ 0
- 442
waflib/extras/parallel_debug.py

@@ -1,442 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2007-2010 (ita)

"""
Debugging helper for parallel compilation, outputs
a file named pdebug.svg in the source directory::

def options(opt):
opt.load('parallel_debug')
def build(bld):
...
"""

import os, time, sys, re
try: from Queue import Queue
except: from queue import Queue
from waflib import Runner, Options, Utils, Task, Logs, Errors

#import random
#random.seed(100)

SVG_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.0"
x="${project.x}" y="${project.y}" width="${project.width}" height="${project.height}" id="svg602" xml:space="preserve">

<style type='text/css' media='screen'>
g.over rect { stroke:#FF0000; fill-opacity:0.4 }
</style>

<script type='text/javascript'><![CDATA[
var svg = document.getElementsByTagName('svg')[0];

svg.addEventListener('mouseover', function(e) {
var g = e.target.parentNode;
var x = document.getElementById('r_' + g.id);
if (x) {
g.setAttribute('class', g.getAttribute('class') + ' over');
x.setAttribute('class', x.getAttribute('class') + ' over');
showInfo(e, g.id);
}
}, false);

svg.addEventListener('mouseout', function(e) {
var g = e.target.parentNode;
var x = document.getElementById('r_' + g.id);
if (x) {
g.setAttribute('class', g.getAttribute('class').replace(' over', ''));
x.setAttribute('class', x.getAttribute('class').replace(' over', ''));
hideInfo(e);
}
}, false);

function showInfo(evt, txt) {
tooltip = document.getElementById('tooltip');

var t = document.getElementById('tooltiptext');
t.firstChild.data = txt;

var x = evt.clientX + 9;
if (x > 250) { x -= t.getComputedTextLength() + 16; }
var y = evt.clientY + 20;
tooltip.setAttribute("transform", "translate(" + x + "," + y + ")");
tooltip.setAttributeNS(null, "visibility", "visible");

var r = document.getElementById('tooltiprect');
r.setAttribute('width', t.getComputedTextLength() + 6);
}

function hideInfo(evt) {
var tooltip = document.getElementById('tooltip');
tooltip.setAttributeNS(null,"visibility","hidden");
}
]]></script>

<!-- inkscape requires a big rectangle or it will not export the pictures properly -->
<rect
x='${project.x}' y='${project.y}' width='${project.width}' height='${project.height}'
style="font-size:10;fill:#ffffff;fill-opacity:0.01;fill-rule:evenodd;stroke:#ffffff;"
/>

${if project.title}
<text x="${project.title_x}" y="${project.title_y}"
style="font-size:15px; text-anchor:middle; font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans">${project.title}</text>
${endif}


${for cls in project.groups}
<g id='${cls.classname}'>
${for rect in cls.rects}
<rect x='${rect.x}' y='${rect.y}' width='${rect.width}' height='${rect.height}' style="font-size:10;fill:${rect.color};fill-rule:evenodd;stroke:#000000;stroke-width:0.4;" />
${endfor}
</g>
${endfor}

${for info in project.infos}
<g id='r_${info.classname}'>
<rect x='${info.x}' y='${info.y}' width='${info.width}' height='${info.height}' style="font-size:10;fill:${info.color};fill-rule:evenodd;stroke:#000000;stroke-width:0.4;" />
<text x="${info.text_x}" y="${info.text_y}"
style="font-size:12px;font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans"
>${info.text}</text>
</g>
${endfor}

<g transform="translate(0,0)" visibility="hidden" id="tooltip">
<rect id="tooltiprect" y="-15" x="-3" width="1" height="20" style="stroke:black;fill:#edefc2;stroke-width:1"/>
<text id="tooltiptext" style="font-family:Arial; font-size:12;fill:black;" />
</g>

</svg>
"""

COMPILE_TEMPLATE = '''def f(project):
lst = []
def xml_escape(value):
return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")

%s
return ''.join(lst)
'''
reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)
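# Illustrative transformation: compile_template('<t>${project.title}</t>')
# returns a function roughly equivalent to
#   def f(project):
#       lst = []
#       lst.append('<t>')
#       lst.append(str(project.title))
#       lst.append('</t>')
#       return ''.join(lst)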
def compile_template(line):

extr = []
def repl(match):
g = match.group
if g('dollar'): return "$"
elif g('backslash'):
return "\\"
elif g('subst'):
extr.append(g('code'))
return "<<|@|>>"
return None

line2 = reg_act.sub(repl, line)
params = line2.split('<<|@|>>')
assert(extr)


indent = 0
buf = []

def app(txt):
buf.append(indent * '\t' + txt)

for x in range(len(extr)):
if params[x]:
app("lst.append(%r)" % params[x])

f = extr[x]
if f.startswith('if') or f.startswith('for'):
app(f + ':')
indent += 1
elif f.startswith('py:'):
app(f[3:])
elif f.startswith('endif') or f.startswith('endfor'):
indent -= 1
elif f.startswith('else') or f.startswith('elif'):
indent -= 1
app(f + ':')
indent += 1
elif f.startswith('xml:'):
app('lst.append(xml_escape(%s))' % f[4:])
else:
#app('lst.append((%s) or "cannot find %s")' % (f, f))
app('lst.append(str(%s))' % f)

if extr:
if params[-1]:
app("lst.append(%r)" % params[-1])

fun = COMPILE_TEMPLATE % "\n\t".join(buf)
# uncomment the following to debug the template
#for i, x in enumerate(fun.splitlines()):
# print i, x
return Task.funex(fun)

# red #ff4d4d
# green #4da74d
# lila #a751ff

color2code = {
'GREEN' : '#4da74d',
'YELLOW' : '#fefe44',
'PINK' : '#a751ff',
'RED' : '#cc1d1d',
'BLUE' : '#6687bb',
'CYAN' : '#34e2e2',
}

mp = {}
info = [] # list of (text,color)

def map_to_color(name):
if name in mp:
return mp[name]
try:
cls = Task.classes[name]
except KeyError:
return color2code['RED']
if cls.color in mp:
return mp[cls.color]
if cls.color in color2code:
return color2code[cls.color]
return color2code['RED']

def process(self):
m = self.master
if m.stop:
m.out.put(self)
return

self.master.set_running(1, id(Utils.threading.currentThread()), self)

# remove the task signature immediately before it is executed
# in case of failure the task will be executed again
try:
del self.generator.bld.task_sigs[self.uid()]
except:
pass

try:
self.generator.bld.returned_tasks.append(self)
self.log_display(self.generator.bld)
ret = self.run()
except Exception:
self.err_msg = Utils.ex_stack()
self.hasrun = Task.EXCEPTION

# TODO cleanup
m.error_handler(self)
m.out.put(self)
return

if ret:
self.err_code = ret
self.hasrun = Task.CRASHED
else:
try:
self.post_run()
except Errors.WafError:
pass
except Exception:
self.err_msg = Utils.ex_stack()
self.hasrun = Task.EXCEPTION
else:
self.hasrun = Task.SUCCESS
if self.hasrun != Task.SUCCESS:
m.error_handler(self)

self.master.set_running(-1, id(Utils.threading.currentThread()), self)
m.out.put(self)
Task.TaskBase.process_back = Task.TaskBase.process
Task.TaskBase.process = process

old_start = Runner.Parallel.start
def do_start(self):
try:
Options.options.dband
except AttributeError:
self.bld.fatal('use def options(opt): opt.load("parallel_debug")!')

self.taskinfo = Queue()
old_start(self)
if self.dirty:
make_picture(self)
Runner.Parallel.start = do_start

def set_running(self, by, i, tsk):
self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by) )
Runner.Parallel.set_running = set_running

def name2class(name):
return name.replace(' ', '_').replace('.', '_')

def make_picture(producer):
# first, cast the parameters
if not hasattr(producer.bld, 'path'):
return

tmp = []
try:
while True:
tup = producer.taskinfo.get(False)
tmp.append(list(tup))
except:
pass

try:
ini = float(tmp[0][2])
except:
return

if not info:
seen = []
for x in tmp:
name = x[3]
if not name in seen:
seen.append(name)
else:
continue

info.append((name, map_to_color(name)))
info.sort(key=lambda x: x[0])

thread_count = 0
acc = []
for x in tmp:
thread_count += x[6]
acc.append("%d %d %f %r %d %d %d" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count))

data_node = producer.bld.path.make_node('pdebug.dat')
data_node.write('\n'.join(acc))

tmp = [lst[:2] + [float(lst[2]) - ini] + lst[3:] for lst in tmp]

st = {}
for l in tmp:
if not l[0] in st:
st[l[0]] = len(st.keys())
tmp = [ [st[lst[0]]] + lst[1:] for lst in tmp ]
THREAD_AMOUNT = len(st.keys())

st = {}
for l in tmp:
if not l[1] in st:
st[l[1]] = len(st.keys())
tmp = [ [lst[0]] + [st[lst[1]]] + lst[2:] for lst in tmp ]


BAND = Options.options.dband

seen = {}
acc = []
for x in range(len(tmp)):
line = tmp[x]
id = line[1]

if id in seen:
continue
seen[id] = True

begin = line[2]
thread_id = line[0]
for y in range(x + 1, len(tmp)):
line = tmp[y]
if line[1] == id:
end = line[2]
#print id, thread_id, begin, end
#acc.append( ( 10*thread_id, 10*(thread_id+1), 10*begin, 10*end ) )
acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3]) )
break

if Options.options.dmaxtime < 0.1:
gwidth = 1
for x in tmp:
m = BAND * x[2]
if m > gwidth:
gwidth = m
else:
gwidth = BAND * Options.options.dmaxtime

ratio = float(Options.options.dwidth) / gwidth
gwidth = Options.options.dwidth
gheight = BAND * (THREAD_AMOUNT + len(info) + 1.5)


# simple data model for our template
class tobject(object):
pass

model = tobject()
model.x = 0
model.y = 0
model.width = gwidth + 4
model.height = gheight + 4

model.title = Options.options.dtitle
model.title_x = gwidth / 2
model.title_y = gheight - 5

groups = {}
for (x, y, w, h, clsname) in acc:
try:
groups[clsname].append((x, y, w, h))
except:
groups[clsname] = [(x, y, w, h)]

# groups of rectangles (else js highlighting is slow)
model.groups = []
for cls in groups:
g = tobject()
model.groups.append(g)
g.classname = name2class(cls)
g.rects = []
for (x, y, w, h) in groups[cls]:
r = tobject()
g.rects.append(r)
r.x = 2 + x * ratio
r.y = 2 + y
r.width = w * ratio
r.height = h
r.color = map_to_color(cls)

cnt = THREAD_AMOUNT

# caption
model.infos = []
for (text, color) in info:
inf = tobject()
model.infos.append(inf)
inf.classname = name2class(text)
inf.x = 2 + BAND
inf.y = 5 + (cnt + 0.5) * BAND
inf.width = BAND/2
inf.height = BAND/2
inf.color = color

inf.text = text
inf.text_x = 2 + 2 * BAND
inf.text_y = 5 + (cnt + 0.5) * BAND + 10

cnt += 1

# write the file...
template1 = compile_template(SVG_TEMPLATE)
txt = template1(model)

node = producer.bld.path.make_node('pdebug.svg')
node.write(txt)
Logs.warn('Created the diagram %r' % node.abspath())

def options(opt):
opt.add_option('--dtitle', action='store', default='Parallel build representation for %r' % ' '.join(sys.argv),
help='title for the svg diagram', dest='dtitle')
opt.add_option('--dwidth', action='store', type='int', help='diagram width', default=800, dest='dwidth')
opt.add_option('--dtime', action='store', type='float', help='recording interval in seconds', default=0.009, dest='dtime')
opt.add_option('--dband', action='store', type='int', help='band width', default=22, dest='dband')
opt.add_option('--dmaxtime', action='store', type='float', help='maximum time, for drawing fair comparisons', default=0, dest='dmaxtime')


+ 0
- 148
waflib/extras/pch.py

@@ -1,148 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# Alexander Afanasyev (UCLA), 2014

"""
Enable precompiled C++ header support (currently only clang++ and g++ are supported)

To use this tool, wscript should look like:

def options(opt):
opt.load('pch')
# This will add the `--without-pch` configure option.
# Precompiled header support is enabled unless --without-pch is given at the configure stage

def configure(conf):
conf.load('pch')
# this will set conf.env.WITH_PCH if --without-pch was not given and a supported compiler is used
# Unless conf.env.WITH_PCH is set, the precompiled header support is disabled

def build(bld):
bld(features='cxx pch',
target='precompiled-headers',
name='precompiled-headers',
headers='a.h b.h c.h', # headers to pre-compile into `precompiled-headers`

# Other parameters to compile precompiled headers
# includes=...,
# export_includes=...,
# use=...,
# ...

# Exported parameters will be propagated even if precompiled headers are disabled
)

bld(
target='test',
features='cxx cxxprogram',
source='a.cpp b.cpp d.cpp main.cpp',
use='precompiled-headers',
)

# or

bld(
target='test',
features='pch cxx cxxprogram',
source='a.cpp b.cpp d.cpp main.cpp',
headers='a.h b.h c.h',
)

Note that the precompiled header must have include guards against multiple inclusion. If the guards are missing, any benefit of the precompiled header is voided and compilation may fail in some cases.
"""

import os
from waflib import Task, TaskGen, Utils
from waflib.Tools import c_preproc, cxx


PCH_COMPILER_OPTIONS = {
'clang++': [['-include'], '.pch', ['-x', 'c++-header']],
'g++': [['-include'], '.gch', ['-x', 'c++-header']],
}


def options(opt):
opt.add_option('--without-pch', action='store_false', default=True, dest='with_pch', help='''Do not use precompiled headers to speed up compilation (only g++ and clang++ are supported)''')

def configure(conf):
if (conf.options.with_pch and conf.env['COMPILER_CXX'] in PCH_COMPILER_OPTIONS.keys()):
conf.env.WITH_PCH = True
flags = PCH_COMPILER_OPTIONS[conf.env['COMPILER_CXX']]
conf.env.CXXPCH_F = flags[0]
conf.env.CXXPCH_EXT = flags[1]
conf.env.CXXPCH_FLAGS = flags[2]


@TaskGen.feature('pch')
@TaskGen.before('process_source')
def apply_pch(self):
if not self.env.WITH_PCH:
return

if getattr(self.bld, 'pch_tasks', None) is None:
self.bld.pch_tasks = {}

if getattr(self, 'headers', None) is None:
return

self.headers = self.to_nodes(self.headers)

if getattr(self, 'name', None):
if self.name in self.bld.pch_tasks:
self.bld.fatal("Duplicated 'pch' task with name %r" % self.name)

out = '%s.%d%s' % (self.target, self.idx, self.env['CXXPCH_EXT'])
out = self.path.find_or_declare(out)
task = self.create_task('gchx', self.headers, out)

# target should be an absolute path of `out`, but without precompiled header extension
task.target = out.abspath()[:-len(out.suffix())]

self.pch_task = task
if getattr(self, 'name', None):
self.bld.pch_tasks[self.name] = task

@TaskGen.feature('cxx')
@TaskGen.after_method('process_source', 'propagate_uselib_vars')
def add_pch(self):
if not (self.env['WITH_PCH'] and getattr(self, 'use', None) and getattr(self, 'compiled_tasks', None) and getattr(self.bld, 'pch_tasks', None)):
return

pch = None
# find pch task, if any

if getattr(self, 'pch_task', None):
pch = self.pch_task
else:
for use in Utils.to_list(self.use):
try:
pch = self.bld.pch_tasks[use]
except KeyError:
pass

if pch:
for x in self.compiled_tasks:
x.env.append_value('CXXFLAGS', self.env['CXXPCH_F'] + [pch.target])

class gchx(Task.Task):
run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${CXXPCH_FLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXXPCH_F:SRC} ${CXX_SRC_F}${SRC[0].abspath()} ${CXX_TGT_F}${TGT[0].abspath()}'
scan = c_preproc.scan
color = 'BLUE'
ext_out=['.h']

def runnable_status(self):
try:
node_deps = self.generator.bld.node_deps[self.uid()]
except KeyError:
node_deps = []
ret = Task.Task.runnable_status(self)
if ret == Task.SKIP_ME and self.env.CXX_NAME == 'clang':
t = os.stat(self.outputs[0].abspath()).st_mtime
for n in self.inputs + node_deps:
if os.stat(n.abspath()).st_mtime > t:
return Task.RUN_ME
return ret

+ 0
- 106
waflib/extras/pep8.py

@@ -1,106 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
#
# written by Sylvain Rouquette, 2011

'''
Install pep8 module:
$ easy_install pep8
or
$ pip install pep8

To add the pep8 tool to the waf file:
$ ./waf-light --tools=compat15,pep8
or, if you have waf >= 1.6.2
$ ./waf update --files=pep8


Then add this to your wscript:

[at]extension('.py', 'wscript')
def run_pep8(self, node):
self.create_task('Pep8', node)

'''

import threading
from waflib import TaskGen, Task, Options

pep8 = __import__('pep8')


class Pep8(Task.Task):
color = 'PINK'
lock = threading.Lock()

def check_options(self):
if pep8.options:
return
pep8.options = Options.options
pep8.options.prog = 'pep8'
excl = pep8.options.exclude.split(',')
pep8.options.exclude = [s.rstrip('/') for s in excl]
if pep8.options.filename:
pep8.options.filename = pep8.options.filename.split(',')
if pep8.options.select:
pep8.options.select = pep8.options.select.split(',')
else:
pep8.options.select = []
if pep8.options.ignore:
pep8.options.ignore = pep8.options.ignore.split(',')
elif pep8.options.select:
# Ignore all checks which are not explicitly selected
pep8.options.ignore = ['']
elif pep8.options.testsuite or pep8.options.doctest:
# For doctest and testsuite, all checks are required
pep8.options.ignore = []
else:
# The default choice: ignore controversial checks
pep8.options.ignore = pep8.DEFAULT_IGNORE.split(',')
pep8.options.physical_checks = pep8.find_checks('physical_line')
pep8.options.logical_checks = pep8.find_checks('logical_line')
pep8.options.counters = dict.fromkeys(pep8.BENCHMARK_KEYS, 0)
pep8.options.messages = {}

def run(self):
with Pep8.lock:
self.check_options()
pep8.input_file(self.inputs[0].abspath())
return 0 if not pep8.get_count() else -1


def options(opt):
opt.add_option('-q', '--quiet', default=0, action='count',
help="report only file names, or nothing with -qq")
opt.add_option('-r', '--repeat', action='store_true',
help="show all occurrences of the same error")
opt.add_option('--exclude', metavar='patterns',
default=pep8.DEFAULT_EXCLUDE,
help="exclude files or directories which match these "
"comma separated patterns (default: %s)" %
pep8.DEFAULT_EXCLUDE,
dest='exclude')
opt.add_option('--filename', metavar='patterns', default='*.py',
help="when parsing directories, only check filenames "
"matching these comma separated patterns (default: "
"*.py)")
opt.add_option('--select', metavar='errors', default='',
help="select errors and warnings (e.g. E,W6)")
opt.add_option('--ignore', metavar='errors', default='',
help="skip errors and warnings (e.g. E4,W)")
opt.add_option('--show-source', action='store_true',
help="show source code for each error")
opt.add_option('--show-pep8', action='store_true',
help="show text of PEP 8 for each error")
opt.add_option('--statistics', action='store_true',
help="count errors and warnings")
opt.add_option('--count', action='store_true',
help="print total number of errors and warnings "
"to standard error and set exit code to 1 if "
"total is not null")
opt.add_option('--benchmark', action='store_true',
help="measure processing speed")
opt.add_option('--testsuite', metavar='dir',
help="run regression tests from dir")
opt.add_option('--doctest', action='store_true',
help="run doctest on myself")

+ 0
- 68
waflib/extras/pgicc.py

@@ -1,68 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Antoine Dechaume 2011

"""
Detect the PGI C compiler
"""

import sys, re
from waflib.Configure import conf
from waflib.Tools.compiler_c import c_compiler
c_compiler['linux'].append('pgicc')

@conf
def find_pgi_compiler(conf, var, name):
"""
Find the program name, and execute it to ensure it really is itself.
"""
if sys.platform == 'cygwin':
conf.fatal('The PGI compiler does not work on Cygwin')

v = conf.env
cc = None
if v[var]: cc = v[var]
elif var in conf.environ: cc = conf.environ[var]
if not cc: cc = conf.find_program(name, var=var)
if not cc: conf.fatal('PGI Compiler (%s) was not found' % name)

v[var + '_VERSION'] = conf.get_pgi_version(cc)
v[var] = cc
v[var + '_NAME'] = 'pgi'

@conf
def get_pgi_version(conf, cc):
"""Find the version of a pgi compiler."""
version_re = re.compile(r"The Portland Group", re.I).search
cmd = cc + ['-V', '-E'] # Issue 1078, prevent wrappers from linking

try:
out, err = conf.cmd_and_log(cmd, output=0)
except Exception:
conf.fatal('Could not find pgi compiler %r' % cmd)

if out: match = version_re(out)
else: match = version_re(err)

if not match:
conf.fatal('Could not verify PGI signature')

cmd = cc + ['-help=variable']
try:
out, err = conf.cmd_and_log(cmd, output=0)
except Exception:
conf.fatal('Could not find pgi compiler %r' % cmd)

version = re.findall(r'^COMPVER\s*=(.*)', out, re.M)
if len(version) != 1:
conf.fatal('Could not determine the compiler version')
return version[0]

def configure(conf):
conf.find_pgi_compiler('CC', 'pgcc')
conf.find_ar()
conf.gcc_common_flags()
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()


+ 0
- 22
waflib/extras/pgicxx.py

@@ -1,22 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Antoine Dechaume 2011

"""
Detect the PGI C++ compiler
"""

from waflib.Tools.compiler_cxx import cxx_compiler
cxx_compiler['linux'].append('pgicxx')

from waflib.extras import pgicc

def configure(conf):
conf.find_pgi_compiler('CXX', 'pgCC')
conf.find_ar()
conf.gxx_common_flags()
conf.cxx_load_tools()
conf.cxx_add_flags()
conf.link_add_flags()

+ 0
- 402
waflib/extras/prefork.py

@@ -1,402 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2015 (ita)

"""
Execute commands through pre-forked servers. This tool creates as many servers as build threads.
On a benchmark executed on Linux Kubuntu 14, 8 virtual cores and SSD drive::

./genbench.py /tmp/build 200 100 15 5
waf clean build -j24
# no prefork: 2m7.179s
# prefork: 0m55.400s

To use::

def options(opt):
# optional, will spawn 40 servers early
opt.load('prefork')

def build(bld):
bld.load('prefork')
...
more code

The servers and the build process use a shared nonce to prevent undesirable external connections.
"""

import os, re, socket, threading, sys, subprocess, time, atexit, traceback, random, signal
try:
import SocketServer
except ImportError:
import socketserver as SocketServer
try:
from queue import Queue
except ImportError:
from Queue import Queue
try:
import cPickle
except ImportError:
import pickle as cPickle

SHARED_KEY = None
HEADER_SIZE = 64

REQ = 'REQ'
RES = 'RES'
BYE = 'BYE'

def make_header(params, cookie=''):
header = ','.join(params)
header = header.ljust(HEADER_SIZE - len(cookie))
assert(len(header) == HEADER_SIZE - len(cookie))
header = header + cookie
if sys.hexversion > 0x3000000:
header = header.encode('iso8859-1')
return header
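# Illustrative header layout (HEADER_SIZE = 64, cookie = the 20-char shared
# key): the comma-separated fields are space-padded to 44 bytes and the
# cookie fills the remainder, e.g. make_header([REQ, '123'], cookie) gives
# 'REQ,123' + 37 spaces + cookie; process_command() splits the cookie back
# off with query[-20:]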

def safe_compare(x, y):
sum = 0
for (a, b) in zip(x, y):
sum |= ord(a) ^ ord(b)
return sum == 0

re_valid_query = re.compile('^[a-zA-Z0-9_, ]+$')
class req(SocketServer.StreamRequestHandler):
def handle(self):
try:
while self.process_command():
pass
except KeyboardInterrupt:
return
except Exception as e:
print(e)

def send_response(self, ret, out, err, exc):
if out or err or exc:
data = (out, err, exc)
data = cPickle.dumps(data, -1)
else:
data = ''

params = [RES, str(ret), str(len(data))]

# no need for the cookie in the response
self.wfile.write(make_header(params))
if data:
self.wfile.write(data)
self.wfile.flush()

def process_command(self):
query = self.rfile.read(HEADER_SIZE)
if not query:
return None
#print(len(query))
assert(len(query) == HEADER_SIZE)
if sys.hexversion > 0x3000000:
query = query.decode('iso8859-1')

# magic cookie
key = query[-20:]
if not safe_compare(key, SHARED_KEY):
print('%r %r' % (key, SHARED_KEY))
self.send_response(-1, '', '', 'Invalid key given!')
return 'meh'

query = query[:-20]
#print "%r" % query
if not re_valid_query.match(query):
self.send_response(-1, '', '', 'Invalid query %r' % query)
raise ValueError('Invalid query %r' % query)

query = query.strip().split(',')

if query[0] == REQ:
self.run_command(query[1:])
elif query[0] == BYE:
raise ValueError('Exit')
else:
raise ValueError('Invalid query %r' % query)
return 'ok'

def run_command(self, query):

size = int(query[0])
data = self.rfile.read(size)
assert(len(data) == size)
kw = cPickle.loads(data)

# run command
ret = out = err = exc = None
cmd = kw['cmd']
del kw['cmd']
#print(cmd)

try:
if kw['stdout'] or kw['stderr']:
p = subprocess.Popen(cmd, **kw)
(out, err) = p.communicate()
ret = p.returncode
else:
ret = subprocess.Popen(cmd, **kw).wait()
except KeyboardInterrupt:
raise
except Exception as e:
ret = -1
exc = str(e) + traceback.format_exc()

self.send_response(ret, out, err, exc)

def create_server(conn, cls):
# child processes do not need the key, so we remove it from the OS environment
global SHARED_KEY
SHARED_KEY = os.environ['SHARED_KEY']
os.environ['SHARED_KEY'] = ''

ppid = int(os.environ['PREFORKPID'])
def reap():
if os.sep != '/':
os.waitpid(ppid, 0)
else:
while 1:
try:
os.kill(ppid, 0)
except OSError:
break
else:
time.sleep(1)
os.kill(os.getpid(), signal.SIGKILL)
t = threading.Thread(target=reap)
t.setDaemon(True)
t.start()

server = SocketServer.TCPServer(conn, req)
print(server.server_address[1])
sys.stdout.flush()
#server.timeout = 6000 # seconds
server.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
try:
server.serve_forever(poll_interval=0.001)
except KeyboardInterrupt:
pass

if __name__ == '__main__':
conn = ("127.0.0.1", 0)
#print("listening - %r %r\n" % conn)
create_server(conn, req)
else:

from waflib import Logs, Utils, Runner, Errors, Options

def init_task_pool(self):
# lazy creation, and set a common pool for all task consumers
pool = self.pool = []
for i in range(self.numjobs):
consumer = Runner.get_pool()
pool.append(consumer)
consumer.idx = i
self.ready = Queue(0)
def setq(consumer):
consumer.ready = self.ready
try:
threading.current_thread().idx = consumer.idx
except Exception as e:
print(e)
for x in pool:
x.ready.put(setq)
return pool
Runner.Parallel.init_task_pool = init_task_pool

def make_server(bld, idx):
cmd = [sys.executable, os.path.abspath(__file__)]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
return proc

def make_conn(bld, srv):
port = srv.port
conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
conn.connect(('127.0.0.1', port))
return conn


SERVERS = []
CONNS = []
def close_all():
global SERVERS, CONNS
while CONNS:
conn = CONNS.pop()
try:
conn.close()
except:
pass
while SERVERS:
srv = SERVERS.pop()
try:
srv.kill()
except:
pass
atexit.register(close_all)

def put_data(conn, data):
cnt = 0
while cnt < len(data):
sent = conn.send(data[cnt:])
if sent == 0:
raise RuntimeError('connection ended')
cnt += sent

def read_data(conn, siz):
cnt = 0
buf = []
while cnt < siz:
data = conn.recv(min(siz - cnt, 1024))
if not data:
raise RuntimeError('connection ended %r %r' % (cnt, siz))
buf.append(data)
cnt += len(data)
if sys.hexversion > 0x3000000:
ret = ''.encode('iso8859-1').join(buf)
else:
ret = ''.join(buf)
return ret

def exec_command(self, cmd, **kw):
if 'stdout' in kw:
if kw['stdout'] not in (None, subprocess.PIPE):
return self.exec_command_old(cmd, **kw)
elif 'stderr' in kw:
if kw['stderr'] not in (None, subprocess.PIPE):
return self.exec_command_old(cmd, **kw)

kw['shell'] = isinstance(cmd, str)
Logs.debug('runner: %r' % cmd)
Logs.debug('runner_env: kw=%s' % kw)

if self.logger:
self.logger.info(cmd)

if 'stdout' not in kw:
kw['stdout'] = subprocess.PIPE
if 'stderr' not in kw:
kw['stderr'] = subprocess.PIPE

if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
raise Errors.WafError("Program %s not found!" % cmd[0])

idx = threading.current_thread().idx
kw['cmd'] = cmd

# serialization..
#print("sub %r %r" % (idx, cmd))
#print("write to %r %r" % (idx, cmd))

data = cPickle.dumps(kw, -1)
params = [REQ, str(len(data))]
header = make_header(params, self.SHARED_KEY)

conn = CONNS[idx]

put_data(conn, header + data)
#put_data(conn, data)

#print("running %r %r" % (idx, cmd))
#print("read from %r %r" % (idx, cmd))

data = read_data(conn, HEADER_SIZE)
if sys.hexversion > 0x3000000:
data = data.decode('iso8859-1')

#print("received %r" % data)
lst = data.split(',')
ret = int(lst[1])
dlen = int(lst[2])

out = err = None
if dlen:
data = read_data(conn, dlen)
(out, err, exc) = cPickle.loads(data)
if exc:
raise Errors.WafError('Execution failure: %s' % exc)

if out:
if not isinstance(out, str):
out = out.decode(sys.stdout.encoding or 'iso8859-1')
if self.logger:
self.logger.debug('out: %s' % out)
else:
Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
if err:
if not isinstance(err, str):
err = err.decode(sys.stdout.encoding or 'iso8859-1')
if self.logger:
self.logger.error('err: %s' % err)
else:
Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})

return ret

def init_key(ctx):
try:
key = ctx.SHARED_KEY = os.environ['SHARED_KEY']
except KeyError:
key = "".join([chr(random.SystemRandom().randint(40, 126)) for x in range(20)])
os.environ['SHARED_KEY'] = ctx.SHARED_KEY = key

os.environ['PREFORKPID'] = str(os.getpid())
return key

def init_servers(ctx, maxval):
while len(SERVERS) < maxval:
i = len(SERVERS)
srv = make_server(ctx, i)
SERVERS.append(srv)
while len(CONNS) < maxval:
i = len(CONNS)
srv = SERVERS[i]

# postpone the connection
srv.port = int(srv.stdout.readline())

conn = None
for x in range(30):
try:
conn = make_conn(ctx, srv)
break
except socket.error:
time.sleep(0.01)
if not conn:
raise ValueError('Could not start the server!')
if srv.poll() is not None:
Logs.warn('Looks like it is not our server process - concurrent builds are unsupported at this stage')
raise ValueError('Could not start the server')
CONNS.append(conn)

def init_smp(self):
if not getattr(Options.options, 'smp', getattr(self, 'smp', None)):
return
pid = os.getpid()
cmd = None
if Utils.unversioned_sys_platform() in ('freebsd',):
cmd = ['cpuset', '-l', '0', '-p', str(pid)]
elif Utils.unversioned_sys_platform() in ('linux',):
cmd = ['taskset', '-pc', '0', str(pid)]
if cmd:
self.cmd_and_log(cmd, quiet=0)

def options(opt):
init_key(opt)
init_servers(opt, 40)
opt.add_option('--pin-process', action='store_true', dest='smp', default=False)

def build(bld):
if bld.cmd == 'clean':
return

init_key(bld)
init_servers(bld, bld.jobs)
init_smp(bld)

bld.__class__.exec_command_old = bld.__class__.exec_command
bld.__class__.exec_command = exec_command
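
For reference, the wire format shared by these prefork variants is a fixed-size comma-separated header, padded with spaces and suffixed with the shared key as a cookie, followed by a pickled payload. A minimal standalone sketch of the framing, assuming the 64-byte header and 20-character key shown in preforkjava.py below (encode_request and decode_header are illustrative names, not part of the tool)::

	import pickle

	HEADER_SIZE = 64        # same constant as in preforkjava.py
	KEY = 'k' * 20          # stand-in for the 20-character SHARED_KEY

	def encode_request(kw):
		# serialize the body first so its length can go into the header
		body = pickle.dumps(kw, -1)
		header = ','.join(['REQ', str(len(body))])
		header = header.ljust(HEADER_SIZE - len(KEY)) + KEY
		return header.encode('iso8859-1') + body

	def decode_header(data):
		# drop the trailing cookie, then split the comma-separated fields
		fields = data.decode('iso8859-1')[:HEADER_SIZE - len(KEY)].split(',')
		return fields[0].strip(), int(fields[1])

	msg = encode_request({'cmd': ['gcc', '--version']})
	verb, size = decode_header(msg[:HEADER_SIZE])
	assert verb == 'REQ' and size == len(msg) - HEADER_SIZE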


+ 0
- 241
waflib/extras/preforkjava.py

@@ -1,241 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2015 (ita)

# TODO: have the child process terminate if the parent is killed abruptly

import os, re, socket, threading, sys, subprocess, time, atexit, traceback, random
try:
import SocketServer
except ImportError:
import socketserver as SocketServer
try:
from queue import Queue
except ImportError:
from Queue import Queue

import json as pickle

SHARED_KEY = None
HEADER_SIZE = 64

REQ = 'REQ'
RES = 'RES'
BYE = 'BYE'

def make_header(params, cookie=''):
header = ','.join(params)
header = header.ljust(HEADER_SIZE - len(cookie))
assert(len(header) == HEADER_SIZE - len(cookie))
header = header + cookie
if sys.hexversion > 0x3000000:
header = header.encode('iso8859-1')
return header

if 1:
from waflib import Logs, Utils, Runner, Errors, Options

def init_task_pool(self):
# lazy creation, and set a common pool for all task consumers
pool = self.pool = []
for i in range(self.numjobs):
consumer = Runner.get_pool()
pool.append(consumer)
consumer.idx = i
self.ready = Queue(0)
def setq(consumer):
consumer.ready = self.ready
try:
threading.current_thread().idx = consumer.idx
except Exception as e:
print(e)
for x in pool:
x.ready.put(setq)
return pool
Runner.Parallel.init_task_pool = init_task_pool

def make_server(bld, idx):
top = getattr(bld, 'preforkjava_top', os.path.dirname(os.path.abspath('__file__')))
cp = getattr(bld, 'preforkjava_cp', os.path.join(top, 'minimal-json-0.9.3-SNAPSHOT.jar') + os.pathsep + top)

for x in cp.split(os.pathsep):
if x and not os.path.exists(x):
Logs.warn('Invalid classpath: %r' % cp)
Logs.warn('Set for example bld.preforkjava_cp to /path/to/minimal-json:/path/to/Prefork.class/')

cwd = getattr(bld, 'preforkjava_cwd', top)
port = getattr(bld, 'preforkjava_port', 51200)
cmd = getattr(bld, 'preforkjava_cmd', 'java -cp %s%s Prefork %d' % (cp, os.pathsep, port))
proc = subprocess.Popen(cmd.split(), shell=False, cwd=cwd)
proc.port = port
return proc

def make_conn(bld, srv):
#port = PORT + idx
port = srv.port
conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
conn.connect(('127.0.0.1', port))
return conn

SERVERS = []
CONNS = []
def close_all():
global SERVERS
while SERVERS:
srv = SERVERS.pop()
try:
srv.kill()
except Exception:
pass
atexit.register(close_all)

def put_data(conn, data):
cnt = 0
while cnt < len(data):
sent = conn.send(data[cnt:])
if sent == 0:
raise RuntimeError('connection ended')
cnt += sent

def read_data(conn, siz):
cnt = 0
buf = []
while cnt < siz:
data = conn.recv(min(siz - cnt, 1024))
if not data:
raise RuntimeError('connection ended %r %r' % (cnt, siz))
buf.append(data)
cnt += len(data)
if sys.hexversion > 0x3000000:
ret = ''.encode('iso8859-1').join(buf)
else:
ret = ''.join(buf)
return ret

def exec_command(self, cmd, **kw):
if 'stdout' in kw:
if kw['stdout'] not in (None, subprocess.PIPE):
return self.exec_command_old(cmd, **kw)
elif 'stderr' in kw:
if kw['stderr'] not in (None, subprocess.PIPE):
return self.exec_command_old(cmd, **kw)

kw['shell'] = isinstance(cmd, str)
Logs.debug('runner: %r' % cmd)
Logs.debug('runner_env: kw=%s' % kw)

if self.logger:
self.logger.info(cmd)

if 'stdout' not in kw:
kw['stdout'] = subprocess.PIPE
if 'stderr' not in kw:
kw['stderr'] = subprocess.PIPE

if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
raise Errors.WafError("Program %s not found!" % cmd[0])

idx = threading.current_thread().idx
kw['cmd'] = cmd

data = pickle.dumps(kw)
params = [REQ, str(len(data))]
header = make_header(params, self.SHARED_KEY)

conn = CONNS[idx]

if sys.hexversion > 0x3000000:
data = data.encode('iso8859-1')
put_data(conn, header + data)

data = read_data(conn, HEADER_SIZE)
if sys.hexversion > 0x3000000:
data = data.decode('iso8859-1')

#print("received %r" % data)
lst = data.split(',')
ret = int(lst[1])
dlen = int(lst[2])

out = err = None
if dlen:
data = read_data(conn, dlen)
(out, err, exc) = pickle.loads(data)
if exc:
raise Errors.WafError('Execution failure: %s' % exc)

if out:
if not isinstance(out, str):
out = out.decode(sys.stdout.encoding or 'iso8859-1')
if self.logger:
self.logger.debug('out: %s' % out)
else:
Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
if err:
if not isinstance(err, str):
err = err.decode(sys.stdout.encoding or 'iso8859-1')
if self.logger:
self.logger.error('err: %s' % err)
else:
Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})

return ret

def init_key(ctx):
try:
key = ctx.SHARED_KEY = os.environ['SHARED_KEY']
except KeyError:
key = "".join([chr(random.SystemRandom().randint(40, 126)) for x in range(20)])
os.environ['SHARED_KEY'] = ctx.SHARED_KEY = key
os.environ['PREFORKPID'] = str(os.getpid())
return key

def init_servers(ctx, maxval):
while len(SERVERS) < 1:
i = len(SERVERS)
srv = make_server(ctx, i)
SERVERS.append(srv)
while len(CONNS) < maxval:
i = len(CONNS)
srv = SERVERS[0]
conn = None
for x in range(30):
try:
conn = make_conn(ctx, srv)
break
except socket.error:
time.sleep(0.01)
if not conn:
raise ValueError('Could not start the server!')
CONNS.append(conn)

def init_smp(self):
if not getattr(Options.options, 'smp', getattr(self, 'smp', None)):
return
pid = os.getpid()
cmd = None
if Utils.unversioned_sys_platform() in ('freebsd',):
cmd = ['cpuset', '-l', '0', '-p', str(pid)]
elif Utils.unversioned_sys_platform() in ('linux',):
cmd = ['taskset', '-pc', '0', str(pid)]
if cmd:
self.cmd_and_log(cmd, quiet=0)

def options(opt):
opt.add_option('--pin-process', action='store_true', dest='smp', default=False)
init_key(opt)
init_servers(opt, 40)

def build(bld):
if bld.cmd == 'clean':
return

init_key(bld)
init_servers(bld, bld.jobs)
init_smp(bld)

bld.__class__.exec_command_old = bld.__class__.exec_command
bld.__class__.exec_command = exec_command
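
The java-side server above is configured through plain attributes on the build context (preforkjava_top, preforkjava_cp, preforkjava_cwd, preforkjava_port, preforkjava_cmd), all optional, which make_server reads via getattr. A hypothetical wscript sketch, with placeholder paths::

	import os

	def build(bld):
		# every attribute is optional; make_server falls back to its defaults
		bld.preforkjava_top = '/path/to/prefork'
		bld.preforkjava_cp = os.pathsep.join(['/path/to/minimal-json.jar', '/path/to/prefork'])
		bld.preforkjava_port = 51200
		bld.load('preforkjava')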


+ 0
- 318
waflib/extras/preforkunix.py

@@ -1,318 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2015 (ita)

"""
A version of prefork.py that uses unix sockets. The advantage is that it does not expose
connections to the outside. However, it only works on unix-like systems,
and performance can be slightly worse.

To use::

def options(opt):
# recommended, fork new processes before using more memory
opt.load('preforkunix')

def build(bld):
bld.load('preforkunix')
...
more code
"""

import os, re, socket, threading, sys, subprocess, atexit, traceback, signal, time
try:
from queue import Queue
except ImportError:
from Queue import Queue
try:
import cPickle
except ImportError:
import pickle as cPickle

HEADER_SIZE = 20

REQ = 'REQ'
RES = 'RES'
BYE = 'BYE'

def make_header(params, cookie=''):
header = ','.join(params)
header = header.ljust(HEADER_SIZE - len(cookie))
assert(len(header) == HEADER_SIZE - len(cookie))
header = header + cookie
if sys.hexversion > 0x3000000:
header = header.encode('iso8859-1')
return header

re_valid_query = re.compile('^[a-zA-Z0-9_, ]+$')
if 1:
def send_response(conn, ret, out, err, exc):
if out or err or exc:
data = (out, err, exc)
data = cPickle.dumps(data, -1)
else:
data = ''

params = [RES, str(ret), str(len(data))]

# no need for the cookie in the response
conn.send(make_header(params))
if data:
conn.send(data)

def process_command(conn):
query = conn.recv(HEADER_SIZE)
if not query:
return None
#print(len(query))
assert(len(query) == HEADER_SIZE)
if sys.hexversion > 0x3000000:
query = query.decode('iso8859-1')

#print "%r" % query
if not re_valid_query.match(query):
send_response(conn, -1, '', '', 'Invalid query %r' % query)
raise ValueError('Invalid query %r' % query)

query = query.strip().split(',')

if query[0] == REQ:
run_command(conn, query[1:])
elif query[0] == BYE:
raise ValueError('Exit')
else:
raise ValueError('Invalid query %r' % query)
return 'ok'

def run_command(conn, query):

size = int(query[0])
data = conn.recv(size)
assert(len(data) == size)
kw = cPickle.loads(data)

# run command
ret = out = err = exc = None
cmd = kw['cmd']
del kw['cmd']
#print(cmd)

try:
if kw['stdout'] or kw['stderr']:
p = subprocess.Popen(cmd, **kw)
(out, err) = p.communicate()
ret = p.returncode
else:
ret = subprocess.Popen(cmd, **kw).wait()
except KeyboardInterrupt:
raise
except Exception as e:
ret = -1
exc = str(e) + traceback.format_exc()

send_response(conn, ret, out, err, exc)

if 1:

from waflib import Logs, Utils, Runner, Errors, Options

def init_task_pool(self):
# lazy creation, and set a common pool for all task consumers
pool = self.pool = []
for i in range(self.numjobs):
consumer = Runner.get_pool()
pool.append(consumer)
consumer.idx = i
self.ready = Queue(0)
def setq(consumer):
consumer.ready = self.ready
try:
threading.current_thread().idx = consumer.idx
except Exception as e:
print(e)
for x in pool:
x.ready.put(setq)
return pool
Runner.Parallel.init_task_pool = init_task_pool

def make_conn(bld):
child_socket, parent_socket = socket.socketpair(socket.AF_UNIX)
ppid = os.getpid()
pid = os.fork()
if pid == 0:
parent_socket.close()

# if the parent crashes, try to exit cleanly
def reap():
while 1:
try:
os.kill(ppid, 0)
except OSError:
break
else:
time.sleep(1)
os.kill(os.getpid(), signal.SIGKILL)
t = threading.Thread(target=reap)
t.setDaemon(True)
t.start()

# write to child_socket only
try:
while process_command(child_socket):
pass
except KeyboardInterrupt:
sys.exit(2)
else:
child_socket.close()
return (pid, parent_socket)

SERVERS = []
CONNS = []
def close_all():
global SERVERS, CONNS
while CONNS:
conn = CONNS.pop()
try:
conn.close()
except:
pass
while SERVERS:
pid = SERVERS.pop()
try:
os.kill(pid, 9)
except:
pass
atexit.register(close_all)

def put_data(conn, data):
cnt = 0
while cnt < len(data):
sent = conn.send(data[cnt:])
if sent == 0:
raise RuntimeError('connection ended')
cnt += sent

def read_data(conn, siz):
cnt = 0
buf = []
while cnt < siz:
data = conn.recv(min(siz - cnt, 1024))
if not data:
raise RuntimeError('connection ended %r %r' % (cnt, siz))
buf.append(data)
cnt += len(data)
if sys.hexversion > 0x3000000:
ret = ''.encode('iso8859-1').join(buf)
else:
ret = ''.join(buf)
return ret

def exec_command(self, cmd, **kw):
if 'stdout' in kw:
if kw['stdout'] not in (None, subprocess.PIPE):
return self.exec_command_old(cmd, **kw)
elif 'stderr' in kw:
if kw['stderr'] not in (None, subprocess.PIPE):
return self.exec_command_old(cmd, **kw)

kw['shell'] = isinstance(cmd, str)
Logs.debug('runner: %r' % cmd)
Logs.debug('runner_env: kw=%s' % kw)

if self.logger:
self.logger.info(cmd)

if 'stdout' not in kw:
kw['stdout'] = subprocess.PIPE
if 'stderr' not in kw:
kw['stderr'] = subprocess.PIPE

if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
raise Errors.WafError("Program %s not found!" % cmd[0])

idx = threading.current_thread().idx
kw['cmd'] = cmd

# serialization..
#print("sub %r %r" % (idx, cmd))
#print("write to %r %r" % (idx, cmd))

data = cPickle.dumps(kw, -1)
params = [REQ, str(len(data))]
header = make_header(params)

conn = CONNS[idx]

put_data(conn, header + data)

#print("running %r %r" % (idx, cmd))
#print("read from %r %r" % (idx, cmd))

data = read_data(conn, HEADER_SIZE)
if sys.hexversion > 0x3000000:
data = data.decode('iso8859-1')

#print("received %r" % data)
lst = data.split(',')
ret = int(lst[1])
dlen = int(lst[2])

out = err = None
if dlen:
data = read_data(conn, dlen)
(out, err, exc) = cPickle.loads(data)
if exc:
raise Errors.WafError('Execution failure: %s' % exc)

if out:
if not isinstance(out, str):
out = out.decode(sys.stdout.encoding or 'iso8859-1')
if self.logger:
self.logger.debug('out: %s' % out)
else:
Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
if err:
if not isinstance(err, str):
err = err.decode(sys.stdout.encoding or 'iso8859-1')
if self.logger:
self.logger.error('err: %s' % err)
else:
Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})

return ret

def init_smp(self):
if not getattr(Options.options, 'smp', getattr(self, 'smp', None)):
return
pid = os.getpid()
cmd = None
if Utils.unversioned_sys_platform() in ('freebsd',):
cmd = ['cpuset', '-l', '0', '-p', str(pid)]
elif Utils.unversioned_sys_platform() in ('linux',):
cmd = ['taskset', '-pc', '0', str(pid)]
if cmd:
self.cmd_and_log(cmd, quiet=0)

def options(opt):
# memory consumption might be at the lowest point while processing options
opt.add_option('--pin-process', action='store_true', dest='smp', default=False)
if Utils.is_win32 or os.sep != '/':
return
while len(CONNS) < 30:
(pid, conn) = make_conn(opt)
SERVERS.append(pid)
CONNS.append(conn)

def build(bld):
if Utils.is_win32 or os.sep != '/':
return
if bld.cmd == 'clean':
return
while len(CONNS) < bld.jobs:
(pid, conn) = make_conn(bld)
SERVERS.append(pid)
CONNS.append(conn)
init_smp(bld)
bld.__class__.exec_command_old = bld.__class__.exec_command
bld.__class__.exec_command = exec_command


+ 0
- 46
waflib/extras/print_commands.py

@@ -1,46 +0,0 @@
#! /usr/bin/env python

"""
Illustrate how to override a class method to do something

In this case, print the commands being executed as strings
(the commands are usually lists, so this can be misleading)
"""

import sys
from waflib import Context, Utils, Logs

def exec_command(self, cmd, **kw):
subprocess = Utils.subprocess
kw['shell'] = isinstance(cmd, str)

txt = cmd
if isinstance(cmd, list):
txt = ' '.join(cmd)

print(txt)
Logs.debug('runner_env: kw=%s' % kw)

try:
if self.logger:
# warning: may deadlock with a lot of output (subprocess limitation)

self.logger.info(cmd)

kw['stdout'] = kw['stderr'] = subprocess.PIPE
p = subprocess.Popen(cmd, **kw)
(out, err) = p.communicate()
if out:
self.logger.debug('out: %s' % out.decode(sys.stdout.encoding or 'iso8859-1'))
if err:
self.logger.error('err: %s' % err.decode(sys.stdout.encoding or 'iso8859-1'))
return p.returncode
else:
p = subprocess.Popen(cmd, **kw)
return p.wait()
except OSError:
return -1

Context.Context.exec_command = exec_command
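
Because the module replaces Context.Context.exec_command at import time, loading it once from any command is enough to echo every command line. A minimal sketch, assuming waf can locate the extra on its tool path::

	def options(opt):
		opt.load('print_commands')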



+ 0
- 56
waflib/extras/proc.py

@@ -1,56 +0,0 @@
#! /usr/bin/env python
# per rosengren 2011

from os import environ, path
from waflib import TaskGen, Utils

def options(opt):
grp = opt.add_option_group('Oracle ProC Options')
grp.add_option('--oracle_home', action='store', default=environ.get('PROC_ORACLE'), help='Path to Oracle installation home (has bin/lib)')
grp.add_option('--tns_admin', action='store', default=environ.get('TNS_ADMIN'), help='Directory containing server list (tnsnames.ora)')
grp.add_option('--connection', action='store', default='dummy-user/dummy-password@dummy-server', help='Format: user/password@server')

def configure(cnf):
env = cnf.env
if not env.PROC_ORACLE:
env.PROC_ORACLE = cnf.options.oracle_home
if not env.PROC_TNS_ADMIN:
env.PROC_TNS_ADMIN = cnf.options.tns_admin
if not env.PROC_CONNECTION:
env.PROC_CONNECTION = cnf.options.connection
cnf.find_program('proc', var='PROC', path_list=env.PROC_ORACLE + path.sep + 'bin')

def proc(tsk):
env = tsk.env
gen = tsk.generator
bld = gen.bld
inc_nodes = gen.to_incnodes(Utils.to_list(getattr(gen,'includes',[])) + env['INCLUDES'])

# note: the conditional expression below requires python >= 2.5
cmd = (
[env.PROC] +
['SQLCHECK=SEMANTICS'] +
(['SYS_INCLUDE=(' + ','.join(env.PROC_INCLUDES) + ')']
if env.PROC_INCLUDES else []) +
['INCLUDE=(' + ','.join(
[i.bldpath() for i in inc_nodes]
) + ')'] +
['userid=' + env.PROC_CONNECTION] +
['INAME=' + tsk.inputs[0].bldpath()] +
['ONAME=' + tsk.outputs[0].bldpath()]
)
exec_env = {
'ORACLE_HOME': env.PROC_ORACLE,
'LD_LIBRARY_PATH': env.PROC_ORACLE + path.sep + 'lib',
}
if env.PROC_TNS_ADMIN:
exec_env['TNS_ADMIN'] = env.PROC_TNS_ADMIN
return tsk.exec_command(cmd, env=exec_env)

TaskGen.declare_chain(
name = 'proc',
rule = proc,
ext_in = '.pc',
ext_out = '.c',
)
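
With the chain declared above, any .pc file listed in a task generator's sources is run through Pro*C, and the generated .c output is then handled by the C compiler. A hypothetical wscript sketch (file names invented)::

	def options(opt):
		opt.load('compiler_c')
		opt.load('proc')

	def configure(cnf):
		cnf.load('compiler_c')
		cnf.load('proc')

	def build(bld):
		bld.program(source='main.c queries.pc', target='app')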


Some files were not shown because too many files changed in this diff
