@@ -47,6 +47,7 @@ WAFLIB_STRIP_EXTRAS="
 boo
 boost
 c_dumbpreproc
+c_emscripten
 cabal
 cfg_altoptions
 cfg_cross_gnu
@@ -32,7 +32,7 @@ POSSIBILITY OF SUCH DAMAGE.
 import os, sys, inspect
-VERSION="1.8.9"
+VERSION="1.8.14"
 REVISION="x"
 GIT="x"
 INSTALL="x"
@@ -1015,6 +1015,7 @@ class InstallContext(BuildContext):
 if tsk.runnable_status() == Task.ASK_LATER:
 raise self.WafError('cannot post the task %r' % tsk)
 tsk.run()
+tsk.hasrun = True
 def install_files(self, dest, files, env=None, chmod=Utils.O644, relative_trick=False, cwd=None, add=True, postpone=True, task=None):
 """
@@ -167,6 +167,7 @@ class ConfigSet(object):
 for x in keys:
 tbl[x] = copy.deepcopy(tbl[x])
 self.table = tbl
+return self
 def get_flat(self, key):
 """
@@ -192,11 +192,11 @@ class ConfigurationContext(Context.Context):
 env['files'] = self.files
 env['environ'] = dict(self.environ)
-if not self.env.NO_LOCK_IN_RUN:
+if not self.env.NO_LOCK_IN_RUN and not getattr(Options.options, 'no_lock_in_run'):
 env.store(os.path.join(Context.run_dir, Options.lockfile))
-if not self.env.NO_LOCK_IN_TOP:
+if not self.env.NO_LOCK_IN_TOP and not getattr(Options.options, 'no_lock_in_top'):
 env.store(os.path.join(Context.top_dir, Options.lockfile))
-if not self.env.NO_LOCK_IN_OUT:
+if not self.env.NO_LOCK_IN_OUT and not getattr(Options.options, 'no_lock_in_out'):
 env.store(os.path.join(Context.out_dir, Options.lockfile))
 def prepare_env(self, env):
@@ -234,7 +234,7 @@ class ConfigurationContext(Context.Context):
 tmpenv = self.all_envs[key]
 tmpenv.store(os.path.join(self.cachedir.abspath(), key + Build.CACHE_SUFFIX))
-def load(self, input, tooldir=None, funs=None):
+def load(self, input, tooldir=None, funs=None, with_sys_path=True):
 """
 Load Waf tools, which will be imported whenever a build is started.
@@ -252,7 +252,7 @@ class ConfigurationContext(Context.Context):
 # avoid loading the same tool more than once with the same functions
 # used by composite projects
-mag = (tool, id(self.env), funs)
+mag = (tool, id(self.env), tooldir, funs)
 if mag in self.tool_cache:
 self.to_log('(tool %s is already loaded, skipping)' % tool)
 continue
@@ -260,7 +260,7 @@ class ConfigurationContext(Context.Context):
 module = None
 try:
-module = Context.load_tool(tool, tooldir, ctx=self)
+module = Context.load_tool(tool, tooldir, ctx=self, with_sys_path=with_sys_path)
 except ImportError as e:
 self.fatal('Could not load the Waf tool %r from %r\n%s' % (tool, sys.path, e))
 except Exception as e:
@@ -352,7 +352,7 @@ def conf(f):
 return f
 @conf
-def add_os_flags(self, var, dest=None):
+def add_os_flags(self, var, dest=None, dup=True):
 """
 Import operating system environment values into ``conf.env`` dict::
@@ -363,10 +363,16 @@ def add_os_flags(self, var, dest=None):
 :type var: string
 :param dest: destination variable, by default the same as var
 :type dest: string
+:param dup: add the same set of flags again
+:type dup: bool
 """
-# do not use 'get' to make certain the variable is not defined
-try: self.env.append_value(dest or var, shlex.split(self.environ[var]))
-except KeyError: pass
+try:
+flags = shlex.split(self.environ[var])
+except KeyError:
+return
+# TODO: in waf 1.9, make dup=False the default
+if dup or ''.join(flags) not in ''.join(Utils.to_list(self.env[dest or var])):
+self.env.append_value(dest or var, flags)
 @conf
 def cmd_to_list(self, cmd):
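
Note: a short sketch of the new ``dup`` flag (the configure function below is illustrative, not part of the patch)::

    def configure(conf):
        # with CFLAGS="-O2 -g" in the environment, only the first call changes conf.env.CFLAGS
        conf.add_os_flags('CFLAGS', dup=False)
        conf.add_os_flags('CFLAGS', dup=False)   # skipped: the same flags are already present
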
@@ -11,13 +11,13 @@ from waflib import Utils, Errors, Logs
 import waflib.Node
 # the following 3 constants are updated on each new release (do not touch)
-HEXVERSION=0x1080900
+HEXVERSION=0x1080e00
 """Constant updated on new releases"""
-WAFVERSION="1.8.9"
+WAFVERSION="1.8.14"
 """Constant updated on new releases"""
-WAFREVISION="06e49b2a82166aeb14dde8357c58387f252fc722"
+WAFREVISION="ce8234c396bb246a20ea9f51594ee051d5b378e7"
 """Git revision when the waf version is updated"""
 ABI = 98
@@ -55,7 +55,7 @@ waf_dir = ''
 local_repo = ''
 """Local repository containing additional Waf tools (plugins)"""
-remote_repo = 'http://waf.googlecode.com/git/'
+remote_repo = 'https://raw.githubusercontent.com/waf-project/waf/master/'
 """
 Remote directory containing downloadable waf tools. The missing tools can be downloaded by using::
@@ -210,9 +210,10 @@ class Context(ctx):
 """
 tools = Utils.to_list(tool_list)
 path = Utils.to_list(kw.get('tooldir', ''))
+with_sys_path = kw.get('with_sys_path', True)
 for t in tools:
-module = load_tool(t, path)
+module = load_tool(t, path, with_sys_path=with_sys_path)
 fun = getattr(module, kw.get('name', self.fun), None)
 if fun:
 fun(self)
@@ -321,11 +322,11 @@ class Context(ctx):
 unlike :py:meth:`waflib.Context.Context.cmd_and_log`
 :param cmd: command argument for subprocess.Popen
-:param kw: keyword arguments for subprocess.Popen
+:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
 """
 subprocess = Utils.subprocess
 kw['shell'] = isinstance(cmd, str)
-Logs.debug('runner: %r' % cmd)
+Logs.debug('runner: %r' % (cmd,))
 Logs.debug('runner_env: kw=%s' % kw)
 if self.logger:
@@ -339,14 +340,25 @@ class Context(ctx):
 if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
 raise Errors.WafError("Program %s not found!" % cmd[0])
+wargs = {}
+if 'timeout' in kw:
+if kw['timeout'] is not None:
+wargs['timeout'] = kw['timeout']
+del kw['timeout']
+if 'input' in kw:
+if kw['input']:
+wargs['input'] = kw['input']
+kw['stdin'] = Utils.subprocess.PIPE
+del kw['input']
 try:
 if kw['stdout'] or kw['stderr']:
 p = subprocess.Popen(cmd, **kw)
-(out, err) = p.communicate()
+(out, err) = p.communicate(**wargs)
 ret = p.returncode
 else:
 out, err = (None, None)
-ret = subprocess.Popen(cmd, **kw).wait()
+ret = subprocess.Popen(cmd, **kw).wait(**wargs)
 except Exception as e:
 raise Errors.WafError('Execution failure: %s' % str(e), ex=e)
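
Note: with the block above, ``exec_command`` forwards the new ``input``/``timeout`` keywords to communicate()/wait() (Python 3.3+ semantics for ``timeout``); an illustrative call, not from the patch::

    def build(bld):
        # feed data on stdin and abort the child after 10 seconds
        bld.exec_command(['grep', 'foo'], input=b'foo bar\n', timeout=10)
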
@@ -369,24 +381,25 @@ class Context(ctx):
 def cmd_and_log(self, cmd, **kw):
 """
-Execute a command and return stdout if the execution is successful.
+Execute a command and return stdout/stderr if the execution is successful.
 An exception is thrown when the exit status is non-0. In that case, both stderr and stdout
 will be bound to the WafError object::
 def configure(conf):
 out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH)
 (out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH)
+(out, err) = conf.cmd_and_log(cmd, input='\\n'.encode(), output=waflib.Context.STDOUT)
 try:
 conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH)
 except Exception as e:
 print(e.stdout, e.stderr)
 :param cmd: args for subprocess.Popen
-:param kw: keyword arguments for subprocess.Popen
+:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
 """
 subprocess = Utils.subprocess
 kw['shell'] = isinstance(cmd, str)
-Logs.debug('runner: %r' % cmd)
+Logs.debug('runner: %r' % (cmd,))
 if 'quiet' in kw:
 quiet = kw['quiet']
@@ -406,9 +419,21 @@ class Context(ctx):
 kw['stdout'] = kw['stderr'] = subprocess.PIPE
 if quiet is None:
 self.to_log(cmd)
+wargs = {}
+if 'timeout' in kw:
+if kw['timeout'] is not None:
+wargs['timeout'] = kw['timeout']
+del kw['timeout']
+if 'input' in kw:
+if kw['input']:
+wargs['input'] = kw['input']
+kw['stdin'] = Utils.subprocess.PIPE
+del kw['input']
 try:
 p = subprocess.Popen(cmd, **kw)
-(out, err) = p.communicate()
+(out, err) = p.communicate(**wargs)
 except Exception as e:
 raise Errors.WafError('Execution failure: %s' % str(e), ex=e)
@@ -620,14 +645,14 @@ def load_module(path, encoding=None):
 module_dir = os.path.dirname(path)
 sys.path.insert(0, module_dir)
-exec(compile(code, path, 'exec'), module.__dict__)
-sys.path.remove(module_dir)
+try : exec(compile(code, path, 'exec'), module.__dict__)
+finally: sys.path.remove(module_dir)
 cache_modules[path] = module
 return module
-def load_tool(tool, tooldir=None, ctx=None):
+def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
 """
 Import a Waf tool (python module), and store it in the dict :py:const:`waflib.Context.Context.tools`
@@ -635,33 +660,44 @@ def load_tool(tool, tooldir=None, ctx=None):
 :param tool: Name of the tool
 :type tooldir: list
 :param tooldir: List of directories to search for the tool module
+:type with_sys_path: boolean
+:param with_sys_path: whether or not to search the regular sys.path, besides waf_dir and potentially given tooldirs
 """
 if tool == 'java':
 tool = 'javaw' # jython
 else:
 tool = tool.replace('++', 'xx')
-if tooldir:
-assert isinstance(tooldir, list)
-sys.path = tooldir + sys.path
-try:
-__import__(tool)
+origSysPath = sys.path
+if not with_sys_path: sys.path = []
+try:
+if tooldir:
+assert isinstance(tooldir, list)
+sys.path = tooldir + sys.path
+try:
+__import__(tool)
+finally:
+for d in tooldir:
+sys.path.remove(d)
 ret = sys.modules[tool]
 Context.tools[tool] = ret
 return ret
-finally:
-for d in tooldir:
-sys.path.remove(d)
-else:
-for x in ('waflib.Tools.%s', 'waflib.extras.%s', 'waflib.%s', '%s'):
+else:
+if not with_sys_path: sys.path.insert(0, waf_dir)
 try:
-__import__(x % tool)
-break
-except ImportError:
-x = None
-if x is None: # raise an exception
-__import__(tool)
-ret = sys.modules[x % tool]
-Context.tools[tool] = ret
-return ret
+for x in ('waflib.Tools.%s', 'waflib.extras.%s', 'waflib.%s', '%s'):
+try:
+__import__(x % tool)
+break
+except ImportError:
+x = None
+if x is None: # raise an exception
+__import__(tool)
+finally:
+if not with_sys_path: sys.path.remove(waf_dir)
+ret = sys.modules[x % tool]
+Context.tools[tool] = ret
+return ret
+finally:
+if not with_sys_path: sys.path += origSysPath
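
Note: together with the Configure.load() change above, this allows restricting tool imports to waf's own tree and an explicit tooldir; the tool name and 'tools' directory below are assumptions, not from the patch::

    def configure(conf):
        # do not consult the regular sys.path when importing the tool
        conf.load('mytool', tooldir='tools', with_sys_path=False)
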
@@ -20,8 +20,8 @@ if not os.environ.get('NOSYNC', False):
 # in case someone uses the root logger
 import logging
-LOG_FORMAT = "%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
-HOUR_FORMAT = "%H:%M:%S"
+LOG_FORMAT = os.environ.get('WAF_LOG_FORMAT', '%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s')
+HOUR_FORMAT = os.environ.get('WAF_HOUR_FORMAT', '%H:%M:%S')
 zones = ''
 verbose = 0
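
Note: the two formats can now be overridden from the environment before waf starts; hypothetical values shown::

    # e.g. in a wrapper script that launches waf
    import os
    os.environ['WAF_LOG_FORMAT'] = '%(asctime)s %(message)s'
    os.environ['WAF_HOUR_FORMAT'] = '%H:%M'
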
@@ -35,6 +35,7 @@ colors_lst = {
 'PINK' :'\x1b[35m',
 'BLUE' :'\x1b[01;34m',
 'CYAN' :'\x1b[36m',
+'GREY' :'\x1b[37m',
 'NORMAL':'\x1b[0m',
 'cursor_on' :'\x1b[?25h',
 'cursor_off' :'\x1b[?25l',
@@ -126,6 +126,10 @@ class OptionsContext(Context.Context):
 gr.add_option('-o', '--out', action='store', default='', help='build dir for the project', dest='out')
 gr.add_option('-t', '--top', action='store', default='', help='src dir for the project', dest='top')
+gr.add_option('--no-lock-in-run', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_run')
+gr.add_option('--no-lock-in-out', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_out')
+gr.add_option('--no-lock-in-top', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_top')
 default_prefix = getattr(Context.g_module, 'default_prefix', os.environ.get('PREFIX'))
 if not default_prefix:
 if platform == 'win32':
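
Note: these hidden switches reach the patched lock-file conditions in Configure.py through Options.options; the pre-existing env knobs can still be set from a wscript (illustrative sketch, assuming the usual configure hook)::

    def configure(conf):
        # similar in effect to passing --no-lock-in-top on the command line
        conf.env.NO_LOCK_IN_TOP = True
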
@@ -61,10 +61,17 @@ def waf_entry_point(current_directory, version, wafdir):
 no_climb = True
 break
+# if --top is provided assume the build started in the top directory
+for x in sys.argv:
+if x.startswith('--top='):
+Context.run_dir = Context.top_dir = x[6:]
+if x.startswith('--out='):
+Context.out_dir = x[6:]
 # try to find a lock file (if the project was configured)
 # at the same time, store the first wscript file seen
 cur = current_directory
-while cur:
+while cur and not Context.top_dir:
 lst = os.listdir(cur)
 if Options.lockfile in lst:
 env = ConfigSet.ConfigSet()
@@ -557,16 +564,26 @@ def distcheck(ctx):
 pass
 def update(ctx):
-'''updates the plugins from the *waflib/extras* directory'''
-lst = Options.options.files.split(',')
-if not lst:
-lst = [x for x in Utils.listdir(Context.waf_dir + '/waflib/extras') if x.endswith('.py')]
+lst = Options.options.files
+if lst:
+lst = lst.split(',')
+else:
+path = os.path.join(Context.waf_dir, 'waflib', 'extras')
+lst = [x for x in Utils.listdir(path) if x.endswith('.py')]
 for x in lst:
 tool = x.replace('.py', '')
+if not tool:
+continue
+try:
+dl = Configure.download_tool
+except AttributeError:
+ctx.fatal('The command "update" is dangerous; include the tool "use_config" in your project!')
 try:
-Configure.download_tool(tool, force=True, ctx=ctx)
+dl(tool, force=True, ctx=ctx)
 except Errors.WafError:
-Logs.error('Could not find the tool %s in the remote repository' % x)
+Logs.error('Could not find the tool %r in the remote repository' % x)
+else:
+Logs.warn('Updated %r' % tool)
 def autoconfigure(execute_method):
 """
@@ -64,7 +64,7 @@ def f(tsk):
 '''
 classes = {}
-"class tasks created by user scripts or Waf tools are kept in this dict name -> class object"
+"Class tasks created by user scripts or Waf tools (maps names to class objects). Task classes defined in Waf tools are registered here through the metaclass :py:class:`waflib.Task.store_task_type`."
 class store_task_type(type):
 """
@@ -118,6 +118,8 @@ class TaskBase(evil):
 #. runnable_status: ask the task if it should be run, skipped, or if we have to ask later
 #. run: let threads execute the task
 #. post_run: let threads update the data regarding the task (cache)
+.. warning:: For backward compatibility reasons, the suffix "_task" is truncated in derived class names. This limitation will be removed in Waf 1.9.
 """
 color = 'GREEN'
@@ -402,6 +404,8 @@ class Task(TaskBase):
 uses a hash value (from :py:class:`waflib.Task.Task.signature`) which is persistent from build to build. When the value changes,
 the task has to be executed. The method :py:class:`waflib.Task.Task.post_run` will assign the task signature to the output
 nodes (if present).
+.. warning:: For backward compatibility reasons, the suffix "_task" is truncated in derived class names. This limitation will be removed in Waf 1.9.
 """
 vars = []
 """Variables to depend on (class attribute used for :py:meth:`waflib.Task.Task.sig_vars`)"""
@@ -593,6 +593,12 @@ def process_rule(self):
 if getattr(self, 'cache_rule', 'True'):
 cache[(name, self.rule)] = cls
+if getattr(self, 'cls_str', None):
+setattr(cls, '__str__', self.cls_str)
+if getattr(self, 'cls_keyword', None):
+setattr(cls, 'keyword', self.cls_keyword)
 # now create one instance
 tsk = self.create_task(name)
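
Note: a sketch of how the two new attributes could be used on a rule-based task generator (the rule and names below are made up)::

    def build(bld):
        bld(
            rule='cp ${SRC} ${TGT}',
            source='input.txt',
            target='output.txt',
            cls_str=lambda tsk: 'output.txt',   # replaces the generated task class __str__
            cls_keyword=lambda tsk: 'Copying',  # replaces the default progress keyword
        )
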
@@ -1,11 +1,11 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2005-2010 (ita)
+# Thomas Nagy, 2005-2015 (ita)
 "base for all c/c++ programs and libraries"
 import os, sys, re
-from waflib import Utils, Build
+from waflib import Utils, Build, Errors
 from waflib.Configure import conf
 def get_extensions(lst):
@@ -44,26 +44,34 @@ def sniff_features(**kw):
 feats = []
 # watch the order, cxx will have the precedence
-if 'cxx' in exts or 'cpp' in exts or 'c++' in exts or 'cc' in exts or 'C' in exts:
-feats.append('cxx')
+for x in 'cxx cpp c++ cc C'.split():
+if x in exts:
+feats.append('cxx')
+break
 if 'c' in exts or 'vala' in exts:
 feats.append('c')
+for x in 'f f90 F F90 for FOR'.split():
+if x in exts:
+feats.append('fc')
+break
 if 'd' in exts:
 feats.append('d')
 if 'java' in exts:
 feats.append('java')
-if 'java' in exts:
 return 'java'
 if type in ('program', 'shlib', 'stlib'):
+will_link = False
 for x in feats:
-if x in ('cxx', 'd', 'c'):
+if x in ('cxx', 'd', 'fc', 'c'):
 feats.append(x + type)
+will_link = True
+if not will_link and not kw.get('features', []):
+raise Errors.WafError('Cannot link from %r, try passing eg: features="cprogram"?' % kw)
 return feats
 def set_features(kw, _type):
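
Note: the feature sniffing above is what bld.program()/bld.shlib() rely on; a sketch of the effect (file names assumed, not from the patch)::

    def build(bld):
        # extensions map to features: .cpp -> 'cxx', .f90 -> 'fc' (new above), so this
        # target gets 'cxx cxxprogram fc fcprogram'; with no linkable source and no
        # explicit features, the new WafError is raised instead of a silent no-op
        bld.program(source='main.cpp solver.f90', target='app')
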
@@ -25,10 +25,10 @@ cfg_ver = {
 SNIP_FUNCTION = '''
 int main(int argc, char **argv) {
-void *p;
+void (*p)();
 (void)argc; (void)argv;
-p=(void*)(%s);
-return (int)p;
+p=(void(*)())(%s);
+return !p;
 }
 '''
 """Code template for checking for functions"""
@@ -383,7 +383,9 @@ def check_cfg(self, *k, **kw):
 conf.check_cfg(path='sdl-config', args='--cflags --libs', package='', uselib_store='SDL')
 conf.check_cfg(path='mpicc', args='--showme:compile --showme:link',
 package='', uselib_store='OPEN_MPI', mandatory=False)
+# variables
+conf.check_cfg(package='gtk+-2.0', variables=['includedir', 'prefix'], uselib_store='FOO')
+print(conf.env.FOO_includedir)
 """
 if k:
 lst = k[0].split()
@@ -587,6 +589,7 @@ def validate_c(self, kw):
 kw['execute'] = False
 if kw['execute']:
 kw['features'].append('test_exec')
+kw['chmod'] = 493
 if not 'errmsg' in kw:
 kw['errmsg'] = 'not found'
@@ -661,6 +664,16 @@ def check(self, *k, **kw):
 Perform a configuration test by calling :py:func:`waflib.Configure.run_build`.
 For the complete list of parameters, see :py:func:`waflib.Tools.c_config.validate_c`.
 To force a specific compiler, pass "compiler='c'" or "compiler='cxx'" in the arguments
+Besides build targets, complete builds can be given though a build function. All files will
+be written to a temporary directory::
+def build(bld):
+lib_node = bld.srcnode.make_node('libdir/liblc1.c')
+lib_node.parent.mkdir()
+lib_node.write('#include <stdio.h>\\nint lib_func(void) { FILE *f = fopen("foo", "r");}\\n', 'w')
+bld(features='c cshlib', source=[lib_node], linkflags=conf.env.EXTRA_LDFLAGS, target='liblc')
+conf.check(build_fun=build, msg=msg)
 """
 self.validate_c(kw)
 self.start_msg(kw['msg'], **kw)
@@ -855,7 +868,10 @@ def write_config_header(self, configfile='', guard='', top=False, defines=True,
 cnf.define('A', 1)
 cnf.write_config_header('config.h')
-:param configfile: relative path to the file to create
+This function only adds include guards (if necessary), consult
+:py:func:`waflib.Tools.c_config.get_config_header` for details on the body.
+:param configfile: path to the file to create (relative or absolute)
 :type configfile: string
 :param guard: include guard name to add, by default it is computed from the file name
 :type guard: string
@@ -884,7 +900,7 @@ def write_config_header(self, configfile='', guard='', top=False, defines=True,
 node.write('\n'.join(lst))
-# config files are not removed on "waf clean"
+# config files must not be removed on "waf clean"
 self.env.append_unique(Build.CFG_FILES, [node.abspath()])
 if remove:
@@ -898,9 +914,16 @@ def get_config_header(self, defines=True, headers=False, define_prefix=''):
 Create the contents of a ``config.h`` file from the defines and includes
 set in conf.env.define_key / conf.env.include_key. No include guards are added.
+A prelude will be added from the variable env.WAF_CONFIG_H_PRELUDE if provided. This
+can be used to insert complex macros or include guards::
+def configure(conf):
+conf.env.WAF_CONFIG_H_PRELUDE = '#include <unistd.h>\\n'
+conf.write_config_header('config.h')
 :param defines: write the defines values
 :type defines: bool
-:param headers: write the headers
+:param headers: write include entries for each element in self.env.INCKEYS
 :type headers: bool
 :type define_prefix: string
 :param define_prefix: prefix all the defines with a particular prefix
@@ -908,6 +931,10 @@ def get_config_header(self, defines=True, headers=False, define_prefix=''):
 :rtype: string
 """
 lst = []
+if self.env.WAF_CONFIG_H_PRELUDE:
+lst.append(self.env.WAF_CONFIG_H_PRELUDE)
 if headers:
 for x in self.env[INCKEYS]:
 lst.append('#include <%s>' % x)
@@ -931,24 +958,24 @@ def cc_add_flags(conf):
 """
 Add CFLAGS / CPPFLAGS from os.environ to conf.env
 """
-conf.add_os_flags('CPPFLAGS', 'CFLAGS')
-conf.add_os_flags('CFLAGS')
+conf.add_os_flags('CPPFLAGS', dup=False)
+conf.add_os_flags('CFLAGS', dup=False)
 @conf
 def cxx_add_flags(conf):
 """
 Add CXXFLAGS / CPPFLAGS from os.environ to conf.env
 """
-conf.add_os_flags('CPPFLAGS', 'CXXFLAGS')
-conf.add_os_flags('CXXFLAGS')
+conf.add_os_flags('CPPFLAGS', dup=False)
+conf.add_os_flags('CXXFLAGS', dup=False)
 @conf
 def link_add_flags(conf):
 """
 Add LINKFLAGS / LDFLAGS from os.environ to conf.env
 """
-conf.add_os_flags('LINKFLAGS')
-conf.add_os_flags('LDFLAGS')
+conf.add_os_flags('LINKFLAGS', dup=False)
+conf.add_os_flags('LDFLAGS', dup=False)
 @conf
 def cc_load_tools(conf):
@@ -978,15 +1005,10 @@ def get_cc_version(conf, cc, gcc=False, icc=False, clang=False):
 cmd = cc + ['-dM', '-E', '-']
 env = conf.env.env or None
 try:
-p = Utils.subprocess.Popen(cmd, stdin=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=env)
-p.stdin.write('\n'.encode())
-out = p.communicate()[0]
+out, err = conf.cmd_and_log(cmd, output=0, input='\n'.encode(), env=env)
 except Exception:
 conf.fatal('Could not determine the compiler version %r' % cmd)
-if not isinstance(out, str):
-out = out.decode(sys.stdout.encoding or 'iso8859-1')
 if gcc:
 if out.find('__INTEL_COMPILER') >= 0:
 conf.fatal('The intel compiler pretends to be gcc')
@@ -999,7 +1021,7 @@ def get_cc_version(conf, cc, gcc=False, icc=False, clang=False):
 if clang and out.find('__clang__') < 0:
 conf.fatal('Not clang/clang++')
 if not clang and out.find('__clang__') >= 0:
-conf.fatal('Could not find g++, if renamed try eg: CXX=g++48 waf configure')
+conf.fatal('Could not find gcc/g++ (only Clang), if renamed try eg: CC=gcc48 CXX=g++48 waf configure')
 k = {}
 if icc or gcc or clang:
@@ -1014,9 +1036,6 @@ def get_cc_version(conf, cc, gcc=False, icc=False, clang=False):
 def isD(var):
 return var in k
-def isT(var):
-return var in k and k[var] != '0'
 # Some documentation is available at http://predef.sourceforge.net
 # The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns.
 if not conf.env.DEST_OS:
@@ -1053,17 +1072,11 @@ def get_cc_version(conf, cc, gcc=False, icc=False, clang=False):
 ver = k['__INTEL_COMPILER']
 conf.env['CC_VERSION'] = (ver[:-2], ver[-2], ver[-1])
 else:
-if isD('__clang__'):
-try:
-conf.env['CC_VERSION'] = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__'])
-except KeyError:
-# Some versions of OSX have a faux-gcc "clang" without clang version defines
-conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__'])
+if isD('__clang__') and isD('__clang_major__'):
+conf.env['CC_VERSION'] = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__'])
 else:
-try:
-conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__'])
-except KeyError:
-conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], 0)
+# older clang versions and gcc
+conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k.get('__GNUC_PATCHLEVEL__', '0'))
 return k
 @conf
@@ -1165,6 +1178,7 @@ def multicheck(self, *k, **kw):
 self.keep = False
 self.returned_tasks = []
 self.task_sigs = {}
+self.progress_bar = 0
 def total(self):
 return len(tasks)
 def to_log(self, *k, **kw):
@@ -1195,10 +1209,17 @@ def multicheck(self, *k, **kw):
 for x in tasks:
 x.logger.memhandler.flush()
+if p.error:
+for x in p.error:
+if getattr(x, 'err_msg', None):
+self.to_log(x.err_msg)
+self.end_msg('fail', color='RED')
+raise Errors.WafError('There is an error in the library, read config.log for more information')
 for x in tasks:
 if x.hasrun != Task.SUCCESS:
 self.end_msg(kw.get('errmsg', 'no'), color='YELLOW', **kw)
-self.fatal(kw.get('fatalmsg', None) or 'One of the tests has failed, see the config.log for more information')
+self.fatal(kw.get('fatalmsg', None) or 'One of the tests has failed, read config.log for more information')
 self.end_msg('ok', **kw)
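
Note: the hunk above makes conf.multicheck() report scheduler errors as well; a typical invocation for context (standard waf usage, not specific to this patch)::

    def configure(conf):
        conf.multicheck(
            {'header_name': 'stdio.h'},
            {'header_name': 'unistd.h'},
            {'header_name': 'stdlib.h'},
            msg='Checking for standard headers',
        )
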
@@ -24,7 +24,7 @@ app_info = '''
 <key>NOTE</key>
 <string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
 <key>CFBundleExecutable</key>
-<string>%s</string>
+<string>{app_name}</string>
 </dict>
 </plist>
 '''
@@ -71,7 +71,7 @@ def create_task_macapp(self):
 To compile an executable into a Mac application (a .app), set its *mac_app* attribute::
 def build(bld):
-bld.shlib(source='a.c', target='foo', mac_app = True)
+bld.shlib(source='a.c', target='foo', mac_app=True)
 To force *all* executables to be transformed into Mac applications::
@@ -91,7 +91,22 @@ def create_task_macapp(self):
 inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/MacOS/' % name
 self.bld.install_files(inst_to, n1, chmod=Utils.O755)
+if getattr(self, 'mac_files', None):
+# this only accepts files; they will be installed as seen from mac_files_root
+mac_files_root = getattr(self, 'mac_files_root', None)
+if isinstance(mac_files_root, str):
+mac_files_root = self.path.find_node(mac_files_root)
+if not mac_files_root:
+self.bld.fatal('Invalid mac_files_root %r' % self.mac_files_root)
+res_dir = n1.parent.parent.make_node('Resources')
+inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Resources' % name
+for node in self.to_nodes(self.mac_files):
+relpath = node.path_from(mac_files_root or node.parent)
+tsk = self.create_task('macapp', node, res_dir.make_node(relpath))
+self.bld.install_as(os.path.join(inst_to, relpath), node)
 if getattr(self, 'mac_resources', None):
+# TODO remove in waf 1.9
 res_dir = n1.parent.parent.make_node('Resources')
 inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Resources' % name
 for x in self.to_list(self.mac_resources):
@@ -127,6 +142,14 @@ def create_task_macplist(self):
 dir = self.create_bundle_dirs(name, out)
 n1 = dir.find_or_declare(['Contents', 'Info.plist'])
 self.plisttask = plisttask = self.create_task('macplist', [], n1)
+plisttask.context = {
+'app_name': self.link_task.outputs[0].name,
+'env': self.env
+}
+plist_ctx = getattr(self, 'plist_context', None)
+if (plist_ctx):
+plisttask.context.update(plist_ctx)
 if getattr(self, 'mac_plist', False):
 node = self.path.find_resource(self.mac_plist)
@@ -135,7 +158,7 @@ def create_task_macplist(self):
 else:
 plisttask.code = self.mac_plist
 else:
-plisttask.code = app_info % self.link_task.outputs[0].name
+plisttask.code = app_info
 inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/' % name
 self.bld.install_files(inst_to, n1)
@@ -184,5 +207,6 @@ class macplist(Task.Task):
 txt = self.code
 else:
 txt = self.inputs[0].read()
+context = getattr(self, 'context', {})
+txt = txt.format(**context)
 self.outputs[0].write(txt)
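
Note: a sketch combining the new attributes handled above (paths and values are hypothetical)::

    def build(bld):
        bld.program(
            source='main.c',
            target='MyApp',
            mac_app=True,
            mac_files=['data/icon.icns', 'data/strings.txt'],
            mac_files_root='data',                 # install relative to this folder inside Resources
            plist_context={'app_name': 'MyApp'},   # extra keys for the {app_name}-style Info.plist template
        )
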
@@ -489,10 +489,19 @@ def apply_vnum(self):
 def build(bld):
 bld.shlib(source='a.c', target='foo', vnum='14.15.16')
-In this example, ``libfoo.so`` is installed as ``libfoo.so.1.2.3``, and the following symbolic links are created:
+In this example on Linux platform, ``libfoo.so`` is installed as ``libfoo.so.14.15.16``, and the following symbolic links are created:
-* ``libfoo.so → libfoo.so.1.2.3``
-* ``libfoo.so.1 → libfoo.so.1.2.3``
+* ``libfoo.so → libfoo.so.14.15.16``
+* ``libfoo.so.14 → libfoo.so.14.15.16``
+By default, the library will be assigned SONAME ``libfoo.so.14``, effectively declaring ABI compatibility between all minor and patch releases for the major version of the library. When necessary, the compatibility can be explicitly defined using `cnum` parameter:
+def build(bld):
+bld.shlib(source='a.c', target='foo', vnum='14.15.16', cnum='14.15')
+In this case, the assigned SONAME will be ``libfoo.so.14.15`` with ABI compatibility only between path releases for a specific major and minor version of the library.
+On OS X platform, install-name parameter will follow the above logic for SONAME with exception that it also specifies an absolute path (based on install_path) of the library.
 """
 if not getattr(self, 'vnum', '') or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'):
 return
@@ -503,13 +512,18 @@ def apply_vnum(self):
 nums = self.vnum.split('.')
 node = link.outputs[0]
+cnum = getattr(self, 'cnum', str(nums[0]))
+cnums = cnum.split('.')
+if len(cnums)>len(nums) or nums[0:len(cnums)] != cnums:
+raise Errors.WafError('invalid compatibility version %s' % cnum)
 libname = node.name
 if libname.endswith('.dylib'):
 name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum)
-name2 = libname.replace('.dylib', '.%s.dylib' % nums[0])
+name2 = libname.replace('.dylib', '.%s.dylib' % cnum)
 else:
 name3 = libname + '.' + self.vnum
-name2 = libname + '.' + nums[0]
+name2 = libname + '.' + cnum
 # add the so name for the ld linker - to disable, just unset env.SONAME_ST
 if self.env.SONAME_ST:
@@ -548,8 +562,10 @@ def apply_vnum(self):
 inst_to = self.link_task.__class__.inst_to
 if inst_to:
 p = Utils.subst_vars(inst_to, self.env)
-path = os.path.join(p, self.link_task.outputs[0].name)
+path = os.path.join(p, name2)
 self.env.append_value('LINKFLAGS', ['-install_name', path])
+self.env.append_value('LINKFLAGS', '-Wl,-compatibility_version,%s' % cnum)
+self.env.append_value('LINKFLAGS', '-Wl,-current_version,%s' % self.vnum)
 class vnum(Task.Task):
 """
@@ -21,6 +21,7 @@ def find_clang(conf):
 def configure(conf):
 conf.find_clang()
+conf.find_program(['llvm-ar', 'ar'], var='AR')
 conf.find_ar()
 conf.gcc_common_flags()
 conf.gcc_modifier_platform()
@@ -21,6 +21,7 @@ def find_clangxx(conf):
 def configure(conf):
 conf.find_clangxx()
+conf.find_program(['llvm-ar', 'ar'], var='AR')
 conf.find_ar()
 conf.gxx_common_flags()
 conf.gxx_modifier_platform()
@@ -64,7 +64,7 @@ def check_same_targets(self):
 if not dupe:
 for (k, v) in uids.items():
 if len(v) > 1:
-Logs.error('* Several tasks use the same identifier. Please check the information on\n http://docs.waf.googlecode.com/git/apidocs_16/Task.html#waflib.Task.Task.uid')
+Logs.error('* Several tasks use the same identifier. Please check the information on\n https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid')
 for tsk in v:
 Logs.error(' - object %r (%r) defined in %r' % (tsk.__class__.__name__, tsk, tsk.generator))
@@ -97,7 +97,7 @@ def gcc_modifier_darwin(conf):
 """Configuration flags for executing gcc on MacOS"""
 v = conf.env
 v['CFLAGS_cshlib'] = ['-fPIC']
-v['LINKFLAGS_cshlib'] = ['-dynamiclib', '-Wl,-compatibility_version,1', '-Wl,-current_version,1']
+v['LINKFLAGS_cshlib'] = ['-dynamiclib']
 v['cshlib_PATTERN'] = 'lib%s.dylib'
 v['FRAMEWORKPATH_ST'] = '-F%s'
 v['FRAMEWORK_ST'] = ['-framework']
@@ -97,7 +97,7 @@ def gxx_modifier_darwin(conf):
 """Configuration flags for executing g++ on MacOS"""
 v = conf.env
 v['CXXFLAGS_cxxshlib'] = ['-fPIC']
-v['LINKFLAGS_cxxshlib'] = ['-dynamiclib', '-Wl,-compatibility_version,1', '-Wl,-current_version,1']
+v['LINKFLAGS_cxxshlib'] = ['-dynamiclib']
 v['cxxshlib_PATTERN'] = 'lib%s.dylib'
 v['FRAMEWORKPATH_ST'] = '-F%s'
 v['FRAMEWORK_ST'] = ['-framework']
@@ -30,6 +30,10 @@ or::
 Platforms and targets will be tested in the order they appear;
 the first good configuration will be used.
+To skip testing all the configurations that are not used, use the ``--msvc_lazy_autodetect`` option
+or set ``conf.env['MSVC_LAZY_AUTODETECT']=True``.
 Supported platforms: ia64, x64, x86, x86_amd64, x86_ia64, x86_arm, amd64_x86, amd64_arm
 Compilers supported:
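
Note: a minimal way to opt in from a wscript, as described above (illustrative sketch)::

    def configure(conf):
        conf.env.MSVC_LAZY_AUTODETECT = True   # same effect as --msvc_lazy_autodetect
        conf.load('msvc')
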
@@ -90,8 +94,18 @@ all_icl_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'),
 def options(opt):
 opt.add_option('--msvc_version', type='string', help = 'msvc version, eg: "msvc 10.0,msvc 9.0"', default='')
 opt.add_option('--msvc_targets', type='string', help = 'msvc targets, eg: "x64,arm"', default='')
+opt.add_option('--msvc_lazy_autodetect', action='store_true', help = 'lazily check msvc target environments')
 def setup_msvc(conf, versions, arch = False):
+"""
+Checks installed compilers and targets and returns the first combination from the user's
+options, env, or the global supported lists that checks.
+:param versions: A list of tuples of all installed compilers and available targets.
+:param arch: Whether to return the target architecture.
+:return: the compiler, revision, path, include dirs, library paths, and (optionally) target architecture
+:rtype: tuple of strings
+"""
 platforms = getattr(Options.options, 'msvc_targets', '').split(',')
 if platforms == ['']:
 platforms=Utils.to_list(conf.env['MSVC_TARGETS']) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
@@ -102,15 +116,20 @@ def setup_msvc(conf, versions, arch = False):
 for version in desired_versions:
 try:
-targets = dict(versiondict [version])
+targets = dict(versiondict[version])
 for target in platforms:
 try:
-arch,(p1,p2,p3) = targets[target]
-compiler,revision = version.rsplit(' ', 1)
-if arch:
-return compiler,revision,p1,p2,p3,arch
+try:
+realtarget,(p1,p2,p3) = targets[target]
+except conf.errors.ConfigurationError:
+# lazytup target evaluation errors
+del(targets[target])
 else:
-return compiler,revision,p1,p2,p3
+compiler,revision = version.rsplit(' ', 1)
+if arch:
+return compiler,revision,p1,p2,p3,realtarget
+else:
+return compiler,revision,p1,p2,p3
 except KeyError: continue
 except KeyError: continue
 conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)')
@@ -118,13 +137,14 @@ def setup_msvc(conf, versions, arch = False):
 @conf
 def get_msvc_version(conf, compiler, version, target, vcvars):
 """
-Create a bat file to obtain the location of the libraries
-:param compiler: ?
-:param version: ?
-:target: ?
-:vcvars: ?
-:return: the location of msvc, the location of include dirs, and the library paths
+Checks that an installed compiler actually runs and uses vcvars to obtain the
+environment needed by the compiler.
+:param compiler: compiler type, for looking up the executable name
+:param version: compiler version, for debugging only
+:param target: target architecture
+:param vcvars: batch file to run to check the environment
+:return: the location of the compiler executable, the location of include dirs, and the library paths
 :rtype: tuple of strings
 """
 debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)
@@ -219,7 +239,7 @@ def gather_wsdk_versions(conf, versions):
 targets = []
 for target,arch in all_msvc_platforms:
 try:
-targets.append((target, (arch, conf.get_msvc_version('wsdk', version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd')))))
+targets.append((target, (arch, get_compiler_env(conf, 'wsdk', version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd')))))
 except conf.errors.ConfigurationError:
 pass
 versions.append(('wsdk ' + version[1:], targets))
@@ -262,12 +282,12 @@ def gather_wince_supported_platforms():
 path,device = os.path.split(path)
 if not device:
 path,device = os.path.split(path)
+platforms = []
 for arch,compiler in all_wince_platforms:
-platforms = []
 if os.path.isdir(os.path.join(path, device, 'Lib', arch)):
 platforms.append((arch, compiler, os.path.join(path, device, 'Include', arch), os.path.join(path, device, 'Lib', arch)))
-if platforms:
-supported_wince_platforms.append((device, platforms))
+if platforms:
+supported_wince_platforms.append((device, platforms))
 return supported_wince_platforms
 def gather_msvc_detected_versions():
@@ -304,6 +324,65 @@ def gather_msvc_detected_versions():
 detected_versions.sort(key = fun)
 return detected_versions
| def get_compiler_env(conf, compiler, version, bat_target, bat, select=None): | |||||
| """ | |||||
| Gets the compiler environment variables as a tuple. Evaluation is eager by default. | |||||
| If set to lazy with ``--msvc_lazy_autodetect`` or ``env.MSVC_LAZY_AUTODETECT`` | |||||
| the environment is evaluated when the tuple is destructured or iterated. This means | |||||
| destructuring can throw :py:class:`conf.errors.ConfigurationError`. | |||||
| :param conf: configuration context to use to eventually get the version environment | |||||
| :param compiler: compiler name | |||||
| :param version: compiler version number | |||||
| :param bat: path to the batch file to run | |||||
| :param select: optional function to take the realized environment variables tup and map it (e.g. to combine other constant paths) | |||||
| """ | |||||
| lazy = getattr(Options.options, 'msvc_lazy_autodetect', False) or conf.env['MSVC_LAZY_AUTODETECT'] | |||||
| def msvc_thunk(): | |||||
| vs = conf.get_msvc_version(compiler, version, bat_target, bat) | |||||
| if select: | |||||
| return select(vs) | |||||
| else: | |||||
| return vs | |||||
| return lazytup(msvc_thunk, lazy, ([], [], [])) | |||||
| class lazytup(object): | |||||
| """ | |||||
| A tuple that evaluates its elements from a function when iterated or destructured. | |||||
| :param fn: thunk that computes the tuple contents on demand | |||||
| :param lazy: whether to delay evaluation or evaluate in the constructor | |||||
| :param default: optional value shown by :py:func:`repr` while the tuple has not been evaluated | |||||
| """ | |||||
| def __init__(self, fn, lazy=True, default=None): | |||||
| self.fn = fn | |||||
| self.default = default | |||||
| if not lazy: | |||||
| self.evaluate() | |||||
| def __len__(self): | |||||
| self.evaluate() | |||||
| return len(self.value) | |||||
| def __iter__(self): | |||||
| self.evaluate() | |||||
| for i, v in enumerate(self.value): | |||||
| yield v | |||||
| def __getitem__(self, i): | |||||
| self.evaluate() | |||||
| return self.value[i] | |||||
| def __repr__(self): | |||||
| if hasattr(self, 'value'): | |||||
| return repr(self.value) | |||||
| elif self.default: | |||||
| return repr(self.default) | |||||
| else: | |||||
| self.evaluate() | |||||
| return repr(self.value) | |||||
| def evaluate(self): | |||||
| if hasattr(self, 'value'): | |||||
| return | |||||
| self.value = self.fn() | |||||
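To make the lazy semantics of get_compiler_env/lazytup concrete, here is a small self-contained sketch; the thunk and the paths it returns are invented for illustration and merely stand in for running a vcvars batch file.

# Illustration only: the thunk simulates an expensive/fragile detection step.
def fake_detection():
    print('pretend we just ran vcvarsall.bat')
    return (['C:\\fake\\bin'], ['C:\\fake\\include'], ['C:\\fake\\lib'])

tup = lazytup(fake_detection, lazy=True, default=([], [], []))
print(repr(tup))                  # shows the default; nothing has been evaluated yet
bindirs, incdirs, libdirs = tup   # destructuring triggers evaluate(); in real use a
                                  # conf.errors.ConfigurationError could surface here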
| @conf | @conf | ||||
| def gather_msvc_targets(conf, versions, version, vc_path): | def gather_msvc_targets(conf, versions, version, vc_path): | ||||
| #Looking for normal MSVC compilers! | #Looking for normal MSVC compilers! | ||||
| @@ -311,17 +390,17 @@ def gather_msvc_targets(conf, versions, version, vc_path): | |||||
| if os.path.isfile(os.path.join(vc_path, 'vcvarsall.bat')): | if os.path.isfile(os.path.join(vc_path, 'vcvarsall.bat')): | ||||
| for target,realtarget in all_msvc_platforms[::-1]: | for target,realtarget in all_msvc_platforms[::-1]: | ||||
| try: | try: | ||||
| targets.append((target, (realtarget, conf.get_msvc_version('msvc', version, target, os.path.join(vc_path, 'vcvarsall.bat'))))) | |||||
| targets.append((target, (realtarget, get_compiler_env(conf, 'msvc', version, target, os.path.join(vc_path, 'vcvarsall.bat'))))) | |||||
| except conf.errors.ConfigurationError: | except conf.errors.ConfigurationError: | ||||
| pass | pass | ||||
| elif os.path.isfile(os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat')): | elif os.path.isfile(os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat')): | ||||
| try: | try: | ||||
| targets.append(('x86', ('x86', conf.get_msvc_version('msvc', version, 'x86', os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat'))))) | |||||
| targets.append(('x86', ('x86', get_compiler_env(conf, 'msvc', version, 'x86', os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat'))))) | |||||
| except conf.errors.ConfigurationError: | except conf.errors.ConfigurationError: | ||||
| pass | pass | ||||
| elif os.path.isfile(os.path.join(vc_path, 'Bin', 'vcvars32.bat')): | elif os.path.isfile(os.path.join(vc_path, 'Bin', 'vcvars32.bat')): | ||||
| try: | try: | ||||
| targets.append(('x86', ('x86', conf.get_msvc_version('msvc', version, '', os.path.join(vc_path, 'Bin', 'vcvars32.bat'))))) | |||||
| targets.append(('x86', ('x86', get_compiler_env(conf, 'msvc', version, '', os.path.join(vc_path, 'Bin', 'vcvars32.bat'))))) | |||||
| except conf.errors.ConfigurationError: | except conf.errors.ConfigurationError: | ||||
| pass | pass | ||||
| if targets: | if targets: | ||||
| @@ -336,15 +415,18 @@ def gather_wince_targets(conf, versions, version, vc_path, vsvars, supported_pla | |||||
| winCEpath = os.path.join(vc_path, 'ce') | winCEpath = os.path.join(vc_path, 'ce') | ||||
| if not os.path.isdir(winCEpath): | if not os.path.isdir(winCEpath): | ||||
| continue | continue | ||||
| try: | |||||
| common_bindirs,_1,_2 = conf.get_msvc_version('msvc', version, 'x86', vsvars) | |||||
| except conf.errors.ConfigurationError: | |||||
| continue | |||||
| if os.path.isdir(os.path.join(winCEpath, 'lib', platform)): | if os.path.isdir(os.path.join(winCEpath, 'lib', platform)): | ||||
| bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 'x86_'+compiler)] + common_bindirs | |||||
| bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 'x86_'+compiler)] | |||||
| incdirs = [os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include'), include] | incdirs = [os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include'), include] | ||||
| libdirs = [os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform), lib] | libdirs = [os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform), lib] | ||||
| cetargets.append((platform, (platform, (bindirs,incdirs,libdirs)))) | |||||
| def combine_common(compiler_env): | |||||
| (common_bindirs,_1,_2) = compiler_env | |||||
| return (bindirs + common_bindirs, incdirs, libdirs) | |||||
| try: | |||||
| cetargets.append((platform, (platform, get_compiler_env(conf, 'msvc', version, 'x86', vsvars, combine_common)))) | |||||
| except conf.errors.ConfigurationError: | |||||
| continue | |||||
| if cetargets: | if cetargets: | ||||
| versions.append((device + ' ' + version, cetargets)) | versions.append((device + ' ' + version, cetargets)) | ||||
| @@ -354,8 +436,8 @@ def gather_winphone_targets(conf, versions, version, vc_path, vsvars): | |||||
| targets = [] | targets = [] | ||||
| for target,realtarget in all_msvc_platforms[::-1]: | for target,realtarget in all_msvc_platforms[::-1]: | ||||
| try: | try: | ||||
| targets.append((target, (realtarget, conf.get_msvc_version('winphone', version, target, vsvars)))) | |||||
| except conf.errors.ConfigurationError as e: | |||||
| targets.append((target, (realtarget, get_compiler_env(conf, 'winphone', version, target, vsvars)))) | |||||
| except conf.errors.ConfigurationError: | |||||
| pass | pass | ||||
| if targets: | if targets: | ||||
| versions.append(('winphone '+ version, targets)) | versions.append(('winphone '+ version, targets)) | ||||
| @@ -382,9 +464,14 @@ def gather_msvc_versions(conf, versions): | |||||
| if wince_supported_platforms and os.path.isfile(vsvars): | if wince_supported_platforms and os.path.isfile(vsvars): | ||||
| conf.gather_wince_targets(versions, version, vc_path, vsvars, wince_supported_platforms) | conf.gather_wince_targets(versions, version, vc_path, vsvars, wince_supported_platforms) | ||||
| # WP80 works with 11.0Exp and 11.0, both of which resolve to the same vc_path. | |||||
| # Stop after one is found. | |||||
| for version,vc_path in vc_paths: | |||||
| vs_path = os.path.dirname(vc_path) | |||||
| vsvars = os.path.join(vs_path, 'VC', 'WPSDK', 'WP80', 'vcvarsphoneall.bat') | vsvars = os.path.join(vs_path, 'VC', 'WPSDK', 'WP80', 'vcvarsphoneall.bat') | ||||
| if os.path.isfile(vsvars): | if os.path.isfile(vsvars): | ||||
| conf.gather_winphone_targets(versions, '8.0', vc_path, vsvars) | conf.gather_winphone_targets(versions, '8.0', vc_path, vsvars) | ||||
| break | |||||
| for version,vc_path in vc_paths: | for version,vc_path in vc_paths: | ||||
| vs_path = os.path.dirname(vc_path) | vs_path = os.path.dirname(vc_path) | ||||
| @@ -426,7 +513,7 @@ def gather_icl_versions(conf, versions): | |||||
| batch_file=os.path.join(path,'bin','iclvars.bat') | batch_file=os.path.join(path,'bin','iclvars.bat') | ||||
| if os.path.isfile(batch_file): | if os.path.isfile(batch_file): | ||||
| try: | try: | ||||
| targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file)))) | |||||
| targets.append((target,(arch,get_compiler_env(conf,'intel',version,target,batch_file)))) | |||||
| except conf.errors.ConfigurationError: | except conf.errors.ConfigurationError: | ||||
| pass | pass | ||||
| except WindowsError: | except WindowsError: | ||||
| @@ -438,7 +525,7 @@ def gather_icl_versions(conf, versions): | |||||
| batch_file=os.path.join(path,'bin','iclvars.bat') | batch_file=os.path.join(path,'bin','iclvars.bat') | ||||
| if os.path.isfile(batch_file): | if os.path.isfile(batch_file): | ||||
| try: | try: | ||||
| targets.append((target, (arch, conf.get_msvc_version('intel', version, target, batch_file)))) | |||||
| targets.append((target, (arch, get_compiler_env(conf, 'intel', version, target, batch_file)))) | |||||
| except conf.errors.ConfigurationError: | except conf.errors.ConfigurationError: | ||||
| pass | pass | ||||
| except WindowsError: | except WindowsError: | ||||
| @@ -490,8 +577,8 @@ def gather_intel_composer_versions(conf, versions): | |||||
| batch_file=os.path.join(path,'bin','iclvars.bat') | batch_file=os.path.join(path,'bin','iclvars.bat') | ||||
| if os.path.isfile(batch_file): | if os.path.isfile(batch_file): | ||||
| try: | try: | ||||
| targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file)))) | |||||
| except conf.errors.ConfigurationError as e: | |||||
| targets.append((target,(arch,get_compiler_env(conf,'intel',version,target,batch_file)))) | |||||
| except conf.errors.ConfigurationError: | |||||
| pass | pass | ||||
| # The intel compilervar_arch.bat is broken when used with Visual Studio Express 2012 | # The intel compilervar_arch.bat is broken when used with Visual Studio Express 2012 | ||||
| # http://software.intel.com/en-us/forums/topic/328487 | # http://software.intel.com/en-us/forums/topic/328487 | ||||
| @@ -516,19 +603,36 @@ def gather_intel_composer_versions(conf, versions): | |||||
| versions.append(('intel ' + major, targets)) | versions.append(('intel ' + major, targets)) | ||||
| @conf | @conf | ||||
| def get_msvc_versions(conf): | |||||
| def get_msvc_versions(conf, eval_and_save=True): | |||||
| """ | """ | ||||
| :return: list of compilers installed | :return: list of compilers installed | ||||
| :rtype: list of string | :rtype: list of string | ||||
| """ | """ | ||||
| if not conf.env['MSVC_INSTALLED_VERSIONS']: | |||||
| lst = [] | |||||
| conf.gather_icl_versions(lst) | |||||
| conf.gather_intel_composer_versions(lst) | |||||
| conf.gather_wsdk_versions(lst) | |||||
| conf.gather_msvc_versions(lst) | |||||
| if conf.env['MSVC_INSTALLED_VERSIONS']: | |||||
| return conf.env['MSVC_INSTALLED_VERSIONS'] | |||||
| # Gather all the compiler versions and targets. This phase can be lazy, | |||||
| # depending on the lazy detection settings. | |||||
| lst = [] | |||||
| conf.gather_icl_versions(lst) | |||||
| conf.gather_intel_composer_versions(lst) | |||||
| conf.gather_wsdk_versions(lst) | |||||
| conf.gather_msvc_versions(lst) | |||||
| # Override lazy detection by evaluating after the fact. | |||||
| if eval_and_save: | |||||
| def checked_target(t): | |||||
| target,(arch,paths) = t | |||||
| try: | |||||
| paths.evaluate() | |||||
| except conf.errors.ConfigurationError: | |||||
| return None | |||||
| else: | |||||
| return t | |||||
| lst = [(version, list(filter(checked_target, targets))) for version, targets in lst] | |||||
| conf.env['MSVC_INSTALLED_VERSIONS'] = lst | conf.env['MSVC_INSTALLED_VERSIONS'] = lst | ||||
| return conf.env['MSVC_INSTALLED_VERSIONS'] | |||||
| return lst | |||||
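A hedged sketch of how a project could opt into lazy detection from its wscript; it relies only on the MSVC_LAZY_AUTODETECT variable and --msvc_lazy_autodetect option referenced in this patch, and assumes the option is registered by the tool's options() hook.

# Sketch only: opt into lazy MSVC autodetection before loading the compiler tool.
def options(opt):
    opt.load('compiler_c')

def configure(conf):
    conf.env.MSVC_LAZY_AUTODETECT = True   # or pass --msvc_lazy_autodetect on the command line
    conf.load('compiler_c')                # detection now defers running the vcvars batch files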
| @conf | @conf | ||||
| def print_all_msvc_detected(conf): | def print_all_msvc_detected(conf): | ||||
| @@ -542,7 +646,9 @@ def print_all_msvc_detected(conf): | |||||
| @conf | @conf | ||||
| def detect_msvc(conf, arch = False): | def detect_msvc(conf, arch = False): | ||||
| versions = get_msvc_versions(conf) | |||||
| # Save installed versions only if lazy detection is disabled. | |||||
| lazy_detect = getattr(Options.options, 'msvc_lazy_autodetect', False) or conf.env['MSVC_LAZY_AUTODETECT'] | |||||
| versions = get_msvc_versions(conf, not lazy_detect) | |||||
| return setup_msvc(conf, versions, arch) | return setup_msvc(conf, versions, arch) | ||||
| @conf | @conf | ||||
| @@ -3,7 +3,6 @@ | |||||
| # Thomas Nagy, 2006-2010 (ita) | # Thomas Nagy, 2006-2010 (ita) | ||||
| # Ralf Habacker, 2006 (rh) | # Ralf Habacker, 2006 (rh) | ||||
| from waflib import Utils | |||||
| from waflib.Tools import ccroot, ar | from waflib.Tools import ccroot, ar | ||||
| from waflib.Configure import conf | from waflib.Configure import conf | ||||
| @@ -53,7 +52,7 @@ def scc_common_flags(conf): | |||||
| v['cprogram_PATTERN'] = '%s' | v['cprogram_PATTERN'] = '%s' | ||||
| # shared library | # shared library | ||||
| v['CFLAGS_cshlib'] = ['-Kpic', '-DPIC'] | |||||
| v['CFLAGS_cshlib'] = ['-xcode=pic32', '-DPIC'] | |||||
| v['LINKFLAGS_cshlib'] = ['-G'] | v['LINKFLAGS_cshlib'] = ['-G'] | ||||
| v['cshlib_PATTERN'] = 'lib%s.so' | v['cshlib_PATTERN'] = 'lib%s.so' | ||||
| @@ -68,4 +67,3 @@ def configure(conf): | |||||
| conf.cc_load_tools() | conf.cc_load_tools() | ||||
| conf.cc_add_flags() | conf.cc_add_flags() | ||||
| conf.link_add_flags() | conf.link_add_flags() | ||||
| @@ -3,7 +3,6 @@ | |||||
| # Thomas Nagy, 2006-2010 (ita) | # Thomas Nagy, 2006-2010 (ita) | ||||
| # Ralf Habacker, 2006 (rh) | # Ralf Habacker, 2006 (rh) | ||||
| from waflib import Utils | |||||
| from waflib.Tools import ccroot, ar | from waflib.Tools import ccroot, ar | ||||
| from waflib.Configure import conf | from waflib.Configure import conf | ||||
| @@ -51,7 +50,7 @@ def sxx_common_flags(conf): | |||||
| v['cxxprogram_PATTERN'] = '%s' | v['cxxprogram_PATTERN'] = '%s' | ||||
| # shared library | # shared library | ||||
| v['CXXFLAGS_cxxshlib'] = ['-Kpic', '-DPIC'] | |||||
| v['CXXFLAGS_cxxshlib'] = ['-xcode=pic32', '-DPIC'] | |||||
| v['LINKFLAGS_cxxshlib'] = ['-G'] | v['LINKFLAGS_cxxshlib'] = ['-G'] | ||||
| v['cxxshlib_PATTERN'] = 'lib%s.so' | v['cxxshlib_PATTERN'] = 'lib%s.so' | ||||
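The -xcode=pic32 flags above only take effect when a shared library is built with the Sun/Oracle Studio compilers; a minimal wscript sketch for that case follows (source and target names are placeholders).

# Hypothetical wscript; 'mylib.cpp' and the target name are placeholders.
def options(opt):
    opt.load('compiler_cxx')

def configure(conf):
    conf.load('compiler_cxx')   # selects suncxx on Solaris when available

def build(bld):
    bld.shlib(source='mylib.cpp', target='mylib')   # cxxshlib adds -xcode=pic32 -DPIC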
| @@ -284,7 +284,7 @@ else: | |||||
| wlock.release() | wlock.release() | ||||
| def writeconsole(self, txt): | def writeconsole(self, txt): | ||||
| chars_written = c_int() | |||||
| chars_written = c_ulong() | |||||
| writeconsole = windll.kernel32.WriteConsoleA | writeconsole = windll.kernel32.WriteConsoleA | ||||
| if isinstance(txt, _type): | if isinstance(txt, _type): | ||||
| writeconsole = windll.kernel32.WriteConsoleW | writeconsole = windll.kernel32.WriteConsoleW | ||||
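For context on the c_int to c_ulong change: WriteConsoleA/WriteConsoleW report the number of characters written through an LPDWORD out-parameter, which corresponds to ctypes' c_ulong. A minimal, Windows-only sketch (standard output handle obtained via GetStdHandle(-11)):

# Windows-only sketch; -11 is STD_OUTPUT_HANDLE.
from ctypes import windll, c_ulong, byref

def write_to_console(txt):
    handle = windll.kernel32.GetStdHandle(-11)
    chars_written = c_ulong()
    windll.kernel32.WriteConsoleW(handle, txt, len(txt), byref(chars_written), None)
    return chars_written.value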
| @@ -1,98 +0,0 @@ | |||||
| #! /usr/bin/env python | |||||
| # encoding: utf-8 | |||||
| # Thomas Nagy 2011 | |||||
| import os, shutil, re | |||||
| from waflib import Options, Build, Logs | |||||
| """ | |||||
| Apply a least recently used policy to the Waf cache. | |||||
| For performance reasons, it is called after the build is complete. | |||||
| We assume that the folders are written atomically. | |||||
| Export WAFCACHE=/tmp/foo_xyz, where xyz represents the cache size in bytes. | |||||
| If missing, the default cache size will be set to 10GB. | |||||
| """ | |||||
| re_num = re.compile('[a-zA-Z_-]+(\d+)') | |||||
| CACHESIZE = 10*1024*1024*1024 # in bytes | |||||
| CLEANRATIO = 0.8 | |||||
| DIRSIZE = 4096 | |||||
| def compile(self): | |||||
| if Options.cache_global and not Options.options.nocache: | |||||
| try: | |||||
| os.makedirs(Options.cache_global) | |||||
| except: | |||||
| pass | |||||
| try: | |||||
| self.raw_compile() | |||||
| finally: | |||||
| if Options.cache_global and not Options.options.nocache: | |||||
| self.sweep() | |||||
| def sweep(self): | |||||
| global CACHESIZE | |||||
| CACHEDIR = Options.cache_global | |||||
| # get the cache max size from the WAFCACHE filename | |||||
| re_num = re.compile('[a-zA-Z_]+(\d+)') | |||||
| val = re_num.sub('\\1', os.path.basename(Options.cache_global)) | |||||
| try: | |||||
| CACHESIZE = int(val) | |||||
| except: | |||||
| pass | |||||
| # map folder names to timestamps | |||||
| flist = {} | |||||
| for x in os.listdir(CACHEDIR): | |||||
| j = os.path.join(CACHEDIR, x) | |||||
| if os.path.isdir(j) and len(x) == 64: # dir names are md5 hexdigests | |||||
| flist[x] = [os.stat(j).st_mtime, 0] | |||||
| for (x, v) in flist.items(): | |||||
| cnt = DIRSIZE # each entry takes 4kB | |||||
| d = os.path.join(CACHEDIR, x) | |||||
| for k in os.listdir(d): | |||||
| cnt += os.stat(os.path.join(d, k)).st_size | |||||
| flist[x][1] = cnt | |||||
| total = sum([x[1] for x in flist.values()]) | |||||
| Logs.debug('lru: Cache size is %r' % total) | |||||
| if total >= CACHESIZE: | |||||
| Logs.debug('lru: Trimming the cache since %r > %r' % (total, CACHESIZE)) | |||||
| # make a list to sort the folders by timestamp | |||||
| lst = [(p, v[0], v[1]) for (p, v) in flist.items()] | |||||
| lst.sort(key=lambda x: x[1]) # sort by timestamp | |||||
| lst.reverse() | |||||
| while total >= CACHESIZE * CLEANRATIO: | |||||
| (k, t, s) = lst.pop() | |||||
| p = os.path.join(CACHEDIR, k) | |||||
| v = p + '.del' | |||||
| try: | |||||
| os.rename(p, v) | |||||
| except: | |||||
| # someone already did it | |||||
| pass | |||||
| else: | |||||
| try: | |||||
| shutil.rmtree(v) | |||||
| except: | |||||
| # this should not happen, but who knows? | |||||
| Logs.warn('If you ever see this message, report it (%r)' % v) | |||||
| total -= s | |||||
| del flist[k] | |||||
| Logs.debug('lru: Total at the end %r' % total) | |||||
| Build.BuildContext.raw_compile = Build.BuildContext.compile | |||||
| Build.BuildContext.compile = compile | |||||
| Build.BuildContext.sweep = sweep | |||||
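As an aside, the size-in-filename convention described in the removed module's docstring can be checked with a tiny standalone sketch; the path below is an invented example.

# Invented example path: encodes a 5 GiB cache budget in the directory name.
import os, re
path = '/tmp/wafcache_5368709120'
re_num = re.compile(r'[a-zA-Z_]+(\d+)')
print(int(re_num.sub('\\1', os.path.basename(path))))   # -> 5368709120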