author     Luca Falavigna <dktrkranz@debian.org>    2010-01-02 20:31:28 +0100
committer  Luca Falavigna <dktrkranz@debian.org>    2010-01-02 20:31:28 +0100
commit     d4ced36437871acd01f2ed12b1899d1969b5492f (patch)
tree       25ca9dae17a0b768409dce20de86749ec157e0f5 /engine
parent     a26d8475c55ab30f5940ba815235e6bb0036a6de (diff)
parent     9b73d2781acfc322319eb5c59b30f2dfa0fea977 (diff)
Merge commit 'upstream/1.2.0.d20091224'
Diffstat (limited to 'engine')
187 files changed, 48096 insertions, 0 deletions
diff --git a/engine/SCons/Action.py b/engine/SCons/Action.py new file mode 100644 index 0000000..ec12a66 --- /dev/null +++ b/engine/SCons/Action.py @@ -0,0 +1,1240 @@ +"""SCons.Action + +This encapsulates information about executing any sort of action that +can build one or more target Nodes (typically files) from one or more +source Nodes (also typically files) given a specific Environment. + +The base class here is ActionBase. The base class supplies just a few +OO utility methods and some generic methods for displaying information +about an Action in response to the various commands that control printing. + +A second-level base class is _ActionAction. This extends ActionBase +by providing the methods that can be used to show and perform an +action. True Action objects will subclass _ActionAction; Action +factory class objects will subclass ActionBase. + +The heavy lifting is handled by subclasses for the different types of +actions we might execute: + + CommandAction + CommandGeneratorAction + FunctionAction + ListAction + +The subclasses supply the following public interface methods used by +other modules: + + __call__() + THE public interface, "calling" an Action object executes the + command or Python function. This also takes care of printing + a pre-substitution command for debugging purposes. + + get_contents() + Fetches the "contents" of an Action for signature calculation + plus the varlist. This is what gets MD5 checksummed to decide + if a target needs to be rebuilt because its action changed. + + genstring() + Returns a string representation of the Action *without* + command substitution, but allows a CommandGeneratorAction to + generate the right action based on the specified target, + source and env. This is used by the Signature subsystem + (through the Executor) to obtain an (imprecise) representation + of the Action operation for informative purposes. + + +Subclasses also supply the following methods for internal use within +this module: + + __str__() + Returns a string approximation of the Action; no variable + substitution is performed. + + execute() + The internal method that really, truly, actually handles the + execution of a command or Python function. This is used so + that the __call__() methods can take care of displaying any + pre-substitution representations, and *then* execute an action + without worrying about the specific Actions involved. + + get_presig() + Fetches the "contents" of a subclass for signature calculation. + The varlist is added to this to produce the Action's contents. + + strfunction() + Returns a substituted string representation of the Action. + This is used by the _ActionAction.show() command to display the + command/function that will be executed to generate the target(s). + +There is a related independent ActionCaller class that looks like a +regular Action, and which serves as a wrapper for arbitrary functions +that we want to let the user specify the arguments to now, but actually +execute later (when an out-of-date check determines that it's needed to +be executed, for example). Objects of this class are returned by an +ActionFactory class that provides a __call__() method as a convenient +way for wrapping up the functions. 
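For orientation, a minimal sketch of the kinds of objects the Action() factory described in the docstring above will accept; the command lines and the build_it function are hypothetical, illustrative values, not taken from the imported source:

    import SCons.Action

    # A command string produces a CommandAction; construction variables
    # such as $CC, $TARGET and $SOURCE are substituted at execution time.
    compile_act = SCons.Action.Action('$CC $CFLAGS -c -o $TARGET $SOURCE')

    # A Python callable produces a FunctionAction; the optional second
    # argument (cmdstr) controls how the action is displayed when run.
    def build_it(target, source, env):
        # create the target from the source here; return 0 (or None) on success
        return 0
    func_act = SCons.Action.Action(build_it, 'BUILDING $TARGET')

    # A list of commands and/or callables produces a ListAction that
    # executes its elements in sequence.
    list_act = SCons.Action.Action([compile_act, build_it])

    # A bare construction-variable reference is wrapped in a LazyAction
    # and resolved from the construction environment only at execution time.
    lazy_act = SCons.Action.Action('$CCCOM')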
+ +""" + +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +__revision__ = "src/engine/SCons/Action.py 4577 2009/12/27 19:43:56 scons" + +import cPickle +import dis +import os +import re +import string +import sys +import subprocess + +from SCons.Debug import logInstanceCreation +import SCons.Errors +import SCons.Executor +import SCons.Util +import SCons.Subst + +# we use these a lot, so try to optimize them +is_String = SCons.Util.is_String +is_List = SCons.Util.is_List + +class _null: + pass + +print_actions = 1 +execute_actions = 1 +print_actions_presub = 0 + +def rfile(n): + try: + return n.rfile() + except AttributeError: + return n + +def default_exitstatfunc(s): + return s + +try: + SET_LINENO = dis.SET_LINENO + HAVE_ARGUMENT = dis.HAVE_ARGUMENT +except AttributeError: + remove_set_lineno_codes = lambda x: x +else: + def remove_set_lineno_codes(code): + result = [] + n = len(code) + i = 0 + while i < n: + c = code[i] + op = ord(c) + if op >= HAVE_ARGUMENT: + if op != SET_LINENO: + result.append(code[i:i+3]) + i = i+3 + else: + result.append(c) + i = i+1 + return string.join(result, '') + +strip_quotes = re.compile('^[\'"](.*)[\'"]$') + + +def _callable_contents(obj): + """Return the signature contents of a callable Python object. + """ + try: + # Test if obj is a method. + return _function_contents(obj.im_func) + + except AttributeError: + try: + # Test if obj is a callable object. + return _function_contents(obj.__call__.im_func) + + except AttributeError: + try: + # Test if obj is a code object. + return _code_contents(obj) + + except AttributeError: + # Test if obj is a function object. + return _function_contents(obj) + + +def _object_contents(obj): + """Return the signature contents of any Python object. + + We have to handle the case where object contains a code object + since it can be pickled directly. + """ + try: + # Test if obj is a method. + return _function_contents(obj.im_func) + + except AttributeError: + try: + # Test if obj is a callable object. + return _function_contents(obj.__call__.im_func) + + except AttributeError: + try: + # Test if obj is a code object. + return _code_contents(obj) + + except AttributeError: + try: + # Test if obj is a function object. + return _function_contents(obj) + + except AttributeError: + # Should be a pickable Python object. 
+ try: + return cPickle.dumps(obj) + except (cPickle.PicklingError, TypeError): + # This is weird, but it seems that nested classes + # are unpickable. The Python docs say it should + # always be a PicklingError, but some Python + # versions seem to return TypeError. Just do + # the best we can. + return str(obj) + + +def _code_contents(code): + """Return the signature contents of a code object. + + By providing direct access to the code object of the + function, Python makes this extremely easy. Hooray! + + Unfortunately, older versions of Python include line + number indications in the compiled byte code. Boo! + So we remove the line number byte codes to prevent + recompilations from moving a Python function. + """ + + contents = [] + + # The code contents depends on the number of local variables + # but not their actual names. + contents.append("%s,%s" % (code.co_argcount, len(code.co_varnames))) + try: + contents.append(",%s,%s" % (len(code.co_cellvars), len(code.co_freevars))) + except AttributeError: + # Older versions of Python do not support closures. + contents.append(",0,0") + + # The code contents depends on any constants accessed by the + # function. Note that we have to call _object_contents on each + # constants because the code object of nested functions can + # show-up among the constants. + # + # Note that we also always ignore the first entry of co_consts + # which contains the function doc string. We assume that the + # function does not access its doc string. + contents.append(',(' + string.join(map(_object_contents,code.co_consts[1:]),',') + ')') + + # The code contents depends on the variable names used to + # accessed global variable, as changing the variable name changes + # the variable actually accessed and therefore changes the + # function result. + contents.append(',(' + string.join(map(_object_contents,code.co_names),',') + ')') + + + # The code contents depends on its actual code!!! + contents.append(',(' + str(remove_set_lineno_codes(code.co_code)) + ')') + + return string.join(contents, '') + + +def _function_contents(func): + """Return the signature contents of a function.""" + + contents = [_code_contents(func.func_code)] + + # The function contents depends on the value of defaults arguments + if func.func_defaults: + contents.append(',(' + string.join(map(_object_contents,func.func_defaults),',') + ')') + else: + contents.append(',()') + + # The function contents depends on the closure captured cell values. + try: + closure = func.func_closure or [] + except AttributeError: + # Older versions of Python do not support closures. + closure = [] + + #xxx = [_object_contents(x.cell_contents) for x in closure] + try: + xxx = map(lambda x: _object_contents(x.cell_contents), closure) + except AttributeError: + xxx = [] + contents.append(',(' + string.join(xxx, ',') + ')') + + return string.join(contents, '') + + +def _actionAppend(act1, act2): + # This function knows how to slap two actions together. + # Mainly, it handles ListActions by concatenating into + # a single ListAction. 
+ a1 = Action(act1) + a2 = Action(act2) + if a1 is None or a2 is None: + raise TypeError, "Cannot append %s to %s" % (type(act1), type(act2)) + if isinstance(a1, ListAction): + if isinstance(a2, ListAction): + return ListAction(a1.list + a2.list) + else: + return ListAction(a1.list + [ a2 ]) + else: + if isinstance(a2, ListAction): + return ListAction([ a1 ] + a2.list) + else: + return ListAction([ a1, a2 ]) + +def _do_create_keywords(args, kw): + """This converts any arguments after the action argument into + their equivalent keywords and adds them to the kw argument. + """ + v = kw.get('varlist', ()) + # prevent varlist="FOO" from being interpreted as ['F', 'O', 'O'] + if is_String(v): v = (v,) + kw['varlist'] = tuple(v) + if args: + # turn positional args into equivalent keywords + cmdstrfunc = args[0] + if cmdstrfunc is None or is_String(cmdstrfunc): + kw['cmdstr'] = cmdstrfunc + elif callable(cmdstrfunc): + kw['strfunction'] = cmdstrfunc + else: + raise SCons.Errors.UserError( + 'Invalid command display variable type. ' + 'You must either pass a string or a callback which ' + 'accepts (target, source, env) as parameters.') + if len(args) > 1: + kw['varlist'] = args[1:] + kw['varlist'] + if kw.get('strfunction', _null) is not _null \ + and kw.get('cmdstr', _null) is not _null: + raise SCons.Errors.UserError( + 'Cannot have both strfunction and cmdstr args to Action()') + +def _do_create_action(act, kw): + """This is the actual "implementation" for the + Action factory method, below. This handles the + fact that passing lists to Action() itself has + different semantics than passing lists as elements + of lists. + + The former will create a ListAction, the latter + will create a CommandAction by converting the inner + list elements to strings.""" + + if isinstance(act, ActionBase): + return act + + if is_List(act): + #TODO(1.5) return CommandAction(act, **kw) + return apply(CommandAction, (act,), kw) + + if callable(act): + try: + gen = kw['generator'] + del kw['generator'] + except KeyError: + gen = 0 + if gen: + action_type = CommandGeneratorAction + else: + action_type = FunctionAction + return action_type(act, kw) + + if is_String(act): + var=SCons.Util.get_environment_var(act) + if var: + # This looks like a string that is purely an Environment + # variable reference, like "$FOO" or "${FOO}". We do + # something special here...we lazily evaluate the contents + # of that Environment variable, so a user could put something + # like a function or a CommandGenerator in that variable + # instead of a string. + return LazyAction(var, kw) + commands = string.split(str(act), '\n') + if len(commands) == 1: + #TODO(1.5) return CommandAction(commands[0], **kw) + return apply(CommandAction, (commands[0],), kw) + # The list of string commands may include a LazyAction, so we + # reprocess them via _do_create_list_action. + return _do_create_list_action(commands, kw) + return None + +def _do_create_list_action(act, kw): + """A factory for list actions. Convert the input list into Actions + and then wrap them in a ListAction.""" + acts = [] + for a in act: + aa = _do_create_action(a, kw) + if aa is not None: acts.append(aa) + if not acts: + return ListAction([]) + elif len(acts) == 1: + return acts[0] + else: + return ListAction(acts) + +def Action(act, *args, **kw): + """A factory for action objects.""" + # Really simple: the _do_create_* routines do the heavy lifting. 
+ _do_create_keywords(args, kw) + if is_List(act): + return _do_create_list_action(act, kw) + return _do_create_action(act, kw) + +class ActionBase: + """Base class for all types of action objects that can be held by + other objects (Builders, Executors, etc.) This provides the + common methods for manipulating and combining those actions.""" + + def __cmp__(self, other): + return cmp(self.__dict__, other) + + def no_batch_key(self, env, target, source): + return None + + batch_key = no_batch_key + + def genstring(self, target, source, env): + return str(self) + + def get_contents(self, target, source, env): + result = [ self.get_presig(target, source, env) ] + # This should never happen, as the Action() factory should wrap + # the varlist, but just in case an action is created directly, + # we duplicate this check here. + vl = self.varlist + if is_String(vl): vl = (vl,) + for v in vl: + result.append(env.subst('${'+v+'}')) + return string.join(result, '') + + def __add__(self, other): + return _actionAppend(self, other) + + def __radd__(self, other): + return _actionAppend(other, self) + + def presub_lines(self, env): + # CommandGeneratorAction needs a real environment + # in order to return the proper string here, since + # it may call LazyAction, which looks up a key + # in that env. So we temporarily remember the env here, + # and CommandGeneratorAction will use this env + # when it calls its _generate method. + self.presub_env = env + lines = string.split(str(self), '\n') + self.presub_env = None # don't need this any more + return lines + + def get_targets(self, env, executor): + """ + Returns the type of targets ($TARGETS, $CHANGED_TARGETS) used + by this action. + """ + return self.targets + +class _ActionAction(ActionBase): + """Base class for actions that create output objects.""" + def __init__(self, cmdstr=_null, strfunction=_null, varlist=(), + presub=_null, chdir=None, exitstatfunc=None, + batch_key=None, targets='$TARGETS', + **kw): + self.cmdstr = cmdstr + if strfunction is not _null: + if strfunction is None: + self.cmdstr = None + else: + self.strfunction = strfunction + self.varlist = varlist + self.presub = presub + self.chdir = chdir + if not exitstatfunc: + exitstatfunc = default_exitstatfunc + self.exitstatfunc = exitstatfunc + + self.targets = targets + + if batch_key: + if not callable(batch_key): + # They have set batch_key, but not to their own + # callable. The default behavior here will batch + # *all* targets+sources using this action, separated + # for each construction environment. 
+ def default_batch_key(self, env, target, source): + return (id(self), id(env)) + batch_key = default_batch_key + SCons.Util.AddMethod(self, batch_key, 'batch_key') + + def print_cmd_line(self, s, target, source, env): + sys.stdout.write(s + "\n") + + def __call__(self, target, source, env, + exitstatfunc=_null, + presub=_null, + show=_null, + execute=_null, + chdir=_null, + executor=None): + if not is_List(target): + target = [target] + if not is_List(source): + source = [source] + + if presub is _null: + presub = self.presub + if presub is _null: + presub = print_actions_presub + if exitstatfunc is _null: exitstatfunc = self.exitstatfunc + if show is _null: show = print_actions + if execute is _null: execute = execute_actions + if chdir is _null: chdir = self.chdir + save_cwd = None + if chdir: + save_cwd = os.getcwd() + try: + chdir = str(chdir.abspath) + except AttributeError: + if not is_String(chdir): + if executor: + chdir = str(executor.batches[0].targets[0].dir) + else: + chdir = str(target[0].dir) + if presub: + if executor: + target = executor.get_all_targets() + source = executor.get_all_sources() + t = string.join(map(str, target), ' and ') + l = string.join(self.presub_lines(env), '\n ') + out = "Building %s with action:\n %s\n" % (t, l) + sys.stdout.write(out) + cmd = None + if show and self.strfunction: + if executor: + target = executor.get_all_targets() + source = executor.get_all_sources() + try: + cmd = self.strfunction(target, source, env, executor) + except TypeError: + cmd = self.strfunction(target, source, env) + if cmd: + if chdir: + cmd = ('os.chdir(%s)\n' % repr(chdir)) + cmd + try: + get = env.get + except AttributeError: + print_func = self.print_cmd_line + else: + print_func = get('PRINT_CMD_LINE_FUNC') + if not print_func: + print_func = self.print_cmd_line + print_func(cmd, target, source, env) + stat = 0 + if execute: + if chdir: + os.chdir(chdir) + try: + stat = self.execute(target, source, env, executor=executor) + if isinstance(stat, SCons.Errors.BuildError): + s = exitstatfunc(stat.status) + if s: + stat.status = s + else: + stat = s + else: + stat = exitstatfunc(stat) + finally: + if save_cwd: + os.chdir(save_cwd) + if cmd and save_cwd: + print_func('os.chdir(%s)' % repr(save_cwd), target, source, env) + + return stat + + +def _string_from_cmd_list(cmd_list): + """Takes a list of command line arguments and returns a pretty + representation for printing.""" + cl = [] + for arg in map(str, cmd_list): + if ' ' in arg or '\t' in arg: + arg = '"' + arg + '"' + cl.append(arg) + return string.join(cl) + +# A fiddlin' little function that has an 'import SCons.Environment' which +# can't be moved to the top level without creating an import loop. Since +# this import creates a local variable named 'SCons', it blocks access to +# the global variable, so we move it here to prevent complaints about local +# variables being used uninitialized. +default_ENV = None +def get_default_ENV(env): + global default_ENV + try: + return env['ENV'] + except KeyError: + if not default_ENV: + import SCons.Environment + # This is a hideously expensive way to get a default shell + # environment. What it really should do is run the platform + # setup to get the default ENV. Fortunately, it's incredibly + # rare for an Environment not to have a shell environment, so + # we're not going to worry about it overmuch. + default_ENV = SCons.Environment.Environment()['ENV'] + return default_ENV + +# This function is still in draft mode. 
We're going to need something like +# it in the long run as more and more places use subprocess, but I'm sure +# it'll have to be tweaked to get the full desired functionality. +# one special arg (so far?), 'error', to tell what to do with exceptions. +def _subproc(env, cmd, error = 'ignore', **kw): + """Do common setup for a subprocess.Popen() call""" + # allow std{in,out,err} to be "'devnull'" + io = kw.get('stdin') + if is_String(io) and io == 'devnull': + kw['stdin'] = open(os.devnull) + io = kw.get('stdout') + if is_String(io) and io == 'devnull': + kw['stdout'] = open(os.devnull, 'w') + io = kw.get('stderr') + if is_String(io) and io == 'devnull': + kw['stderr'] = open(os.devnull, 'w') + + # Figure out what shell environment to use + ENV = kw.get('env', None) + if ENV is None: ENV = get_default_ENV(env) + + # Ensure that the ENV values are all strings: + new_env = {} + for key, value in ENV.items(): + if is_List(value): + # If the value is a list, then we assume it is a path list, + # because that's a pretty common list-like value to stick + # in an environment variable: + value = SCons.Util.flatten_sequence(value) + new_env[key] = string.join(map(str, value), os.pathsep) + else: + # It's either a string or something else. If it's a string, + # we still want to call str() because it might be a *Unicode* + # string, which makes subprocess.Popen() gag. If it isn't a + # string or a list, then we just coerce it to a string, which + # is the proper way to handle Dir and File instances and will + # produce something reasonable for just about everything else: + new_env[key] = str(value) + kw['env'] = new_env + + try: + #FUTURE return subprocess.Popen(cmd, **kw) + return apply(subprocess.Popen, (cmd,), kw) + except EnvironmentError, e: + if error == 'raise': raise + # return a dummy Popen instance that only returns error + class dummyPopen: + def __init__(self, e): self.exception = e + def communicate(self): return ('','') + def wait(self): return -self.exception.errno + stdin = None + class f: + def read(self): return '' + def readline(self): return '' + stdout = stderr = f() + return dummyPopen(e) + +class CommandAction(_ActionAction): + """Class for command-execution actions.""" + def __init__(self, cmd, **kw): + # Cmd can actually be a list or a single item; if it's a + # single item it should be the command string to execute; if a + # list then it should be the words of the command string to + # execute. Only a single command should be executed by this + # object; lists of commands should be handled by embedding + # these objects in a ListAction object (which the Action() + # factory above does). cmd will be passed to + # Environment.subst_list() for substituting environment + # variables. 
+ if __debug__: logInstanceCreation(self, 'Action.CommandAction') + + #TODO(1.5) _ActionAction.__init__(self, **kw) + apply(_ActionAction.__init__, (self,), kw) + if is_List(cmd): + if filter(is_List, cmd): + raise TypeError, "CommandAction should be given only " \ + "a single command" + self.cmd_list = cmd + + def __str__(self): + if is_List(self.cmd_list): + return string.join(map(str, self.cmd_list), ' ') + return str(self.cmd_list) + + def process(self, target, source, env, executor=None): + if executor: + result = env.subst_list(self.cmd_list, 0, executor=executor) + else: + result = env.subst_list(self.cmd_list, 0, target, source) + silent = None + ignore = None + while 1: + try: c = result[0][0][0] + except IndexError: c = None + if c == '@': silent = 1 + elif c == '-': ignore = 1 + else: break + result[0][0] = result[0][0][1:] + try: + if not result[0][0]: + result[0] = result[0][1:] + except IndexError: + pass + return result, ignore, silent + + def strfunction(self, target, source, env, executor=None): + if self.cmdstr is None: + return None + if self.cmdstr is not _null: + from SCons.Subst import SUBST_RAW + if executor: + c = env.subst(self.cmdstr, SUBST_RAW, executor=executor) + else: + c = env.subst(self.cmdstr, SUBST_RAW, target, source) + if c: + return c + cmd_list, ignore, silent = self.process(target, source, env, executor) + if silent: + return '' + return _string_from_cmd_list(cmd_list[0]) + + def execute(self, target, source, env, executor=None): + """Execute a command action. + + This will handle lists of commands as well as individual commands, + because construction variable substitution may turn a single + "command" into a list. This means that this class can actually + handle lists of commands, even though that's not how we use it + externally. + """ + escape_list = SCons.Subst.escape_list + flatten_sequence = SCons.Util.flatten_sequence + + try: + shell = env['SHELL'] + except KeyError: + raise SCons.Errors.UserError('Missing SHELL construction variable.') + + try: + spawn = env['SPAWN'] + except KeyError: + raise SCons.Errors.UserError('Missing SPAWN construction variable.') + else: + if is_String(spawn): + spawn = env.subst(spawn, raw=1, conv=lambda x: x) + + escape = env.get('ESCAPE', lambda x: x) + + ENV = get_default_ENV(env) + + # Ensure that the ENV values are all strings: + for key, value in ENV.items(): + if not is_String(value): + if is_List(value): + # If the value is a list, then we assume it is a + # path list, because that's a pretty common list-like + # value to stick in an environment variable: + value = flatten_sequence(value) + ENV[key] = string.join(map(str, value), os.pathsep) + else: + # If it isn't a string or a list, then we just coerce + # it to a string, which is the proper way to handle + # Dir and File instances and will produce something + # reasonable for just about everything else: + ENV[key] = str(value) + + if executor: + target = executor.get_all_targets() + source = executor.get_all_sources() + cmd_list, ignore, silent = self.process(target, map(rfile, source), env, executor) + + # Use len() to filter out any "command" that's zero-length. + for cmd_line in filter(len, cmd_list): + # Escape the command line for the interpreter we are using. 
+ cmd_line = escape_list(cmd_line, escape) + result = spawn(shell, escape, cmd_line[0], cmd_line, ENV) + if not ignore and result: + msg = "Error %s" % result + return SCons.Errors.BuildError(errstr=msg, + status=result, + action=self, + command=cmd_line) + return 0 + + def get_presig(self, target, source, env, executor=None): + """Return the signature contents of this action's command line. + + This strips $(-$) and everything in between the string, + since those parts don't affect signatures. + """ + from SCons.Subst import SUBST_SIG + cmd = self.cmd_list + if is_List(cmd): + cmd = string.join(map(str, cmd)) + else: + cmd = str(cmd) + if executor: + return env.subst_target_source(cmd, SUBST_SIG, executor=executor) + else: + return env.subst_target_source(cmd, SUBST_SIG, target, source) + + def get_implicit_deps(self, target, source, env, executor=None): + icd = env.get('IMPLICIT_COMMAND_DEPENDENCIES', True) + if is_String(icd) and icd[:1] == '$': + icd = env.subst(icd) + if not icd or icd in ('0', 'None'): + return [] + from SCons.Subst import SUBST_SIG + if executor: + cmd_list = env.subst_list(self.cmd_list, SUBST_SIG, executor=executor) + else: + cmd_list = env.subst_list(self.cmd_list, SUBST_SIG, target, source) + res = [] + for cmd_line in cmd_list: + if cmd_line: + d = str(cmd_line[0]) + m = strip_quotes.match(d) + if m: + d = m.group(1) + d = env.WhereIs(d) + if d: + res.append(env.fs.File(d)) + return res + +class CommandGeneratorAction(ActionBase): + """Class for command-generator actions.""" + def __init__(self, generator, kw): + if __debug__: logInstanceCreation(self, 'Action.CommandGeneratorAction') + self.generator = generator + self.gen_kw = kw + self.varlist = kw.get('varlist', ()) + self.targets = kw.get('targets', '$TARGETS') + + def _generate(self, target, source, env, for_signature, executor=None): + # ensure that target is a list, to make it easier to write + # generator functions: + if not is_List(target): + target = [target] + + if executor: + target = executor.get_all_targets() + source = executor.get_all_sources() + ret = self.generator(target=target, + source=source, + env=env, + for_signature=for_signature) + #TODO(1.5) gen_cmd = Action(ret, **self.gen_kw) + gen_cmd = apply(Action, (ret,), self.gen_kw) + if not gen_cmd: + raise SCons.Errors.UserError("Object returned from command generator: %s cannot be used to create an Action." % repr(ret)) + return gen_cmd + + def __str__(self): + try: + env = self.presub_env + except AttributeError: + env = None + if env is None: + env = SCons.Defaults.DefaultEnvironment() + act = self._generate([], [], env, 1) + return str(act) + + def batch_key(self, env, target, source): + return self._generate(target, source, env, 1).batch_key(env, target, source) + + def genstring(self, target, source, env, executor=None): + return self._generate(target, source, env, 1, executor).genstring(target, source, env) + + def __call__(self, target, source, env, exitstatfunc=_null, presub=_null, + show=_null, execute=_null, chdir=_null, executor=None): + act = self._generate(target, source, env, 0, executor) + if act is None: + raise UserError("While building `%s': Cannot deduce file extension from source files: %s" % (repr(map(str, target)), repr(map(str, source)))) + return act(target, source, env, exitstatfunc, presub, + show, execute, chdir, executor) + + def get_presig(self, target, source, env, executor=None): + """Return the signature contents of this action's command line. 
+ + This strips $(-$) and everything in between the string, + since those parts don't affect signatures. + """ + return self._generate(target, source, env, 1, executor).get_presig(target, source, env) + + def get_implicit_deps(self, target, source, env, executor=None): + return self._generate(target, source, env, 1, executor).get_implicit_deps(target, source, env) + + def get_targets(self, env, executor): + return self._generate(None, None, env, 1, executor).get_targets(env, executor) + + + +# A LazyAction is a kind of hybrid generator and command action for +# strings of the form "$VAR". These strings normally expand to other +# strings (think "$CCCOM" to "$CC -c -o $TARGET $SOURCE"), but we also +# want to be able to replace them with functions in the construction +# environment. Consequently, we want lazy evaluation and creation of +# an Action in the case of the function, but that's overkill in the more +# normal case of expansion to other strings. +# +# So we do this with a subclass that's both a generator *and* +# a command action. The overridden methods all do a quick check +# of the construction variable, and if it's a string we just call +# the corresponding CommandAction method to do the heavy lifting. +# If not, then we call the same-named CommandGeneratorAction method. +# The CommandGeneratorAction methods work by using the overridden +# _generate() method, that is, our own way of handling "generation" of +# an action based on what's in the construction variable. + +class LazyAction(CommandGeneratorAction, CommandAction): + + def __init__(self, var, kw): + if __debug__: logInstanceCreation(self, 'Action.LazyAction') + #FUTURE CommandAction.__init__(self, '${'+var+'}', **kw) + apply(CommandAction.__init__, (self, '${'+var+'}'), kw) + self.var = SCons.Util.to_String(var) + self.gen_kw = kw + + def get_parent_class(self, env): + c = env.get(self.var) + if is_String(c) and not '\n' in c: + return CommandAction + return CommandGeneratorAction + + def _generate_cache(self, env): + if env: + c = env.get(self.var, '') + else: + c = '' + #TODO(1.5) gen_cmd = Action(c, **self.gen_kw) + gen_cmd = apply(Action, (c,), self.gen_kw) + if not gen_cmd: + raise SCons.Errors.UserError("$%s value %s cannot be used to create an Action." % (self.var, repr(c))) + return gen_cmd + + def _generate(self, target, source, env, for_signature, executor=None): + return self._generate_cache(env) + + def __call__(self, target, source, env, *args, **kw): + args = (self, target, source, env) + args + c = self.get_parent_class(env) + #TODO(1.5) return c.__call__(*args, **kw) + return apply(c.__call__, args, kw) + + def get_presig(self, target, source, env): + c = self.get_parent_class(env) + return c.get_presig(self, target, source, env) + + + +class FunctionAction(_ActionAction): + """Class for Python function actions.""" + + def __init__(self, execfunction, kw): + if __debug__: logInstanceCreation(self, 'Action.FunctionAction') + + self.execfunction = execfunction + try: + self.funccontents = _callable_contents(execfunction) + except AttributeError: + try: + # See if execfunction will do the heavy lifting for us. + self.gc = execfunction.get_contents + except AttributeError: + # This is weird, just do the best we can. 
+ self.funccontents = _object_contents(execfunction) + + #TODO(1.5) _ActionAction.__init__(self, **kw) + apply(_ActionAction.__init__, (self,), kw) + + def function_name(self): + try: + return self.execfunction.__name__ + except AttributeError: + try: + return self.execfunction.__class__.__name__ + except AttributeError: + return "unknown_python_function" + + def strfunction(self, target, source, env, executor=None): + if self.cmdstr is None: + return None + if self.cmdstr is not _null: + from SCons.Subst import SUBST_RAW + if executor: + c = env.subst(self.cmdstr, SUBST_RAW, executor=executor) + else: + c = env.subst(self.cmdstr, SUBST_RAW, target, source) + if c: + return c + def array(a): + def quote(s): + try: + str_for_display = s.str_for_display + except AttributeError: + s = repr(s) + else: + s = str_for_display() + return s + return '[' + string.join(map(quote, a), ", ") + ']' + try: + strfunc = self.execfunction.strfunction + except AttributeError: + pass + else: + if strfunc is None: + return None + if callable(strfunc): + return strfunc(target, source, env) + name = self.function_name() + tstr = array(target) + sstr = array(source) + return "%s(%s, %s)" % (name, tstr, sstr) + + def __str__(self): + name = self.function_name() + if name == 'ActionCaller': + return str(self.execfunction) + return "%s(target, source, env)" % name + + def execute(self, target, source, env, executor=None): + exc_info = (None,None,None) + try: + if executor: + target = executor.get_all_targets() + source = executor.get_all_sources() + rsources = map(rfile, source) + try: + result = self.execfunction(target=target, source=rsources, env=env) + except KeyboardInterrupt, e: + raise + except SystemExit, e: + raise + except Exception, e: + result = e + exc_info = sys.exc_info() + + if result: + result = SCons.Errors.convert_to_BuildError(result, exc_info) + result.node=target + result.action=self + try: + result.command=self.strfunction(target, source, env, executor) + except TypeError: + result.command=self.strfunction(target, source, env) + + # FIXME: This maintains backward compatibility with respect to + # which type of exceptions were returned by raising an + # exception and which ones were returned by value. It would + # probably be best to always return them by value here, but + # some codes do not check the return value of Actions and I do + # not have the time to modify them at this point. + if (exc_info[1] and + not isinstance(exc_info[1],EnvironmentError)): + raise result + + return result + finally: + # Break the cycle between the traceback object and this + # function stack frame. See the sys.exc_info() doc info for + # more information about this issue. 
+ del exc_info + + + def get_presig(self, target, source, env): + """Return the signature contents of this callable action.""" + try: + return self.gc(target, source, env) + except AttributeError: + return self.funccontents + + def get_implicit_deps(self, target, source, env): + return [] + +class ListAction(ActionBase): + """Class for lists of other actions.""" + def __init__(self, list): + if __debug__: logInstanceCreation(self, 'Action.ListAction') + def list_of_actions(x): + if isinstance(x, ActionBase): + return x + return Action(x) + self.list = map(list_of_actions, list) + # our children will have had any varlist + # applied; we don't need to do it again + self.varlist = () + self.targets = '$TARGETS' + + def genstring(self, target, source, env): + return string.join(map(lambda a, t=target, s=source, e=env: + a.genstring(t, s, e), + self.list), + '\n') + + def __str__(self): + return string.join(map(str, self.list), '\n') + + def presub_lines(self, env): + return SCons.Util.flatten_sequence( + map(lambda a, env=env: a.presub_lines(env), self.list)) + + def get_presig(self, target, source, env): + """Return the signature contents of this action list. + + Simple concatenation of the signatures of the elements. + """ + return string.join(map(lambda x, t=target, s=source, e=env: + x.get_contents(t, s, e), + self.list), + "") + + def __call__(self, target, source, env, exitstatfunc=_null, presub=_null, + show=_null, execute=_null, chdir=_null, executor=None): + if executor: + target = executor.get_all_targets() + source = executor.get_all_sources() + for act in self.list: + stat = act(target, source, env, exitstatfunc, presub, + show, execute, chdir, executor) + if stat: + return stat + return 0 + + def get_implicit_deps(self, target, source, env): + result = [] + for act in self.list: + result.extend(act.get_implicit_deps(target, source, env)) + return result + +class ActionCaller: + """A class for delaying calling an Action function with specific + (positional and keyword) arguments until the Action is actually + executed. + + This class looks to the rest of the world like a normal Action object, + but what it's really doing is hanging on to the arguments until we + have a target, source and env to use for the expansion. + """ + def __init__(self, parent, args, kw): + self.parent = parent + self.args = args + self.kw = kw + + def get_contents(self, target, source, env): + actfunc = self.parent.actfunc + try: + # "self.actfunc" is a function. + contents = str(actfunc.func_code.co_code) + except AttributeError: + # "self.actfunc" is a callable object. + try: + contents = str(actfunc.__call__.im_func.func_code.co_code) + except AttributeError: + # No __call__() method, so it might be a builtin + # or something like that. Do the best we can. + contents = str(actfunc) + contents = remove_set_lineno_codes(contents) + return contents + + def subst(self, s, target, source, env): + # If s is a list, recursively apply subst() + # to every element in the list + if is_List(s): + result = [] + for elem in s: + result.append(self.subst(elem, target, source, env)) + return self.parent.convert(result) + + # Special-case hack: Let a custom function wrapped in an + # ActionCaller get at the environment through which the action + # was called by using this hard-coded value as a special return. 
+ if s == '$__env__': + return env + elif is_String(s): + return env.subst(s, 1, target, source) + return self.parent.convert(s) + + def subst_args(self, target, source, env): + return map(lambda x, self=self, t=target, s=source, e=env: + self.subst(x, t, s, e), + self.args) + + def subst_kw(self, target, source, env): + kw = {} + for key in self.kw.keys(): + kw[key] = self.subst(self.kw[key], target, source, env) + return kw + + def __call__(self, target, source, env, executor=None): + args = self.subst_args(target, source, env) + kw = self.subst_kw(target, source, env) + #TODO(1.5) return self.parent.actfunc(*args, **kw) + return apply(self.parent.actfunc, args, kw) + + def strfunction(self, target, source, env): + args = self.subst_args(target, source, env) + kw = self.subst_kw(target, source, env) + #TODO(1.5) return self.parent.strfunc(*args, **kw) + return apply(self.parent.strfunc, args, kw) + + def __str__(self): + #TODO(1.5) return self.parent.strfunc(*self.args, **self.kw) + return apply(self.parent.strfunc, self.args, self.kw) + +class ActionFactory: + """A factory class that will wrap up an arbitrary function + as an SCons-executable Action object. + + The real heavy lifting here is done by the ActionCaller class. + We just collect the (positional and keyword) arguments that we're + called with and give them to the ActionCaller object we create, + so it can hang onto them until it needs them. + """ + def __init__(self, actfunc, strfunc, convert=lambda x: x): + self.actfunc = actfunc + self.strfunc = strfunc + self.convert = convert + + def __call__(self, *args, **kw): + ac = ActionCaller(self, args, kw) + action = Action(ac, strfunction=ac.strfunction) + return action + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Builder.py b/engine/SCons/Builder.py new file mode 100644 index 0000000..750b741 --- /dev/null +++ b/engine/SCons/Builder.py @@ -0,0 +1,868 @@ +"""SCons.Builder + +Builder object subsystem. + +A Builder object is a callable that encapsulates information about how +to execute actions to create a target Node (file) from source Nodes +(files), and how to create those dependencies for tracking. + +The main entry point here is the Builder() factory method. This provides +a procedural interface that creates the right underlying Builder object +based on the keyword arguments supplied and the types of the arguments. + +The goal is for this external interface to be simple enough that the +vast majority of users can create new Builders as necessary to support +building new types of files in their configurations, without having to +dive any deeper into this subsystem. + +The base class here is BuilderBase. This is a concrete base class which +does, in fact, represent the Builder objects that we (or users) create. + +There is also a proxy that looks like a Builder: + + CompositeBuilder + + This proxies for a Builder with an action that is actually a + dictionary that knows how to map file suffixes to a specific + action. This is so that we can invoke different actions + (compilers, compile options) for different flavors of source + files. + +Builders and their proxies have the following public interface methods +used by other modules: + + __call__() + THE public interface. 
Calling a Builder object (with the + use of internal helper methods) sets up the target and source + dependencies, appropriate mapping to a specific action, and the + environment manipulation necessary for overridden construction + variable. This also takes care of warning about possible mistakes + in keyword arguments. + + add_emitter() + Adds an emitter for a specific file suffix, used by some Tool + modules to specify that (for example) a yacc invocation on a .y + can create a .h *and* a .c file. + + add_action() + Adds an action for a specific file suffix, heavily used by + Tool modules to add their specific action(s) for turning + a source file into an object file to the global static + and shared object file Builders. + +There are the following methods for internal use within this module: + + _execute() + The internal method that handles the heavily lifting when a + Builder is called. This is used so that the __call__() methods + can set up warning about possible mistakes in keyword-argument + overrides, and *then* execute all of the steps necessary so that + the warnings only occur once. + + get_name() + Returns the Builder's name within a specific Environment, + primarily used to try to return helpful information in error + messages. + + adjust_suffix() + get_prefix() + get_suffix() + get_src_suffix() + set_src_suffix() + Miscellaneous stuff for handling the prefix and suffix + manipulation we use in turning source file names into target + file names. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Builder.py 4577 2009/12/27 19:43:56 scons" + +import UserDict +import UserList + +import SCons.Action +from SCons.Debug import logInstanceCreation +from SCons.Errors import InternalError, UserError +import SCons.Executor +import SCons.Memoize +import SCons.Node +import SCons.Node.FS +import SCons.Util +import SCons.Warnings + +class _Null: + pass + +_null = _Null + +def match_splitext(path, suffixes = []): + if suffixes: + matchsuf = filter(lambda S,path=path: path[-len(S):] == S, + suffixes) + if matchsuf: + suf = max(map(None, map(len, matchsuf), matchsuf))[1] + return [path[:-len(suf)], path[-len(suf):]] + return SCons.Util.splitext(path) + +class DictCmdGenerator(SCons.Util.Selector): + """This is a callable class that can be used as a + command generator function. It holds on to a dictionary + mapping file suffixes to Actions. 
It uses that dictionary + to return the proper action based on the file suffix of + the source file.""" + + def __init__(self, dict=None, source_ext_match=1): + SCons.Util.Selector.__init__(self, dict) + self.source_ext_match = source_ext_match + + def src_suffixes(self): + return self.keys() + + def add_action(self, suffix, action): + """Add a suffix-action pair to the mapping. + """ + self[suffix] = action + + def __call__(self, target, source, env, for_signature): + if not source: + return [] + + if self.source_ext_match: + suffixes = self.src_suffixes() + ext = None + for src in map(str, source): + my_ext = match_splitext(src, suffixes)[1] + if ext and my_ext != ext: + raise UserError("While building `%s' from `%s': Cannot build multiple sources with different extensions: %s, %s" % (repr(map(str, target)), src, ext, my_ext)) + ext = my_ext + else: + ext = match_splitext(str(source[0]), self.src_suffixes())[1] + + if not ext: + #return ext + raise UserError("While building `%s': Cannot deduce file extension from source files: %s" % (repr(map(str, target)), repr(map(str, source)))) + + try: + ret = SCons.Util.Selector.__call__(self, env, source, ext) + except KeyError, e: + raise UserError("Ambiguous suffixes after environment substitution: %s == %s == %s" % (e[0], e[1], e[2])) + if ret is None: + raise UserError("While building `%s' from `%s': Don't know how to build from a source file with suffix `%s'. Expected a suffix in this list: %s." % \ + (repr(map(str, target)), repr(map(str, source)), ext, repr(self.keys()))) + return ret + +class CallableSelector(SCons.Util.Selector): + """A callable dictionary that will, in turn, call the value it + finds if it can.""" + def __call__(self, env, source): + value = SCons.Util.Selector.__call__(self, env, source) + if callable(value): + value = value(env, source) + return value + +class DictEmitter(SCons.Util.Selector): + """A callable dictionary that maps file suffixes to emitters. + When called, it finds the right emitter in its dictionary for the + suffix of the first source file, and calls that emitter to get the + right lists of targets and sources to return. If there's no emitter + for the suffix in its dictionary, the original target and source are + returned. + """ + def __call__(self, target, source, env): + emitter = SCons.Util.Selector.__call__(self, env, source) + if emitter: + target, source = emitter(target, source, env) + return (target, source) + +class ListEmitter(UserList.UserList): + """A callable list of emitters that calls each in sequence, + returning the result. + """ + def __call__(self, target, source, env): + for e in self.data: + target, source = e(target, source, env) + return (target, source) + +# These are a common errors when calling a Builder; +# they are similar to the 'target' and 'source' keyword args to builders, +# so we issue warnings when we see them. The warnings can, of course, +# be disabled. +misleading_keywords = { + 'targets' : 'target', + 'sources' : 'source', +} + +class OverrideWarner(UserDict.UserDict): + """A class for warning about keyword arguments that we use as + overrides in a Builder call. + + This class exists to handle the fact that a single Builder call + can actually invoke multiple builders. This class only emits the + warnings once, no matter how many Builders are invoked. 
+ """ + def __init__(self, dict): + UserDict.UserDict.__init__(self, dict) + if __debug__: logInstanceCreation(self, 'Builder.OverrideWarner') + self.already_warned = None + def warn(self): + if self.already_warned: + return + for k in self.keys(): + if misleading_keywords.has_key(k): + alt = misleading_keywords[k] + msg = "Did you mean to use `%s' instead of `%s'?" % (alt, k) + SCons.Warnings.warn(SCons.Warnings.MisleadingKeywordsWarning, msg) + self.already_warned = 1 + +def Builder(**kw): + """A factory for builder objects.""" + composite = None + if kw.has_key('generator'): + if kw.has_key('action'): + raise UserError, "You must not specify both an action and a generator." + kw['action'] = SCons.Action.CommandGeneratorAction(kw['generator'], {}) + del kw['generator'] + elif kw.has_key('action'): + source_ext_match = kw.get('source_ext_match', 1) + if kw.has_key('source_ext_match'): + del kw['source_ext_match'] + if SCons.Util.is_Dict(kw['action']): + composite = DictCmdGenerator(kw['action'], source_ext_match) + kw['action'] = SCons.Action.CommandGeneratorAction(composite, {}) + kw['src_suffix'] = composite.src_suffixes() + else: + kw['action'] = SCons.Action.Action(kw['action']) + + if kw.has_key('emitter'): + emitter = kw['emitter'] + if SCons.Util.is_String(emitter): + # This allows users to pass in an Environment + # variable reference (like "$FOO") as an emitter. + # We will look in that Environment variable for + # a callable to use as the actual emitter. + var = SCons.Util.get_environment_var(emitter) + if not var: + raise UserError, "Supplied emitter '%s' does not appear to refer to an Environment variable" % emitter + kw['emitter'] = EmitterProxy(var) + elif SCons.Util.is_Dict(emitter): + kw['emitter'] = DictEmitter(emitter) + elif SCons.Util.is_List(emitter): + kw['emitter'] = ListEmitter(emitter) + + result = apply(BuilderBase, (), kw) + + if not composite is None: + result = CompositeBuilder(result, composite) + + return result + +def _node_errors(builder, env, tlist, slist): + """Validate that the lists of target and source nodes are + legal for this builder and environment. Raise errors or + issue warnings as appropriate. + """ + + # First, figure out if there are any errors in the way the targets + # were specified. + for t in tlist: + if t.side_effect: + raise UserError, "Multiple ways to build the same target were specified for: %s" % t + if t.has_explicit_builder(): + if not t.env is None and not t.env is env: + action = t.builder.action + t_contents = action.get_contents(tlist, slist, t.env) + contents = action.get_contents(tlist, slist, env) + + if t_contents == contents: + msg = "Two different environments were specified for target %s,\n\tbut they appear to have the same action: %s" % (t, action.genstring(tlist, slist, t.env)) + SCons.Warnings.warn(SCons.Warnings.DuplicateEnvironmentWarning, msg) + else: + msg = "Two environments with different actions were specified for the same target: %s" % t + raise UserError, msg + if builder.multi: + if t.builder != builder: + msg = "Two different builders (%s and %s) were specified for the same target: %s" % (t.builder.get_name(env), builder.get_name(env), t) + raise UserError, msg + # TODO(batch): list constructed each time! 
+ if t.get_executor().get_all_targets() != tlist: + msg = "Two different target lists have a target in common: %s (from %s and from %s)" % (t, map(str, t.get_executor().get_all_targets()), map(str, tlist)) + raise UserError, msg + elif t.sources != slist: + msg = "Multiple ways to build the same target were specified for: %s (from %s and from %s)" % (t, map(str, t.sources), map(str, slist)) + raise UserError, msg + + if builder.single_source: + if len(slist) > 1: + raise UserError, "More than one source given for single-source builder: targets=%s sources=%s" % (map(str,tlist), map(str,slist)) + +class EmitterProxy: + """This is a callable class that can act as a + Builder emitter. It holds on to a string that + is a key into an Environment dictionary, and will + look there at actual build time to see if it holds + a callable. If so, we will call that as the actual + emitter.""" + def __init__(self, var): + self.var = SCons.Util.to_String(var) + + def __call__(self, target, source, env): + emitter = self.var + + # Recursively substitute the variable. + # We can't use env.subst() because it deals only + # in strings. Maybe we should change that? + while SCons.Util.is_String(emitter) and env.has_key(emitter): + emitter = env[emitter] + if callable(emitter): + target, source = emitter(target, source, env) + elif SCons.Util.is_List(emitter): + for e in emitter: + target, source = e(target, source, env) + + return (target, source) + + + def __cmp__(self, other): + return cmp(self.var, other.var) + +class BuilderBase: + """Base class for Builders, objects that create output + nodes (files) from input nodes (files). + """ + + if SCons.Memoize.use_memoizer: + __metaclass__ = SCons.Memoize.Memoized_Metaclass + + memoizer_counters = [] + + def __init__(self, action = None, + prefix = '', + suffix = '', + src_suffix = '', + target_factory = None, + source_factory = None, + target_scanner = None, + source_scanner = None, + emitter = None, + multi = 0, + env = None, + single_source = 0, + name = None, + chdir = _null, + is_explicit = 1, + src_builder = None, + ensure_suffix = False, + **overrides): + if __debug__: logInstanceCreation(self, 'Builder.BuilderBase') + self._memo = {} + self.action = action + self.multi = multi + if SCons.Util.is_Dict(prefix): + prefix = CallableSelector(prefix) + self.prefix = prefix + if SCons.Util.is_Dict(suffix): + suffix = CallableSelector(suffix) + self.env = env + self.single_source = single_source + if overrides.has_key('overrides'): + SCons.Warnings.warn(SCons.Warnings.DeprecatedWarning, + "The \"overrides\" keyword to Builder() creation has been deprecated;\n" +\ + "\tspecify the items as keyword arguments to the Builder() call instead.") + overrides.update(overrides['overrides']) + del overrides['overrides'] + if overrides.has_key('scanner'): + SCons.Warnings.warn(SCons.Warnings.DeprecatedWarning, + "The \"scanner\" keyword to Builder() creation has been deprecated;\n" + "\tuse: source_scanner or target_scanner as appropriate.") + del overrides['scanner'] + self.overrides = overrides + + self.set_suffix(suffix) + self.set_src_suffix(src_suffix) + self.ensure_suffix = ensure_suffix + + self.target_factory = target_factory + self.source_factory = source_factory + self.target_scanner = target_scanner + self.source_scanner = source_scanner + + self.emitter = emitter + + # Optional Builder name should only be used for Builders + # that don't get attached to construction environments. 
+ if name: + self.name = name + self.executor_kw = {} + if not chdir is _null: + self.executor_kw['chdir'] = chdir + self.is_explicit = is_explicit + + if src_builder is None: + src_builder = [] + elif not SCons.Util.is_List(src_builder): + src_builder = [ src_builder ] + self.src_builder = src_builder + + def __nonzero__(self): + raise InternalError, "Do not test for the Node.builder attribute directly; use Node.has_builder() instead" + + def get_name(self, env): + """Attempts to get the name of the Builder. + + Look at the BUILDERS variable of env, expecting it to be a + dictionary containing this Builder, and return the key of the + dictionary. If there's no key, then return a directly-configured + name (if there is one) or the name of the class (by default).""" + + try: + index = env['BUILDERS'].values().index(self) + return env['BUILDERS'].keys()[index] + except (AttributeError, KeyError, TypeError, ValueError): + try: + return self.name + except AttributeError: + return str(self.__class__) + + def __cmp__(self, other): + return cmp(self.__dict__, other.__dict__) + + def splitext(self, path, env=None): + if not env: + env = self.env + if env: + suffixes = self.src_suffixes(env) + else: + suffixes = [] + return match_splitext(path, suffixes) + + def _adjustixes(self, files, pre, suf, ensure_suffix=False): + if not files: + return [] + result = [] + if not SCons.Util.is_List(files): + files = [files] + + for f in files: + if SCons.Util.is_String(f): + f = SCons.Util.adjustixes(f, pre, suf, ensure_suffix) + result.append(f) + return result + + def _create_nodes(self, env, target = None, source = None): + """Create and return lists of target and source nodes. + """ + src_suf = self.get_src_suffix(env) + + target_factory = env.get_factory(self.target_factory) + source_factory = env.get_factory(self.source_factory) + + source = self._adjustixes(source, None, src_suf) + slist = env.arg2nodes(source, source_factory) + + pre = self.get_prefix(env, slist) + suf = self.get_suffix(env, slist) + + if target is None: + try: + t_from_s = slist[0].target_from_source + except AttributeError: + raise UserError("Do not know how to create a target from source `%s'" % slist[0]) + except IndexError: + tlist = [] + else: + splitext = lambda S,self=self,env=env: self.splitext(S,env) + tlist = [ t_from_s(pre, suf, splitext) ] + else: + target = self._adjustixes(target, pre, suf, self.ensure_suffix) + tlist = env.arg2nodes(target, target_factory, target=target, source=source) + + if self.emitter: + # The emitter is going to do str(node), but because we're + # being called *from* a builder invocation, the new targets + # don't yet have a builder set on them and will look like + # source files. Fool the emitter's str() calls by setting + # up a temporary builder on the new targets. + new_targets = [] + for t in tlist: + if not t.is_derived(): + t.builder_set(self) + new_targets.append(t) + + orig_tlist = tlist[:] + orig_slist = slist[:] + + target, source = self.emitter(target=tlist, source=slist, env=env) + + # Now delete the temporary builders that we attached to any + # new targets, so that _node_errors() doesn't do weird stuff + # to them because it thinks they already have builders. + for t in new_targets: + if t.builder is self: + # Only delete the temporary builder if the emitter + # didn't change it on us. + t.builder_set(None) + + # Have to call arg2nodes yet again, since it is legal for + # emitters to spit out strings as well as Node instances. 
+ tlist = env.arg2nodes(target, target_factory, + target=orig_tlist, source=orig_slist) + slist = env.arg2nodes(source, source_factory, + target=orig_tlist, source=orig_slist) + + return tlist, slist + + def _execute(self, env, target, source, overwarn={}, executor_kw={}): + # We now assume that target and source are lists or None. + if self.src_builder: + source = self.src_builder_sources(env, source, overwarn) + + if self.single_source and len(source) > 1 and target is None: + result = [] + if target is None: target = [None]*len(source) + for tgt, src in zip(target, source): + if not tgt is None: tgt = [tgt] + if not src is None: src = [src] + result.extend(self._execute(env, tgt, src, overwarn)) + return SCons.Node.NodeList(result) + + overwarn.warn() + + tlist, slist = self._create_nodes(env, target, source) + + # Check for errors with the specified target/source lists. + _node_errors(self, env, tlist, slist) + + # The targets are fine, so find or make the appropriate Executor to + # build this particular list of targets from this particular list of + # sources. + + executor = None + key = None + + if self.multi: + try: + executor = tlist[0].get_executor(create = 0) + except (AttributeError, IndexError): + pass + else: + executor.add_sources(slist) + + if executor is None: + if not self.action: + fmt = "Builder %s must have an action to build %s." + raise UserError, fmt % (self.get_name(env or self.env), + map(str,tlist)) + key = self.action.batch_key(env or self.env, tlist, slist) + if key: + try: + executor = SCons.Executor.GetBatchExecutor(key) + except KeyError: + pass + else: + executor.add_batch(tlist, slist) + + if executor is None: + executor = SCons.Executor.Executor(self.action, env, [], + tlist, slist, executor_kw) + if key: + SCons.Executor.AddBatchExecutor(key, executor) + + # Now set up the relevant information in the target Nodes themselves. + for t in tlist: + t.cwd = env.fs.getcwd() + t.builder_set(self) + t.env_set(env) + t.add_source(slist) + t.set_executor(executor) + t.set_explicit(self.is_explicit) + + return SCons.Node.NodeList(tlist) + + def __call__(self, env, target=None, source=None, chdir=_null, **kw): + # We now assume that target and source are lists or None. + # The caller (typically Environment.BuilderWrapper) is + # responsible for converting any scalar values to lists. + if chdir is _null: + ekw = self.executor_kw + else: + ekw = self.executor_kw.copy() + ekw['chdir'] = chdir + if kw: + if kw.has_key('srcdir'): + def prependDirIfRelative(f, srcdir=kw['srcdir']): + import os.path + if SCons.Util.is_String(f) and not os.path.isabs(f): + f = os.path.join(srcdir, f) + return f + if not SCons.Util.is_List(source): + source = [source] + source = map(prependDirIfRelative, source) + del kw['srcdir'] + if self.overrides: + env_kw = self.overrides.copy() + env_kw.update(kw) + else: + env_kw = kw + else: + env_kw = self.overrides + env = env.Override(env_kw) + return self._execute(env, target, source, OverrideWarner(kw), ekw) + + def adjust_suffix(self, suff): + if suff and not suff[0] in [ '.', '_', '$' ]: + return '.' 
+ suff + return suff + + def get_prefix(self, env, sources=[]): + prefix = self.prefix + if callable(prefix): + prefix = prefix(env, sources) + return env.subst(prefix) + + def set_suffix(self, suffix): + if not callable(suffix): + suffix = self.adjust_suffix(suffix) + self.suffix = suffix + + def get_suffix(self, env, sources=[]): + suffix = self.suffix + if callable(suffix): + suffix = suffix(env, sources) + return env.subst(suffix) + + def set_src_suffix(self, src_suffix): + if not src_suffix: + src_suffix = [] + elif not SCons.Util.is_List(src_suffix): + src_suffix = [ src_suffix ] + adjust = lambda suf, s=self: \ + callable(suf) and suf or s.adjust_suffix(suf) + self.src_suffix = map(adjust, src_suffix) + + def get_src_suffix(self, env): + """Get the first src_suffix in the list of src_suffixes.""" + ret = self.src_suffixes(env) + if not ret: + return '' + return ret[0] + + def add_emitter(self, suffix, emitter): + """Add a suffix-emitter mapping to this Builder. + + This assumes that emitter has been initialized with an + appropriate dictionary type, and will throw a TypeError if + not, so the caller is responsible for knowing that this is an + appropriate method to call for the Builder in question. + """ + self.emitter[suffix] = emitter + + def add_src_builder(self, builder): + """ + Add a new Builder to the list of src_builders. + + This requires wiping out cached values so that the computed + lists of source suffixes get re-calculated. + """ + self._memo = {} + self.src_builder.append(builder) + + def _get_sdict(self, env): + """ + Returns a dictionary mapping all of the source suffixes of all + src_builders of this Builder to the underlying Builder that + should be called first. + + This dictionary is used for each target specified, so we save a + lot of extra computation by memoizing it for each construction + environment. + + Note that this is re-computed each time, not cached, because there + might be changes to one of our source Builders (or one of their + source Builders, and so on, and so on...) that we can't "see." + + The underlying methods we call cache their computed values, + though, so we hope repeatedly aggregating them into a dictionary + like this won't be too big a hit. We may need to look for a + better way to do this if performance data show this has turned + into a significant bottleneck. + """ + sdict = {} + for bld in self.get_src_builders(env): + for suf in bld.src_suffixes(env): + sdict[suf] = bld + return sdict + + def src_builder_sources(self, env, source, overwarn={}): + sdict = self._get_sdict(env) + + src_suffixes = self.src_suffixes(env) + + lengths = list(set(map(len, src_suffixes))) + + def match_src_suffix(name, src_suffixes=src_suffixes, lengths=lengths): + node_suffixes = map(lambda l, n=name: n[-l:], lengths) + for suf in src_suffixes: + if suf in node_suffixes: + return suf + return None + + result = [] + for s in SCons.Util.flatten(source): + if SCons.Util.is_String(s): + match_suffix = match_src_suffix(env.subst(s)) + if not match_suffix and not '.' in s: + src_suf = self.get_src_suffix(env) + s = self._adjustixes(s, None, src_suf)[0] + else: + match_suffix = match_src_suffix(s.name) + if match_suffix: + try: + bld = sdict[match_suffix] + except KeyError: + result.append(s) + else: + tlist = bld._execute(env, None, [s], overwarn) + # If the subsidiary Builder returned more than one + # target, then filter out any sources that this + # Builder isn't capable of building. 
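_get_sdict() and src_builder_sources() above dispatch each source file to a subsidiary builder by suffix. The standalone sketch below mirrors that matching logic with a hypothetical suffix table; the real code maps suffixes to Builder objects and invokes them through _execute().

    # Hypothetical suffix table: which subsidiary builder handles which suffix.
    sdict = {'.c': 'CObjectBuilder', '.y': 'YaccBuilder'}

    def match_src_suffix(name, src_suffixes):
        # Compare the tail of the file name at every distinct suffix length,
        # the same trick src_builder_sources() uses above.
        lengths = set(len(s) for s in src_suffixes)
        tails = [name[-l:] for l in lengths]
        for suf in src_suffixes:
            if suf in tails:
                return suf
        return None

    suffix = match_src_suffix('parser.y', list(sdict.keys()))   # -> '.y'
    handler = sdict.get(suffix)                                 # -> 'YaccBuilder'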
+ if len(tlist) > 1: + mss = lambda t, m=match_src_suffix: m(t.name) + tlist = filter(mss, tlist) + result.extend(tlist) + else: + result.append(s) + + source_factory = env.get_factory(self.source_factory) + + return env.arg2nodes(result, source_factory) + + def _get_src_builders_key(self, env): + return id(env) + + memoizer_counters.append(SCons.Memoize.CountDict('get_src_builders', _get_src_builders_key)) + + def get_src_builders(self, env): + """ + Returns the list of source Builders for this Builder. + + This exists mainly to look up Builders referenced as + strings in the 'BUILDER' variable of the construction + environment and cache the result. + """ + memo_key = id(env) + try: + memo_dict = self._memo['get_src_builders'] + except KeyError: + memo_dict = {} + self._memo['get_src_builders'] = memo_dict + else: + try: + return memo_dict[memo_key] + except KeyError: + pass + + builders = [] + for bld in self.src_builder: + if SCons.Util.is_String(bld): + try: + bld = env['BUILDERS'][bld] + except KeyError: + continue + builders.append(bld) + + memo_dict[memo_key] = builders + return builders + + def _subst_src_suffixes_key(self, env): + return id(env) + + memoizer_counters.append(SCons.Memoize.CountDict('subst_src_suffixes', _subst_src_suffixes_key)) + + def subst_src_suffixes(self, env): + """ + The suffix list may contain construction variable expansions, + so we have to evaluate the individual strings. To avoid doing + this over and over, we memoize the results for each construction + environment. + """ + memo_key = id(env) + try: + memo_dict = self._memo['subst_src_suffixes'] + except KeyError: + memo_dict = {} + self._memo['subst_src_suffixes'] = memo_dict + else: + try: + return memo_dict[memo_key] + except KeyError: + pass + suffixes = map(lambda x, s=self, e=env: e.subst(x), self.src_suffix) + memo_dict[memo_key] = suffixes + return suffixes + + def src_suffixes(self, env): + """ + Returns the list of source suffixes for all src_builders of this + Builder. + + This is essentially a recursive descent of the src_builder "tree." + (This value isn't cached because there may be changes in a + src_builder many levels deep that we can't see.) + """ + sdict = {} + suffixes = self.subst_src_suffixes(env) + for s in suffixes: + sdict[s] = 1 + for builder in self.get_src_builders(env): + for s in builder.src_suffixes(env): + if not sdict.has_key(s): + sdict[s] = 1 + suffixes.append(s) + return suffixes + +class CompositeBuilder(SCons.Util.Proxy): + """A Builder Proxy whose main purpose is to always have + a DictCmdGenerator as its action, and to provide access + to the DictCmdGenerator's add_action() method. + """ + + def __init__(self, builder, cmdgen): + if __debug__: logInstanceCreation(self, 'Builder.CompositeBuilder') + SCons.Util.Proxy.__init__(self, builder) + + # cmdgen should always be an instance of DictCmdGenerator. 
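get_src_builders() and subst_src_suffixes() above both use the same per-environment memoization idiom: the computed list is stored in self._memo under the method's name, keyed by id(env), so repeated calls with the same construction environment skip the recomputation. A compact standalone sketch of that idiom follows; the class and the stand-in computation are hypothetical.

    class MemoExample:
        def __init__(self):
            self._memo = {}

        def _compute(self, env):
            # Stand-in for the real per-environment work.
            return sorted(env.keys())

        def cached(self, env):
            # One sub-dictionary per memoized method, keyed by id(env),
            # mirroring the pattern used by get_src_builders() above.
            memo_dict = self._memo.setdefault('cached', {})
            memo_key = id(env)
            if memo_key not in memo_dict:
                memo_dict[memo_key] = self._compute(env)
            return memo_dict[memo_key]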
+ self.cmdgen = cmdgen + self.builder = builder + + def add_action(self, suffix, action): + self.cmdgen.add_action(suffix, action) + self.set_src_suffix(self.cmdgen.src_suffixes()) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/CacheDir.py b/engine/SCons/CacheDir.py new file mode 100644 index 0000000..f14aa55 --- /dev/null +++ b/engine/SCons/CacheDir.py @@ -0,0 +1,217 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/CacheDir.py 4577 2009/12/27 19:43:56 scons" + +__doc__ = """ +CacheDir support +""" + +import os.path +import stat +import string +import sys + +import SCons.Action + +cache_enabled = True +cache_debug = False +cache_force = False +cache_show = False + +def CacheRetrieveFunc(target, source, env): + t = target[0] + fs = t.fs + cd = env.get_CacheDir() + cachedir, cachefile = cd.cachepath(t) + if not fs.exists(cachefile): + cd.CacheDebug('CacheRetrieve(%s): %s not in cache\n', t, cachefile) + return 1 + cd.CacheDebug('CacheRetrieve(%s): retrieving from %s\n', t, cachefile) + if SCons.Action.execute_actions: + if fs.islink(cachefile): + fs.symlink(fs.readlink(cachefile), t.path) + else: + env.copy_from_cache(cachefile, t.path) + st = fs.stat(cachefile) + fs.chmod(t.path, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) + return 0 + +def CacheRetrieveString(target, source, env): + t = target[0] + fs = t.fs + cd = env.get_CacheDir() + cachedir, cachefile = cd.cachepath(t) + if t.fs.exists(cachefile): + return "Retrieved `%s' from cache" % t.path + return None + +CacheRetrieve = SCons.Action.Action(CacheRetrieveFunc, CacheRetrieveString) + +CacheRetrieveSilent = SCons.Action.Action(CacheRetrieveFunc, None) + +def CachePushFunc(target, source, env): + t = target[0] + if t.nocache: + return + fs = t.fs + cd = env.get_CacheDir() + cachedir, cachefile = cd.cachepath(t) + if fs.exists(cachefile): + # Don't bother copying it if it's already there. Note that + # usually this "shouldn't happen" because if the file already + # existed in cache, we'd have retrieved the file from there, + # not built it. This can happen, though, in a race, if some + # other person running the same build pushes their copy to + # the cache after we decide we need to build it but before our + # build completes. 
+ cd.CacheDebug('CachePush(%s): %s already exists in cache\n', t, cachefile) + return + + cd.CacheDebug('CachePush(%s): pushing to %s\n', t, cachefile) + + tempfile = cachefile+'.tmp'+str(os.getpid()) + errfmt = "Unable to copy %s to cache. Cache file is %s" + + if not fs.isdir(cachedir): + try: + fs.makedirs(cachedir) + except EnvironmentError: + # We may have received an exception because another process + # has beaten us creating the directory. + if not fs.isdir(cachedir): + msg = errfmt % (str(target), cachefile) + raise SCons.Errors.EnvironmentError, msg + + try: + if fs.islink(t.path): + fs.symlink(fs.readlink(t.path), tempfile) + else: + fs.copy2(t.path, tempfile) + fs.rename(tempfile, cachefile) + st = fs.stat(t.path) + fs.chmod(cachefile, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) + except EnvironmentError: + # It's possible someone else tried writing the file at the + # same time we did, or else that there was some problem like + # the CacheDir being on a separate file system that's full. + # In any case, inability to push a file to cache doesn't affect + # the correctness of the build, so just print a warning. + msg = errfmt % (str(target), cachefile) + SCons.Warnings.warn(SCons.Warnings.CacheWriteErrorWarning, msg) + +CachePush = SCons.Action.Action(CachePushFunc, None) + +class CacheDir: + + def __init__(self, path): + try: + import hashlib + except ImportError: + msg = "No hashlib or MD5 module available, CacheDir() not supported" + SCons.Warnings.warn(SCons.Warnings.NoMD5ModuleWarning, msg) + self.path = None + else: + self.path = path + self.current_cache_debug = None + self.debugFP = None + + def CacheDebug(self, fmt, target, cachefile): + if cache_debug != self.current_cache_debug: + if cache_debug == '-': + self.debugFP = sys.stdout + elif cache_debug: + self.debugFP = open(cache_debug, 'w') + else: + self.debugFP = None + self.current_cache_debug = cache_debug + if self.debugFP: + self.debugFP.write(fmt % (target, os.path.split(cachefile)[1])) + + def is_enabled(self): + return (cache_enabled and not self.path is None) + + def cachepath(self, node): + """ + """ + if not self.is_enabled(): + return None, None + + sig = node.get_cachedir_bsig() + subdir = string.upper(sig[0]) + dir = os.path.join(self.path, subdir) + return dir, os.path.join(dir, sig) + + def retrieve(self, node): + """ + This method is called from multiple threads in a parallel build, + so only do thread safe stuff here. Do thread unsafe stuff in + built(). + + Note that there's a special trick here with the execute flag + (one that's not normally done for other actions). Basically + if the user requested a no_exec (-n) build, then + SCons.Action.execute_actions is set to 0 and when any action + is called, it does its showing but then just returns zero + instead of actually calling the action execution operation. + The problem for caching is that if the file does NOT exist in + cache then the CacheRetrieveString won't return anything to + show for the task, but the Action.__call__ won't call + CacheRetrieveFunc; instead it just returns zero, which makes + the code below think that the file *was* successfully + retrieved from the cache, therefore it doesn't do any + subsequent building. However, the CacheRetrieveString didn't + print anything because it didn't actually exist in the cache, + and no more build actions will be performed, so the user just + sees nothing. 
The fix is to tell Action.__call__ to always + execute the CacheRetrieveFunc and then have the latter + explicitly check SCons.Action.execute_actions itself. + """ + if not self.is_enabled(): + return False + + env = node.get_build_env() + if cache_show: + if CacheRetrieveSilent(node, [], env, execute=1) == 0: + node.build(presub=0, execute=0) + return True + else: + if CacheRetrieve(node, [], env, execute=1) == 0: + return True + + return False + + def push(self, node): + if not self.is_enabled(): + return + return CachePush(node, [], node.get_build_env()) + + def push_if_forced(self, node): + if cache_force: + return self.push(node) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Conftest.py b/engine/SCons/Conftest.py new file mode 100644 index 0000000..e995e77 --- /dev/null +++ b/engine/SCons/Conftest.py @@ -0,0 +1,794 @@ +"""SCons.Conftest + +Autoconf-like configuration support; low level implementation of tests. +""" + +# +# Copyright (c) 2003 Stichting NLnet Labs +# Copyright (c) 2001, 2002, 2003 Steven Knight +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +# +# The purpose of this module is to define how a check is to be performed. +# Use one of the Check...() functions below. +# + +# +# A context class is used that defines functions for carrying out the tests, +# logging and messages. The following methods and members must be present: +# +# context.Display(msg) Function called to print messages that are normally +# displayed for the user. Newlines are explicitly used. +# The text should also be written to the logfile! +# +# context.Log(msg) Function called to write to a log file. +# +# context.BuildProg(text, ext) +# Function called to build a program, using "ext" for the +# file extention. Must return an empty string for +# success, an error message for failure. +# For reliable test results building should be done just +# like an actual program would be build, using the same +# command and arguments (including configure results so +# far). +# +# context.CompileProg(text, ext) +# Function called to compile a program, using "ext" for +# the file extention. Must return an empty string for +# success, an error message for failure. +# For reliable test results compiling should be done just +# like an actual source file would be compiled, using the +# same command and arguments (including configure results +# so far). 
+# +# context.AppendLIBS(lib_name_list) +# Append "lib_name_list" to the value of LIBS. +# "lib_namelist" is a list of strings. +# Return the value of LIBS before changing it (any type +# can be used, it is passed to SetLIBS() later.) +# +# context.PrependLIBS(lib_name_list) +# Prepend "lib_name_list" to the value of LIBS. +# "lib_namelist" is a list of strings. +# Return the value of LIBS before changing it (any type +# can be used, it is passed to SetLIBS() later.) +# +# context.SetLIBS(value) +# Set LIBS to "value". The type of "value" is what +# AppendLIBS() returned. +# Return the value of LIBS before changing it (any type +# can be used, it is passed to SetLIBS() later.) +# +# context.headerfilename +# Name of file to append configure results to, usually +# "confdefs.h". +# The file must not exist or be empty when starting. +# Empty or None to skip this (some tests will not work!). +# +# context.config_h (may be missing). If present, must be a string, which +# will be filled with the contents of a config_h file. +# +# context.vardict Dictionary holding variables used for the tests and +# stores results from the tests, used for the build +# commands. +# Normally contains "CC", "LIBS", "CPPFLAGS", etc. +# +# context.havedict Dictionary holding results from the tests that are to +# be used inside a program. +# Names often start with "HAVE_". These are zero +# (feature not present) or one (feature present). Other +# variables may have any value, e.g., "PERLVERSION" can +# be a number and "SYSTEMNAME" a string. +# + +import re +import string +from types import IntType + +# +# PUBLIC VARIABLES +# + +LogInputFiles = 1 # Set that to log the input files in case of a failed test +LogErrorMessages = 1 # Set that to log Conftest-generated error messages + +# +# PUBLIC FUNCTIONS +# + +# Generic remarks: +# - When a language is specified which is not supported the test fails. The +# message is a bit different, because not all the arguments for the normal +# message are available yet (chicken-egg problem). + + +def CheckBuilder(context, text = None, language = None): + """ + Configure check to see if the compiler works. + Note that this uses the current value of compiler and linker flags, make + sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly. + "language" should be "C" or "C++" and is used to select the compiler. + Default is "C". + "text" may be used to specify the code to be build. + Returns an empty string for success, an error message for failure. + """ + lang, suffix, msg = _lang2suffix(language) + if msg: + context.Display("%s\n" % msg) + return msg + + if not text: + text = """ +int main() { + return 0; +} +""" + + context.Display("Checking if building a %s file works... " % lang) + ret = context.BuildProg(text, suffix) + _YesNoResult(context, ret, None, text) + return ret + +def CheckCC(context): + """ + Configure check for a working C compiler. + + This checks whether the C compiler, as defined in the $CC construction + variable, can compile a C source file. It uses the current $CCCOM value + too, so that it can test against non working flags. + + """ + context.Display("Checking whether the C compiler works") + text = """ +int main() +{ + return 0; +} +""" + ret = _check_empty_program(context, 'CC', text, 'C') + _YesNoResult(context, ret, None, text) + return ret + +def CheckSHCC(context): + """ + Configure check for a working shared C compiler. + + This checks whether the C compiler, as defined in the $SHCC construction + variable, can compile a C source file. 
It uses the current $SHCCCOM value + too, so that it can test against non working flags. + + """ + context.Display("Checking whether the (shared) C compiler works") + text = """ +int foo() +{ + return 0; +} +""" + ret = _check_empty_program(context, 'SHCC', text, 'C', use_shared = True) + _YesNoResult(context, ret, None, text) + return ret + +def CheckCXX(context): + """ + Configure check for a working CXX compiler. + + This checks whether the CXX compiler, as defined in the $CXX construction + variable, can compile a CXX source file. It uses the current $CXXCOM value + too, so that it can test against non working flags. + + """ + context.Display("Checking whether the C++ compiler works") + text = """ +int main() +{ + return 0; +} +""" + ret = _check_empty_program(context, 'CXX', text, 'C++') + _YesNoResult(context, ret, None, text) + return ret + +def CheckSHCXX(context): + """ + Configure check for a working shared CXX compiler. + + This checks whether the CXX compiler, as defined in the $SHCXX construction + variable, can compile a CXX source file. It uses the current $SHCXXCOM value + too, so that it can test against non working flags. + + """ + context.Display("Checking whether the (shared) C++ compiler works") + text = """ +int main() +{ + return 0; +} +""" + ret = _check_empty_program(context, 'SHCXX', text, 'C++', use_shared = True) + _YesNoResult(context, ret, None, text) + return ret + +def _check_empty_program(context, comp, text, language, use_shared = False): + """Return 0 on success, 1 otherwise.""" + if not context.env.has_key(comp) or not context.env[comp]: + # The compiler construction variable is not set or empty + return 1 + + lang, suffix, msg = _lang2suffix(language) + if msg: + return 1 + + if use_shared: + return context.CompileSharedObject(text, suffix) + else: + return context.CompileProg(text, suffix) + + +def CheckFunc(context, function_name, header = None, language = None): + """ + Configure check for a function "function_name". + "language" should be "C" or "C++" and is used to select the compiler. + Default is "C". + Optional "header" can be defined to define a function prototype, include a + header file or anything else that comes before main(). + Sets HAVE_function_name in context.havedict according to the result. + Note that this uses the current value of compiler and linker flags, make + sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly. + Returns an empty string for success, an error message for failure. + """ + + # Remarks from autoconf: + # - Don't include <ctype.h> because on OSF/1 3.0 it includes <sys/types.h> + # which includes <sys/select.h> which contains a prototype for select. + # Similarly for bzero. + # - assert.h is included to define __stub macros and hopefully few + # prototypes, which can conflict with char $1(); below. + # - Override any gcc2 internal prototype to avoid an error. + # - We use char for the function declaration because int might match the + # return type of a gcc2 builtin and then its argument prototype would + # still apply. + # - The GNU C library defines this for functions which it implements to + # always fail with ENOSYS. Some functions are actually named something + # starting with __ and the normal name is an alias. 
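All of the Check*() functions above drive a "context" object whose required interface (Display, Log, BuildProg, CompileProg, havedict, headerfilename, and so on) is documented in the comment block at the top of this file. The stripped-down sketch below is purely illustrative: a real context (the SConf machinery) actually compiles and links the generated test programs, whereas this one simply pretends every build succeeds.

    import sys

    class FakeContext:
        def __init__(self):
            self.vardict = {}
            self.havedict = {}
            self.headerfilename = None   # skip writing confdefs.h
            self.config_h = ""

        def Display(self, msg):
            sys.stdout.write(msg)

        def Log(self, msg):
            pass                         # a real context writes config.log

        def BuildProg(self, text, ext):
            return ""                    # empty string means the build "worked"

        def CompileProg(self, text, ext):
            return ""

    # CheckBuilder(FakeContext()) would then report success without ever
    # invoking a compiler.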
+ + if context.headerfilename: + includetext = '#include "%s"' % context.headerfilename + else: + includetext = '' + if not header: + header = """ +#ifdef __cplusplus +extern "C" +#endif +char %s();""" % function_name + + lang, suffix, msg = _lang2suffix(language) + if msg: + context.Display("Cannot check for %s(): %s\n" % (function_name, msg)) + return msg + + text = """ +%(include)s +#include <assert.h> +%(hdr)s + +int main() { +#if defined (__stub_%(name)s) || defined (__stub___%(name)s) + fail fail fail +#else + %(name)s(); +#endif + + return 0; +} +""" % { 'name': function_name, + 'include': includetext, + 'hdr': header } + + context.Display("Checking for %s function %s()... " % (lang, function_name)) + ret = context.BuildProg(text, suffix) + _YesNoResult(context, ret, "HAVE_" + function_name, text, + "Define to 1 if the system has the function `%s'." %\ + function_name) + return ret + + +def CheckHeader(context, header_name, header = None, language = None, + include_quotes = None): + """ + Configure check for a C or C++ header file "header_name". + Optional "header" can be defined to do something before including the + header file (unusual, supported for consistency). + "language" should be "C" or "C++" and is used to select the compiler. + Default is "C". + Sets HAVE_header_name in context.havedict according to the result. + Note that this uses the current value of compiler and linker flags, make + sure $CFLAGS and $CPPFLAGS are set correctly. + Returns an empty string for success, an error message for failure. + """ + # Why compile the program instead of just running the preprocessor? + # It is possible that the header file exists, but actually using it may + # fail (e.g., because it depends on other header files). Thus this test is + # more strict. It may require using the "header" argument. + # + # Use <> by default, because the check is normally used for system header + # files. SCons passes '""' to overrule this. + + # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. + if context.headerfilename: + includetext = '#include "%s"\n' % context.headerfilename + else: + includetext = '' + if not header: + header = "" + + lang, suffix, msg = _lang2suffix(language) + if msg: + context.Display("Cannot check for header file %s: %s\n" + % (header_name, msg)) + return msg + + if not include_quotes: + include_quotes = "<>" + + text = "%s%s\n#include %s%s%s\n\n" % (includetext, header, + include_quotes[0], header_name, include_quotes[1]) + + context.Display("Checking for %s header file %s... " % (lang, header_name)) + ret = context.CompileProg(text, suffix) + _YesNoResult(context, ret, "HAVE_" + header_name, text, + "Define to 1 if you have the <%s> header file." % header_name) + return ret + + +def CheckType(context, type_name, fallback = None, + header = None, language = None): + """ + Configure check for a C or C++ type "type_name". + Optional "header" can be defined to include a header file. + "language" should be "C" or "C++" and is used to select the compiler. + Default is "C". + Sets HAVE_type_name in context.havedict according to the result. + Note that this uses the current value of compiler and linker flags, make + sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly. + Returns an empty string for success, an error message for failure. + """ + + # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. 
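CheckHeader() above assembles its test program from the two characters of include_quotes, defaulting to angle brackets because the check is usually for a system header; passing '""' selects the quoted form instead. A tiny sketch of how that include line is formed (the helper name is hypothetical):

    def include_line(header_name, include_quotes="<>"):
        # Mirrors the formatting used by CheckHeader() above.
        return '#include %s%s%s' % (include_quotes[0], header_name, include_quotes[1])

    include_line('stdio.h')           # -> '#include <stdio.h>'
    include_line('config.h', '""')    # -> '#include "config.h"'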
+ if context.headerfilename: + includetext = '#include "%s"' % context.headerfilename + else: + includetext = '' + if not header: + header = "" + + lang, suffix, msg = _lang2suffix(language) + if msg: + context.Display("Cannot check for %s type: %s\n" % (type_name, msg)) + return msg + + # Remarks from autoconf about this test: + # - Grepping for the type in include files is not reliable (grep isn't + # portable anyway). + # - Using "TYPE my_var;" doesn't work for const qualified types in C++. + # Adding an initializer is not valid for some C++ classes. + # - Using the type as parameter to a function either fails for K&$ C or for + # C++. + # - Using "TYPE *my_var;" is valid in C for some types that are not + # declared (struct something). + # - Using "sizeof(TYPE)" is valid when TYPE is actually a variable. + # - Using the previous two together works reliably. + text = """ +%(include)s +%(header)s + +int main() { + if ((%(name)s *) 0) + return 0; + if (sizeof (%(name)s)) + return 0; +} +""" % { 'include': includetext, + 'header': header, + 'name': type_name } + + context.Display("Checking for %s type %s... " % (lang, type_name)) + ret = context.BuildProg(text, suffix) + _YesNoResult(context, ret, "HAVE_" + type_name, text, + "Define to 1 if the system has the type `%s'." % type_name) + if ret and fallback and context.headerfilename: + f = open(context.headerfilename, "a") + f.write("typedef %s %s;\n" % (fallback, type_name)) + f.close() + + return ret + +def CheckTypeSize(context, type_name, header = None, language = None, expect = None): + """This check can be used to get the size of a given type, or to check whether + the type is of expected size. + + Arguments: + - type : str + the type to check + - includes : sequence + list of headers to include in the test code before testing the type + - language : str + 'C' or 'C++' + - expect : int + if given, will test wether the type has the given number of bytes. + If not given, will automatically find the size. + + Returns: + status : int + 0 if the check failed, or the found size of the type if the check succeeded.""" + + # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. + if context.headerfilename: + includetext = '#include "%s"' % context.headerfilename + else: + includetext = '' + + if not header: + header = "" + + lang, suffix, msg = _lang2suffix(language) + if msg: + context.Display("Cannot check for %s type: %s\n" % (type_name, msg)) + return msg + + src = includetext + header + if not expect is None: + # Only check if the given size is the right one + context.Display('Checking %s is %d bytes... ' % (type_name, expect)) + + # test code taken from autoconf: this is a pretty clever hack to find that + # a type is of a given size using only compilation. This speeds things up + # quite a bit compared to straightforward code using TryRun + src = src + r""" +typedef %s scons_check_type; + +int main() +{ + static int test_array[1 - 2 * !(((long int) (sizeof(scons_check_type))) == %d)]; + test_array[0] = 0; + + return 0; +} +""" + + st = context.CompileProg(src % (type_name, expect), suffix) + if not st: + context.Display("yes\n") + _Have(context, "SIZEOF_%s" % type_name, expect, + "The size of `%s', as computed by sizeof." % type_name) + return expect + else: + context.Display("no\n") + _LogFailed(context, src, st) + return 0 + else: + # Only check if the given size is the right one + context.Message('Checking size of %s ... 
' % type_name) + + # We have to be careful with the program we wish to test here since + # compilation will be attempted using the current environment's flags. + # So make sure that the program will compile without any warning. For + # example using: 'int main(int argc, char** argv)' will fail with the + # '-Wall -Werror' flags since the variables argc and argv would not be + # used in the program... + # + src = src + """ +#include <stdlib.h> +#include <stdio.h> +int main() { + printf("%d", (int)sizeof(""" + type_name + """)); + return 0; +} + """ + st, out = context.RunProg(src, suffix) + try: + size = int(out) + except ValueError: + # If cannot convert output of test prog to an integer (the size), + # something went wront, so just fail + st = 1 + size = 0 + + if not st: + context.Display("yes\n") + _Have(context, "SIZEOF_%s" % type_name, size, + "The size of `%s', as computed by sizeof." % type_name) + return size + else: + context.Display("no\n") + _LogFailed(context, src, st) + return 0 + + return 0 + +def CheckDeclaration(context, symbol, includes = None, language = None): + """Checks whether symbol is declared. + + Use the same test as autoconf, that is test whether the symbol is defined + as a macro or can be used as an r-value. + + Arguments: + symbol : str + the symbol to check + includes : str + Optional "header" can be defined to include a header file. + language : str + only C and C++ supported. + + Returns: + status : bool + True if the check failed, False if succeeded.""" + + # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. + if context.headerfilename: + includetext = '#include "%s"' % context.headerfilename + else: + includetext = '' + + if not includes: + includes = "" + + lang, suffix, msg = _lang2suffix(language) + if msg: + context.Display("Cannot check for declaration %s: %s\n" % (type_name, msg)) + return msg + + src = includetext + includes + context.Display('Checking whether %s is declared... ' % symbol) + + src = src + r""" +int main() +{ +#ifndef %s + (void) %s; +#endif + ; + return 0; +} +""" % (symbol, symbol) + + st = context.CompileProg(src, suffix) + _YesNoResult(context, st, "HAVE_DECL_" + symbol, src, + "Set to 1 if %s is defined." % symbol) + return st + +def CheckLib(context, libs, func_name = None, header = None, + extra_libs = None, call = None, language = None, autoadd = 1, + append = True): + """ + Configure check for a C or C++ libraries "libs". Searches through + the list of libraries, until one is found where the test succeeds. + Tests if "func_name" or "call" exists in the library. Note: if it exists + in another library the test succeeds anyway! + Optional "header" can be defined to include a header file. If not given a + default prototype for "func_name" is added. + Optional "extra_libs" is a list of library names to be added after + "lib_name" in the build command. To be used for libraries that "lib_name" + depends on. + Optional "call" replaces the call to "func_name" in the test code. It must + consist of complete C statements, including a trailing ";". + Both "func_name" and "call" arguments are optional, and in that case, just + linking against the libs is tested. + "language" should be "C" or "C++" and is used to select the compiler. + Default is "C". + Note that this uses the current value of compiler and linker flags, make + sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly. + Returns an empty string for success, an error message for failure. 
+ """ + # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. + if context.headerfilename: + includetext = '#include "%s"' % context.headerfilename + else: + includetext = '' + if not header: + header = "" + + text = """ +%s +%s""" % (includetext, header) + + # Add a function declaration if needed. + if func_name and func_name != "main": + if not header: + text = text + """ +#ifdef __cplusplus +extern "C" +#endif +char %s(); +""" % func_name + + # The actual test code. + if not call: + call = "%s();" % func_name + + # if no function to test, leave main() blank + text = text + """ +int +main() { + %s +return 0; +} +""" % (call or "") + + if call: + i = string.find(call, "\n") + if i > 0: + calltext = call[:i] + ".." + elif call[-1] == ';': + calltext = call[:-1] + else: + calltext = call + + for lib_name in libs: + + lang, suffix, msg = _lang2suffix(language) + if msg: + context.Display("Cannot check for library %s: %s\n" % (lib_name, msg)) + return msg + + # if a function was specified to run in main(), say it + if call: + context.Display("Checking for %s in %s library %s... " + % (calltext, lang, lib_name)) + # otherwise, just say the name of library and language + else: + context.Display("Checking for %s library %s... " + % (lang, lib_name)) + + if lib_name: + l = [ lib_name ] + if extra_libs: + l.extend(extra_libs) + if append: + oldLIBS = context.AppendLIBS(l) + else: + oldLIBS = context.PrependLIBS(l) + sym = "HAVE_LIB" + lib_name + else: + oldLIBS = -1 + sym = None + + ret = context.BuildProg(text, suffix) + + _YesNoResult(context, ret, sym, text, + "Define to 1 if you have the `%s' library." % lib_name) + if oldLIBS != -1 and (ret or not autoadd): + context.SetLIBS(oldLIBS) + + if not ret: + return ret + + return ret + +# +# END OF PUBLIC FUNCTIONS +# + +def _YesNoResult(context, ret, key, text, comment = None): + """ + Handle the result of a test with a "yes" or "no" result. + "ret" is the return value: empty if OK, error message when not. + "key" is the name of the symbol to be defined (HAVE_foo). + "text" is the source code of the program used for testing. + "comment" is the C comment to add above the line defining the symbol (the + comment is automatically put inside a /* */). If None, no comment is added. + """ + if key: + _Have(context, key, not ret, comment) + if ret: + context.Display("no\n") + _LogFailed(context, text, ret) + else: + context.Display("yes\n") + + +def _Have(context, key, have, comment = None): + """ + Store result of a test in context.havedict and context.headerfilename. + "key" is a "HAVE_abc" name. It is turned into all CAPITALS and non- + alphanumerics are replaced by an underscore. + The value of "have" can be: + 1 - Feature is defined, add "#define key". + 0 - Feature is not defined, add "/* #undef key */". + Adding "undef" is what autoconf does. Not useful for the + compiler, but it shows that the test was done. + number - Feature is defined to this number "#define key have". + Doesn't work for 0 or 1, use a string then. + string - Feature is defined to this string "#define key have". + Give "have" as is should appear in the header file, include quotes + when desired and escape special characters! 
+ """ + key_up = string.upper(key) + key_up = re.sub('[^A-Z0-9_]', '_', key_up) + context.havedict[key_up] = have + if have == 1: + line = "#define %s 1\n" % key_up + elif have == 0: + line = "/* #undef %s */\n" % key_up + elif type(have) == IntType: + line = "#define %s %d\n" % (key_up, have) + else: + line = "#define %s %s\n" % (key_up, str(have)) + + if comment is not None: + lines = "\n/* %s */\n" % comment + line + else: + lines = "\n" + line + + if context.headerfilename: + f = open(context.headerfilename, "a") + f.write(lines) + f.close() + elif hasattr(context,'config_h'): + context.config_h = context.config_h + lines + + +def _LogFailed(context, text, msg): + """ + Write to the log about a failed program. + Add line numbers, so that error messages can be understood. + """ + if LogInputFiles: + context.Log("Failed program was:\n") + lines = string.split(text, '\n') + if len(lines) and lines[-1] == '': + lines = lines[:-1] # remove trailing empty line + n = 1 + for line in lines: + context.Log("%d: %s\n" % (n, line)) + n = n + 1 + if LogErrorMessages: + context.Log("Error message: %s\n" % msg) + + +def _lang2suffix(lang): + """ + Convert a language name to a suffix. + When "lang" is empty or None C is assumed. + Returns a tuple (lang, suffix, None) when it works. + For an unrecognized language returns (None, None, msg). + Where: + lang = the unified language name + suffix = the suffix, including the leading dot + msg = an error message + """ + if not lang or lang in ["C", "c"]: + return ("C", ".c", None) + if lang in ["c++", "C++", "cpp", "CXX", "cxx"]: + return ("C++", ".cpp", None) + + return None, None, "Unsupported language: %s" % lang + + +# vim: set sw=4 et sts=4 tw=79 fo+=l: + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Debug.py b/engine/SCons/Debug.py new file mode 100644 index 0000000..a7681c3 --- /dev/null +++ b/engine/SCons/Debug.py @@ -0,0 +1,237 @@ +"""SCons.Debug + +Code for debugging SCons internal things. Not everything here is +guaranteed to work all the way back to Python 1.5.2, and shouldn't be +needed by most users. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Debug.py 4577 2009/12/27 19:43:56 scons" + +import os +import string +import sys +import time + +# Recipe 14.10 from the Python Cookbook. 
+try: + import weakref +except ImportError: + def logInstanceCreation(instance, name=None): + pass +else: + def logInstanceCreation(instance, name=None): + if name is None: + name = instance.__class__.__name__ + if not tracked_classes.has_key(name): + tracked_classes[name] = [] + tracked_classes[name].append(weakref.ref(instance)) + + + +tracked_classes = {} + +def string_to_classes(s): + if s == '*': + c = tracked_classes.keys() + c.sort() + return c + else: + return string.split(s) + +def fetchLoggedInstances(classes="*"): + classnames = string_to_classes(classes) + return map(lambda cn: (cn, len(tracked_classes[cn])), classnames) + +def countLoggedInstances(classes, file=sys.stdout): + for classname in string_to_classes(classes): + file.write("%s: %d\n" % (classname, len(tracked_classes[classname]))) + +def listLoggedInstances(classes, file=sys.stdout): + for classname in string_to_classes(classes): + file.write('\n%s:\n' % classname) + for ref in tracked_classes[classname]: + obj = ref() + if obj is not None: + file.write(' %s\n' % repr(obj)) + +def dumpLoggedInstances(classes, file=sys.stdout): + for classname in string_to_classes(classes): + file.write('\n%s:\n' % classname) + for ref in tracked_classes[classname]: + obj = ref() + if obj is not None: + file.write(' %s:\n' % obj) + for key, value in obj.__dict__.items(): + file.write(' %20s : %s\n' % (key, value)) + + + +if sys.platform[:5] == "linux": + # Linux doesn't actually support memory usage stats from getrusage(). + def memory(): + mstr = open('/proc/self/stat').read() + mstr = string.split(mstr)[22] + return int(mstr) +else: + try: + import resource + except ImportError: + try: + import win32process + import win32api + except ImportError: + def memory(): + return 0 + else: + def memory(): + process_handle = win32api.GetCurrentProcess() + memory_info = win32process.GetProcessMemoryInfo( process_handle ) + return memory_info['PeakWorkingSetSize'] + else: + def memory(): + res = resource.getrusage(resource.RUSAGE_SELF) + return res[4] + +# returns caller's stack +def caller_stack(*backlist): + import traceback + if not backlist: + backlist = [0] + result = [] + for back in backlist: + tb = traceback.extract_stack(limit=3+back) + key = tb[0][:3] + result.append('%s:%d(%s)' % func_shorten(key)) + return result + +caller_bases = {} +caller_dicts = {} + +# trace a caller's stack +def caller_trace(back=0): + import traceback + tb = traceback.extract_stack(limit=3+back) + tb.reverse() + callee = tb[1][:3] + caller_bases[callee] = caller_bases.get(callee, 0) + 1 + for caller in tb[2:]: + caller = callee + caller[:3] + try: + entry = caller_dicts[callee] + except KeyError: + caller_dicts[callee] = entry = {} + entry[caller] = entry.get(caller, 0) + 1 + callee = caller + +# print a single caller and its callers, if any +def _dump_one_caller(key, file, level=0): + l = [] + for c,v in caller_dicts[key].items(): + l.append((-v,c)) + l.sort() + leader = ' '*level + for v,c in l: + file.write("%s %6d %s:%d(%s)\n" % ((leader,-v) + func_shorten(c[-3:]))) + if caller_dicts.has_key(c): + _dump_one_caller(c, file, level+1) + +# print each call tree +def dump_caller_counts(file=sys.stdout): + keys = caller_bases.keys() + keys.sort() + for k in keys: + file.write("Callers of %s:%d(%s), %d calls:\n" + % (func_shorten(k) + (caller_bases[k],))) + _dump_one_caller(k, file) + +shorten_list = [ + ( '/scons/SCons/', 1), + ( '/src/engine/SCons/', 1), + ( '/usr/lib/python', 0), +] + +if os.sep != '/': + def platformize(t): + return (string.replace(t[0], 
'/', os.sep), t[1]) + shorten_list = map(platformize, shorten_list) + del platformize + +def func_shorten(func_tuple): + f = func_tuple[0] + for t in shorten_list: + i = string.find(f, t[0]) + if i >= 0: + if t[1]: + i = i + len(t[0]) + return (f[i:],)+func_tuple[1:] + return func_tuple + + +TraceFP = {} +if sys.platform == 'win32': + TraceDefault = 'con' +else: + TraceDefault = '/dev/tty' + +TimeStampDefault = None +StartTime = time.time() +PreviousTime = StartTime + +def Trace(msg, file=None, mode='w', tstamp=None): + """Write a trace message to a file. Whenever a file is specified, + it becomes the default for the next call to Trace().""" + global TraceDefault + global TimeStamp + global PreviousTime + if file is None: + file = TraceDefault + else: + TraceDefault = file + if tstamp is None: + tstamp = TimeStampDefault + else: + TimeStampDefault = tstamp + try: + fp = TraceFP[file] + except KeyError: + try: + fp = TraceFP[file] = open(file, mode) + except TypeError: + # Assume we were passed an open file pointer. + fp = file + if tstamp: + now = time.time() + fp.write('%8.4f %8.4f: ' % (now - StartTime, now - PreviousTime)) + PreviousTime = now + fp.write(msg) + fp.flush() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Defaults.py b/engine/SCons/Defaults.py new file mode 100644 index 0000000..0de18e7 --- /dev/null +++ b/engine/SCons/Defaults.py @@ -0,0 +1,485 @@ +"""SCons.Defaults + +Builders and other things for the local site. Here's where we'll +duplicate the functionality of autoconf until we move it into the +installation procedure or use something like qmconf. + +The code that reads the registry to find MSVC components was borrowed +from distutils.msvccompiler. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Defaults.py 4577 2009/12/27 19:43:56 scons" + + + +import os +import os.path +import errno +import shutil +import stat +import string +import time +import types +import sys + +import SCons.Action +import SCons.Builder +import SCons.CacheDir +import SCons.Environment +import SCons.PathList +import SCons.Subst +import SCons.Tool + +# A placeholder for a default Environment (for fetching source files +# from source code management systems and the like). 
This must be +# initialized later, after the top-level directory is set by the calling +# interface. +_default_env = None + +# Lazily instantiate the default environment so the overhead of creating +# it doesn't apply when it's not needed. +def _fetch_DefaultEnvironment(*args, **kw): + """ + Returns the already-created default construction environment. + """ + global _default_env + return _default_env + +def DefaultEnvironment(*args, **kw): + """ + Initial public entry point for creating the default construction + Environment. + + After creating the environment, we overwrite our name + (DefaultEnvironment) with the _fetch_DefaultEnvironment() function, + which more efficiently returns the initialized default construction + environment without checking for its existence. + + (This function still exists with its _default_check because someone + else (*cough* Script/__init__.py *cough*) may keep a reference + to this function. So we can't use the fully functional idiom of + having the name originally be a something that *only* creates the + construction environment and then overwrites the name.) + """ + global _default_env + if not _default_env: + import SCons.Util + _default_env = apply(SCons.Environment.Environment, args, kw) + if SCons.Util.md5: + _default_env.Decider('MD5') + else: + _default_env.Decider('timestamp-match') + global DefaultEnvironment + DefaultEnvironment = _fetch_DefaultEnvironment + _default_env._CacheDir_path = None + return _default_env + +# Emitters for setting the shared attribute on object files, +# and an action for checking that all of the source files +# going into a shared library are, in fact, shared. +def StaticObjectEmitter(target, source, env): + for tgt in target: + tgt.attributes.shared = None + return (target, source) + +def SharedObjectEmitter(target, source, env): + for tgt in target: + tgt.attributes.shared = 1 + return (target, source) + +def SharedFlagChecker(source, target, env): + same = env.subst('$STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME') + if same == '0' or same == '' or same == 'False': + for src in source: + try: + shared = src.attributes.shared + except AttributeError: + shared = None + if not shared: + raise SCons.Errors.UserError, "Source file: %s is static and is not compatible with shared target: %s" % (src, target[0]) + +SharedCheck = SCons.Action.Action(SharedFlagChecker, None) + +# Some people were using these variable name before we made +# SourceFileScanner part of the public interface. Don't break their +# SConscript files until we've given them some fair warning and a +# transition period. +CScan = SCons.Tool.CScanner +DScan = SCons.Tool.DScanner +LaTeXScan = SCons.Tool.LaTeXScanner +ObjSourceScan = SCons.Tool.SourceFileScanner +ProgScan = SCons.Tool.ProgramScanner + +# These aren't really tool scanners, so they don't quite belong with +# the rest of those in Tool/__init__.py, but I'm not sure where else +# they should go. Leave them here for now. +import SCons.Scanner.Dir +DirScanner = SCons.Scanner.Dir.DirScanner() +DirEntryScanner = SCons.Scanner.Dir.DirEntryScanner() + +# Actions for common languages. 
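DefaultEnvironment() above uses a self-replacing factory: the first call builds the environment and then rebinds the public name to a trivial fetch function, so later calls return the cached object without re-checking. The original function is kept around because, as its docstring notes, other modules may hold a direct reference to it. A generic standalone sketch of the pattern, with hypothetical names:

    _cached = None

    def _fetch_default():
        return _cached

    def get_default():
        # First call: do the setup, then replace ourselves with the cheap
        # fetch function, as DefaultEnvironment() does above.
        global _cached, get_default
        if _cached is None:
            _cached = {'CC': 'cc'}       # stand-in for the real Environment
            get_default = _fetch_default
        return _cached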
+CAction = SCons.Action.Action("$CCCOM", "$CCCOMSTR") +ShCAction = SCons.Action.Action("$SHCCCOM", "$SHCCCOMSTR") +CXXAction = SCons.Action.Action("$CXXCOM", "$CXXCOMSTR") +ShCXXAction = SCons.Action.Action("$SHCXXCOM", "$SHCXXCOMSTR") + +ASAction = SCons.Action.Action("$ASCOM", "$ASCOMSTR") +ASPPAction = SCons.Action.Action("$ASPPCOM", "$ASPPCOMSTR") + +LinkAction = SCons.Action.Action("$LINKCOM", "$LINKCOMSTR") +ShLinkAction = SCons.Action.Action("$SHLINKCOM", "$SHLINKCOMSTR") + +LdModuleLinkAction = SCons.Action.Action("$LDMODULECOM", "$LDMODULECOMSTR") + +# Common tasks that we allow users to perform in platform-independent +# ways by creating ActionFactory instances. +ActionFactory = SCons.Action.ActionFactory + +def get_paths_str(dest): + # If dest is a list, we need to manually call str() on each element + if SCons.Util.is_List(dest): + elem_strs = [] + for element in dest: + elem_strs.append('"' + str(element) + '"') + return '[' + string.join(elem_strs, ', ') + ']' + else: + return '"' + str(dest) + '"' + +def chmod_func(dest, mode): + SCons.Node.FS.invalidate_node_memos(dest) + if not SCons.Util.is_List(dest): + dest = [dest] + for element in dest: + os.chmod(str(element), mode) + +def chmod_strfunc(dest, mode): + return 'Chmod(%s, 0%o)' % (get_paths_str(dest), mode) + +Chmod = ActionFactory(chmod_func, chmod_strfunc) + +def copy_func(dest, src): + SCons.Node.FS.invalidate_node_memos(dest) + if SCons.Util.is_List(src) and os.path.isdir(dest): + for file in src: + shutil.copy2(file, dest) + return 0 + elif os.path.isfile(src): + return shutil.copy2(src, dest) + else: + return shutil.copytree(src, dest, 1) + +Copy = ActionFactory(copy_func, + lambda dest, src: 'Copy("%s", "%s")' % (dest, src), + convert=str) + +def delete_func(dest, must_exist=0): + SCons.Node.FS.invalidate_node_memos(dest) + if not SCons.Util.is_List(dest): + dest = [dest] + for entry in dest: + entry = str(entry) + if not must_exist and not os.path.exists(entry): + continue + if not os.path.exists(entry) or os.path.isfile(entry): + os.unlink(entry) + continue + else: + shutil.rmtree(entry, 1) + continue + +def delete_strfunc(dest, must_exist=0): + return 'Delete(%s)' % get_paths_str(dest) + +Delete = ActionFactory(delete_func, delete_strfunc) + +def mkdir_func(dest): + SCons.Node.FS.invalidate_node_memos(dest) + if not SCons.Util.is_List(dest): + dest = [dest] + for entry in dest: + try: + os.makedirs(str(entry)) + except os.error, e: + p = str(entry) + if (e[0] == errno.EEXIST or (sys.platform=='win32' and e[0]==183)) \ + and os.path.isdir(str(entry)): + pass # not an error if already exists + else: + raise + +Mkdir = ActionFactory(mkdir_func, + lambda dir: 'Mkdir(%s)' % get_paths_str(dir)) + +def move_func(dest, src): + SCons.Node.FS.invalidate_node_memos(dest) + SCons.Node.FS.invalidate_node_memos(src) + shutil.move(src, dest) + +Move = ActionFactory(move_func, + lambda dest, src: 'Move("%s", "%s")' % (dest, src), + convert=str) + +def touch_func(dest): + SCons.Node.FS.invalidate_node_memos(dest) + if not SCons.Util.is_List(dest): + dest = [dest] + for file in dest: + file = str(file) + mtime = int(time.time()) + if os.path.exists(file): + atime = os.path.getatime(file) + else: + open(file, 'w') + atime = mtime + os.utime(file, (atime, mtime)) + +Touch = ActionFactory(touch_func, + lambda file: 'Touch(%s)' % get_paths_str(file)) + +# Internal utility functions + +def _concat(prefix, list, suffix, env, f=lambda x: x, target=None, source=None): + """ + Creates a new list from 'list' by first interpolating each 
element + in the list using the 'env' dictionary and then calling f on the + list, and finally calling _concat_ixes to concatenate 'prefix' and + 'suffix' onto each element of the list. + """ + if not list: + return list + + l = f(SCons.PathList.PathList(list).subst_path(env, target, source)) + if l is not None: + list = l + + return _concat_ixes(prefix, list, suffix, env) + +def _concat_ixes(prefix, list, suffix, env): + """ + Creates a new list from 'list' by concatenating the 'prefix' and + 'suffix' arguments onto each element of the list. A trailing space + on 'prefix' or leading space on 'suffix' will cause them to be put + into separate list elements rather than being concatenated. + """ + + result = [] + + # ensure that prefix and suffix are strings + prefix = str(env.subst(prefix, SCons.Subst.SUBST_RAW)) + suffix = str(env.subst(suffix, SCons.Subst.SUBST_RAW)) + + for x in list: + if isinstance(x, SCons.Node.FS.File): + result.append(x) + continue + x = str(x) + if x: + + if prefix: + if prefix[-1] == ' ': + result.append(prefix[:-1]) + elif x[:len(prefix)] != prefix: + x = prefix + x + + result.append(x) + + if suffix: + if suffix[0] == ' ': + result.append(suffix[1:]) + elif x[-len(suffix):] != suffix: + result[-1] = result[-1]+suffix + + return result + +def _stripixes(prefix, list, suffix, stripprefixes, stripsuffixes, env, c=None): + """ + This is a wrapper around _concat()/_concat_ixes() that checks for the + existence of prefixes or suffixes on list elements and strips them + where it finds them. This is used by tools (like the GNU linker) + that need to turn something like 'libfoo.a' into '-lfoo'. + """ + + if not list: + return list + + if not callable(c): + env_c = env['_concat'] + if env_c != _concat and callable(env_c): + # There's a custom _concat() method in the construction + # environment, and we've allowed people to set that in + # the past (see test/custom-concat.py), so preserve the + # backwards compatibility. + c = env_c + else: + c = _concat_ixes + + stripprefixes = map(env.subst, SCons.Util.flatten(stripprefixes)) + stripsuffixes = map(env.subst, SCons.Util.flatten(stripsuffixes)) + + stripped = [] + for l in SCons.PathList.PathList(list).subst_path(env, None, None): + if isinstance(l, SCons.Node.FS.File): + stripped.append(l) + continue + + if not SCons.Util.is_String(l): + l = str(l) + + for stripprefix in stripprefixes: + lsp = len(stripprefix) + if l[:lsp] == stripprefix: + l = l[lsp:] + # Do not strip more than one prefix + break + + for stripsuffix in stripsuffixes: + lss = len(stripsuffix) + if l[-lss:] == stripsuffix: + l = l[:-lss] + # Do not strip more than one suffix + break + + stripped.append(l) + + return c(prefix, stripped, suffix, env) + +def processDefines(defs): + """process defines, resolving strings, lists, dictionaries, into a list of + strings + """ + if SCons.Util.is_List(defs): + l = [] + for d in defs: + if SCons.Util.is_List(d) or type(d) is types.TupleType: + l.append(str(d[0]) + '=' + str(d[1])) + else: + l.append(str(d)) + elif SCons.Util.is_Dict(defs): + # The items in a dictionary are stored in random order, but + # if the order of the command-line options changes from + # invocation to invocation, then the signature of the command + # line will change and we'll get random unnecessary rebuilds. + # Consequently, we have to sort the keys to ensure a + # consistent order... 
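To make the comment above concrete, here is the dictionary branch in isolation with invented example values (not taken from the diff); sorting the keys keeps the generated -D options, and therefore the command-line signature, identical from run to run:

    defines = {'VERSION': 3, 'NDEBUG': None, 'PLATFORM': 'posix'}
    stable = []
    for k in sorted(defines.keys()):
        if defines[k] is None:
            stable.append(str(k))
        else:
            stable.append(str(k) + '=' + str(defines[k]))
    # stable is always ['NDEBUG', 'PLATFORM=posix', 'VERSION=3'],
    # never a permutation that would change the signature and force a rebuild.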
+ l = [] + keys = defs.keys() + keys.sort() + for k in keys: + v = defs[k] + if v is None: + l.append(str(k)) + else: + l.append(str(k) + '=' + str(v)) + else: + l = [str(defs)] + return l + +def _defines(prefix, defs, suffix, env, c=_concat_ixes): + """A wrapper around _concat_ixes that turns a list or string + into a list of C preprocessor command-line definitions. + """ + + return c(prefix, env.subst_path(processDefines(defs)), suffix, env) + +class NullCmdGenerator: + """This is a callable class that can be used in place of other + command generators if you don't want them to do anything. + + The __call__ method for this class simply returns the thing + you instantiated it with. + + Example usage: + env["DO_NOTHING"] = NullCmdGenerator + env["LINKCOM"] = "${DO_NOTHING('$LINK $SOURCES $TARGET')}" + """ + + def __init__(self, cmd): + self.cmd = cmd + + def __call__(self, target, source, env, for_signature=None): + return self.cmd + +class Variable_Method_Caller: + """A class for finding a construction variable on the stack and + calling one of its methods. + + We use this to support "construction variables" in our string + eval()s that actually stand in for methods--specifically, use + of "RDirs" in call to _concat that should actually execute the + "TARGET.RDirs" method. (We used to support this by creating a little + "build dictionary" that mapped RDirs to the method, but this got in + the way of Memoizing construction environments, because we had to + create new environment objects to hold the variables.) + """ + def __init__(self, variable, method): + self.variable = variable + self.method = method + def __call__(self, *args, **kw): + try: 1/0 + except ZeroDivisionError: + # Don't start iterating with the current stack-frame to + # prevent creating reference cycles (f_back is safe). + frame = sys.exc_info()[2].tb_frame.f_back + variable = self.variable + while frame: + if frame.f_locals.has_key(variable): + v = frame.f_locals[variable] + if v: + method = getattr(v, self.method) + return apply(method, args, kw) + frame = frame.f_back + return None + +ConstructionEnvironment = { + 'BUILDERS' : {}, + 'SCANNERS' : [], + 'CONFIGUREDIR' : '#/.sconf_temp', + 'CONFIGURELOG' : '#/config.log', + 'CPPSUFFIXES' : SCons.Tool.CSuffixes, + 'DSUFFIXES' : SCons.Tool.DSuffixes, + 'ENV' : {}, + 'IDLSUFFIXES' : SCons.Tool.IDLSuffixes, +# 'LATEXSUFFIXES' : SCons.Tool.LaTeXSuffixes, # moved to the TeX tools generate functions + '_concat' : _concat, + '_defines' : _defines, + '_stripixes' : _stripixes, + '_LIBFLAGS' : '${_concat(LIBLINKPREFIX, LIBS, LIBLINKSUFFIX, __env__)}', + '_LIBDIRFLAGS' : '$( ${_concat(LIBDIRPREFIX, LIBPATH, LIBDIRSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)', + '_CPPINCFLAGS' : '$( ${_concat(INCPREFIX, CPPPATH, INCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)', + '_CPPDEFFLAGS' : '${_defines(CPPDEFPREFIX, CPPDEFINES, CPPDEFSUFFIX, __env__)}', + 'TEMPFILE' : NullCmdGenerator, + 'Dir' : Variable_Method_Caller('TARGET', 'Dir'), + 'Dirs' : Variable_Method_Caller('TARGET', 'Dirs'), + 'File' : Variable_Method_Caller('TARGET', 'File'), + 'RDirs' : Variable_Method_Caller('TARGET', 'RDirs'), +} + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Environment.py b/engine/SCons/Environment.py new file mode 100644 index 0000000..3ce1d9f --- /dev/null +++ b/engine/SCons/Environment.py @@ -0,0 +1,2327 @@ +"""SCons.Environment + +Base class for construction Environments. 
These are +the primary objects used to communicate dependency and +construction information to the build engine. + +Keyword arguments supplied when the construction Environment +is created are construction variables used to initialize the +Environment +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Environment.py 4577 2009/12/27 19:43:56 scons" + + +import copy +import os +import sys +import re +import shlex +import string +from UserDict import UserDict + +import SCons.Action +import SCons.Builder +from SCons.Debug import logInstanceCreation +import SCons.Defaults +import SCons.Errors +import SCons.Memoize +import SCons.Node +import SCons.Node.Alias +import SCons.Node.FS +import SCons.Node.Python +import SCons.Platform +import SCons.SConf +import SCons.SConsign +import SCons.Subst +import SCons.Tool +import SCons.Util +import SCons.Warnings + +class _Null: + pass + +_null = _Null + +_warn_copy_deprecated = True +_warn_source_signatures_deprecated = True +_warn_target_signatures_deprecated = True + +CleanTargets = {} +CalculatorArgs = {} + +semi_deepcopy = SCons.Util.semi_deepcopy + +# Pull UserError into the global name space for the benefit of +# Environment().SourceSignatures(), which has some import statements +# which seem to mess up its ability to reference SCons directly. +UserError = SCons.Errors.UserError + +def alias_builder(env, target, source): + pass + +AliasBuilder = SCons.Builder.Builder(action = alias_builder, + target_factory = SCons.Node.Alias.default_ans.Alias, + source_factory = SCons.Node.FS.Entry, + multi = 1, + is_explicit = None, + name='AliasBuilder') + +def apply_tools(env, tools, toolpath): + # Store the toolpath in the Environment. + if toolpath is not None: + env['toolpath'] = toolpath + + if not tools: + return + # Filter out null tools from the list. + for tool in filter(None, tools): + if SCons.Util.is_List(tool) or type(tool)==type(()): + toolname = tool[0] + toolargs = tool[1] # should be a dict of kw args + tool = apply(env.Tool, [toolname], toolargs) + else: + env.Tool(tool) + +# These names are (or will be) controlled by SCons; users should never +# set or override them. This warning can optionally be turned off, +# but scons will still ignore the illegal variable names even if it's off. 
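To illustrate the comment above with a hypothetical SConstruct fragment (the values are invented, not part of this commit): keyword arguments that collide with the reserved names listed next are reported through a ReservedVariableWarning by copy_non_reserved_keywords() and then dropped, so only the legitimate variables take effect.

    # Hypothetical usage; TARGETS is one of the reserved names below.
    env = Environment(TARGETS=['override.o'], CCFLAGS='-O2')
    # SCons warns about TARGETS and ignores it; CCFLAGS is set normally,
    # and $TARGETS keeps its per-build meaning at command-expansion time.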
+reserved_construction_var_names = [ + 'CHANGED_SOURCES', + 'CHANGED_TARGETS', + 'SOURCE', + 'SOURCES', + 'TARGET', + 'TARGETS', + 'UNCHANGED_SOURCES', + 'UNCHANGED_TARGETS', +] + +future_reserved_construction_var_names = [ + #'HOST_OS', + #'HOST_ARCH', + #'HOST_CPU', + ] + +def copy_non_reserved_keywords(dict): + result = semi_deepcopy(dict) + for k in result.keys(): + if k in reserved_construction_var_names: + msg = "Ignoring attempt to set reserved variable `$%s'" + SCons.Warnings.warn(SCons.Warnings.ReservedVariableWarning, msg % k) + del result[k] + return result + +def _set_reserved(env, key, value): + msg = "Ignoring attempt to set reserved variable `$%s'" + SCons.Warnings.warn(SCons.Warnings.ReservedVariableWarning, msg % key) + +def _set_future_reserved(env, key, value): + env._dict[key] = value + msg = "`$%s' will be reserved in a future release and setting it will become ignored" + SCons.Warnings.warn(SCons.Warnings.FutureReservedVariableWarning, msg % key) + +def _set_BUILDERS(env, key, value): + try: + bd = env._dict[key] + for k in bd.keys(): + del bd[k] + except KeyError: + bd = BuilderDict(kwbd, env) + env._dict[key] = bd + bd.update(value) + +def _del_SCANNERS(env, key): + del env._dict[key] + env.scanner_map_delete() + +def _set_SCANNERS(env, key, value): + env._dict[key] = value + env.scanner_map_delete() + +def _delete_duplicates(l, keep_last): + """Delete duplicates from a sequence, keeping the first or last.""" + seen={} + result=[] + if keep_last: # reverse in & out, then keep first + l.reverse() + for i in l: + try: + if not seen.has_key(i): + result.append(i) + seen[i]=1 + except TypeError: + # probably unhashable. Just keep it. + result.append(i) + if keep_last: + result.reverse() + return result + + + +# The following is partly based on code in a comment added by Peter +# Shannon at the following page (there called the "transplant" class): +# +# ASPN : Python Cookbook : Dynamically added methods to a class +# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/81732 +# +# We had independently been using the idiom as BuilderWrapper, but +# factoring out the common parts into this base class, and making +# BuilderWrapper a subclass that overrides __call__() to enforce specific +# Builder calling conventions, simplified some of our higher-layer code. + +class MethodWrapper: + """ + A generic Wrapper class that associates a method (which can + actually be any callable) with an object. As part of creating this + MethodWrapper object an attribute with the specified (by default, + the name of the supplied method) is added to the underlying object. + When that new "method" is called, our __call__() method adds the + object as the first argument, simulating the Python behavior of + supplying "self" on method calls. + + We hang on to the name by which the method was added to the underlying + base class so that we can provide a method to "clone" ourselves onto + a new underlying object being copied (without which we wouldn't need + to save that info). + """ + def __init__(self, object, method, name=None): + if name is None: + name = method.__name__ + self.object = object + self.method = method + self.name = name + setattr(self.object, name, self) + + def __call__(self, *args, **kwargs): + nargs = (self.object,) + args + return apply(self.method, nargs, kwargs) + + def clone(self, new_object): + """ + Returns an object that re-binds the underlying "method" to + the specified new object. 
+ """ + return self.__class__(new_object, self.method, self.name) + +class BuilderWrapper(MethodWrapper): + """ + A MethodWrapper subclass that that associates an environment with + a Builder. + + This mainly exists to wrap the __call__() function so that all calls + to Builders can have their argument lists massaged in the same way + (treat a lone argument as the source, treat two arguments as target + then source, make sure both target and source are lists) without + having to have cut-and-paste code to do it. + + As a bit of obsessive backwards compatibility, we also intercept + attempts to get or set the "env" or "builder" attributes, which were + the names we used before we put the common functionality into the + MethodWrapper base class. We'll keep this around for a while in case + people shipped Tool modules that reached into the wrapper (like the + Tool/qt.py module does, or did). There shouldn't be a lot attribute + fetching or setting on these, so a little extra work shouldn't hurt. + """ + def __call__(self, target=None, source=_null, *args, **kw): + if source is _null: + source = target + target = None + if target is not None and not SCons.Util.is_List(target): + target = [target] + if source is not None and not SCons.Util.is_List(source): + source = [source] + return apply(MethodWrapper.__call__, (self, target, source) + args, kw) + + def __repr__(self): + return '<BuilderWrapper %s>' % repr(self.name) + + def __str__(self): + return self.__repr__() + + def __getattr__(self, name): + if name == 'env': + return self.object + elif name == 'builder': + return self.method + else: + raise AttributeError, name + + def __setattr__(self, name, value): + if name == 'env': + self.object = value + elif name == 'builder': + self.method = value + else: + self.__dict__[name] = value + + # This allows a Builder to be executed directly + # through the Environment to which it's attached. + # In practice, we shouldn't need this, because + # builders actually get executed through a Node. + # But we do have a unit test for this, and can't + # yet rule out that it would be useful in the + # future, so leave it for now. + #def execute(self, **kw): + # kw['env'] = self.env + # apply(self.builder.execute, (), kw) + +class BuilderDict(UserDict): + """This is a dictionary-like class used by an Environment to hold + the Builders. We need to do this because every time someone changes + the Builders in the Environment's BUILDERS dictionary, we must + update the Environment's attributes.""" + def __init__(self, dict, env): + # Set self.env before calling the superclass initialization, + # because it will end up calling our other methods, which will + # need to point the values in this dictionary to self.env. + self.env = env + UserDict.__init__(self, dict) + + def __semi_deepcopy__(self): + return self.__class__(self.data, self.env) + + def __setitem__(self, item, val): + try: + method = getattr(self.env, item).method + except AttributeError: + pass + else: + self.env.RemoveMethod(method) + UserDict.__setitem__(self, item, val) + BuilderWrapper(self.env, val, item) + + def __delitem__(self, item): + UserDict.__delitem__(self, item) + delattr(self.env, item) + + def update(self, dict): + for i, v in dict.items(): + self.__setitem__(i, v) + + + +_is_valid_var = re.compile(r'[_a-zA-Z]\w*$') + +def is_valid_construction_var(varstr): + """Return if the specified string is a legitimate construction + variable. 
+ """ + return _is_valid_var.match(varstr) + + + +class SubstitutionEnvironment: + """Base class for different flavors of construction environments. + + This class contains a minimal set of methods that handle contruction + variable expansion and conversion of strings to Nodes, which may or + may not be actually useful as a stand-alone class. Which methods + ended up in this class is pretty arbitrary right now. They're + basically the ones which we've empirically determined are common to + the different construction environment subclasses, and most of the + others that use or touch the underlying dictionary of construction + variables. + + Eventually, this class should contain all the methods that we + determine are necessary for a "minimal" interface to the build engine. + A full "native Python" SCons environment has gotten pretty heavyweight + with all of the methods and Tools and construction variables we've + jammed in there, so it would be nice to have a lighter weight + alternative for interfaces that don't need all of the bells and + whistles. (At some point, we'll also probably rename this class + "Base," since that more reflects what we want this class to become, + but because we've released comments that tell people to subclass + Environment.Base to create their own flavors of construction + environment, we'll save that for a future refactoring when this + class actually becomes useful.) + """ + + if SCons.Memoize.use_memoizer: + __metaclass__ = SCons.Memoize.Memoized_Metaclass + + def __init__(self, **kw): + """Initialization of an underlying SubstitutionEnvironment class. + """ + if __debug__: logInstanceCreation(self, 'Environment.SubstitutionEnvironment') + self.fs = SCons.Node.FS.get_default_fs() + self.ans = SCons.Node.Alias.default_ans + self.lookup_list = SCons.Node.arg2nodes_lookups + self._dict = kw.copy() + self._init_special() + self.added_methods = [] + #self._memo = {} + + def _init_special(self): + """Initial the dispatch tables for special handling of + special construction variables.""" + self._special_del = {} + self._special_del['SCANNERS'] = _del_SCANNERS + + self._special_set = {} + for key in reserved_construction_var_names: + self._special_set[key] = _set_reserved + for key in future_reserved_construction_var_names: + self._special_set[key] = _set_future_reserved + self._special_set['BUILDERS'] = _set_BUILDERS + self._special_set['SCANNERS'] = _set_SCANNERS + + # Freeze the keys of self._special_set in a list for use by + # methods that need to check. (Empirically, list scanning has + # gotten better than dict.has_key() in Python 2.5.) + self._special_set_keys = self._special_set.keys() + + def __cmp__(self, other): + return cmp(self._dict, other._dict) + + def __delitem__(self, key): + special = self._special_del.get(key) + if special: + special(self, key) + else: + del self._dict[key] + + def __getitem__(self, key): + return self._dict[key] + + def __setitem__(self, key, value): + # This is heavily used. This implementation is the best we have + # according to the timings in bench/env.__setitem__.py. + # + # The "key in self._special_set_keys" test here seems to perform + # pretty well for the number of keys we have. A hard-coded + # list works a little better in Python 2.5, but that has the + # disadvantage of maybe getting out of sync if we ever add more + # variable names. Using self._special_set.has_key() works a + # little better in Python 2.4, but is worse then this test. 
+ # So right now it seems like a good trade-off, but feel free to + # revisit this with bench/env.__setitem__.py as needed (and + # as newer versions of Python come out). + if key in self._special_set_keys: + self._special_set[key](self, key, value) + else: + # If we already have the entry, then it's obviously a valid + # key and we don't need to check. If we do check, using a + # global, pre-compiled regular expression directly is more + # efficient than calling another function or a method. + if not self._dict.has_key(key) \ + and not _is_valid_var.match(key): + raise SCons.Errors.UserError, "Illegal construction variable `%s'" % key + self._dict[key] = value + + def get(self, key, default=None): + """Emulates the get() method of dictionaries.""" + return self._dict.get(key, default) + + def has_key(self, key): + return self._dict.has_key(key) + + def __contains__(self, key): + return self._dict.__contains__(key) + + def items(self): + return self._dict.items() + + def arg2nodes(self, args, node_factory=_null, lookup_list=_null, **kw): + if node_factory is _null: + node_factory = self.fs.File + if lookup_list is _null: + lookup_list = self.lookup_list + + if not args: + return [] + + args = SCons.Util.flatten(args) + + nodes = [] + for v in args: + if SCons.Util.is_String(v): + n = None + for l in lookup_list: + n = l(v) + if n is not None: + break + if n is not None: + if SCons.Util.is_String(n): + # n = self.subst(n, raw=1, **kw) + kw['raw'] = 1 + n = apply(self.subst, (n,), kw) + if node_factory: + n = node_factory(n) + if SCons.Util.is_List(n): + nodes.extend(n) + else: + nodes.append(n) + elif node_factory: + # v = node_factory(self.subst(v, raw=1, **kw)) + kw['raw'] = 1 + v = node_factory(apply(self.subst, (v,), kw)) + if SCons.Util.is_List(v): + nodes.extend(v) + else: + nodes.append(v) + else: + nodes.append(v) + + return nodes + + def gvars(self): + return self._dict + + def lvars(self): + return {} + + def subst(self, string, raw=0, target=None, source=None, conv=None, executor=None): + """Recursively interpolates construction variables from the + Environment into the specified string, returning the expanded + result. Construction variables are specified by a $ prefix + in the string and begin with an initial underscore or + alphabetic character followed by any number of underscores + or alphanumeric characters. The construction variable names + may be surrounded by curly braces to separate the name from + trailing characters. + """ + gvars = self.gvars() + lvars = self.lvars() + lvars['__env__'] = self + if executor: + lvars.update(executor.get_lvars()) + return SCons.Subst.scons_subst(string, self, raw, target, source, gvars, lvars, conv) + + def subst_kw(self, kw, raw=0, target=None, source=None): + nkw = {} + for k, v in kw.items(): + k = self.subst(k, raw, target, source) + if SCons.Util.is_String(v): + v = self.subst(v, raw, target, source) + nkw[k] = v + return nkw + + def subst_list(self, string, raw=0, target=None, source=None, conv=None, executor=None): + """Calls through to SCons.Subst.scons_subst_list(). 
See + the documentation for that function.""" + gvars = self.gvars() + lvars = self.lvars() + lvars['__env__'] = self + if executor: + lvars.update(executor.get_lvars()) + return SCons.Subst.scons_subst_list(string, self, raw, target, source, gvars, lvars, conv) + + def subst_path(self, path, target=None, source=None): + """Substitute a path list, turning EntryProxies into Nodes + and leaving Nodes (and other objects) as-is.""" + + if not SCons.Util.is_List(path): + path = [path] + + def s(obj): + """This is the "string conversion" routine that we have our + substitutions use to return Nodes, not strings. This relies + on the fact that an EntryProxy object has a get() method that + returns the underlying Node that it wraps, which is a bit of + architectural dependence that we might need to break or modify + in the future in response to additional requirements.""" + try: + get = obj.get + except AttributeError: + obj = SCons.Util.to_String_for_subst(obj) + else: + obj = get() + return obj + + r = [] + for p in path: + if SCons.Util.is_String(p): + p = self.subst(p, target=target, source=source, conv=s) + if SCons.Util.is_List(p): + if len(p) == 1: + p = p[0] + else: + # We have an object plus a string, or multiple + # objects that we need to smush together. No choice + # but to make them into a string. + p = string.join(map(SCons.Util.to_String_for_subst, p), '') + else: + p = s(p) + r.append(p) + return r + + subst_target_source = subst + + def backtick(self, command): + import subprocess + # common arguments + kw = { 'stdin' : 'devnull', + 'stdout' : subprocess.PIPE, + 'stderr' : subprocess.PIPE, + 'universal_newlines' : True, + } + # if the command is a list, assume it's been quoted + # othewise force a shell + if not SCons.Util.is_List(command): kw['shell'] = True + # run constructed command + #TODO(1.5) p = SCons.Action._subproc(self, command, **kw) + p = apply(SCons.Action._subproc, (self, command), kw) + out,err = p.communicate() + status = p.wait() + if err: + sys.stderr.write(err) + if status: + raise OSError("'%s' exited %d" % (command, status)) + return out + + def AddMethod(self, function, name=None): + """ + Adds the specified function as a method of this construction + environment with the specified name. If the name is omitted, + the default name is the name of the function itself. + """ + method = MethodWrapper(self, function, name) + self.added_methods.append(method) + + def RemoveMethod(self, function): + """ + Removes the specified function's MethodWrapper from the + added_methods list, so we don't re-bind it when making a clone. + """ + is_not_func = lambda dm, f=function: not dm.method is f + self.added_methods = filter(is_not_func, self.added_methods) + + def Override(self, overrides): + """ + Produce a modified environment whose variables are overriden by + the overrides dictionaries. "overrides" is a dictionary that + will override the variables of this environment. + + This function is much more efficient than Clone() or creating + a new Environment because it doesn't copy the construction + environment dictionary, it just wraps the underlying construction + environment, and doesn't even create a wrapper object if there + are no overrides. 
+ """ + if not overrides: return self + o = copy_non_reserved_keywords(overrides) + if not o: return self + overrides = {} + merges = None + for key, value in o.items(): + if key == 'parse_flags': + merges = value + else: + overrides[key] = SCons.Subst.scons_subst_once(value, self, key) + env = OverrideEnvironment(self, overrides) + if merges: env.MergeFlags(merges) + return env + + def ParseFlags(self, *flags): + """ + Parse the set of flags and return a dict with the flags placed + in the appropriate entry. The flags are treated as a typical + set of command-line flags for a GNU-like toolchain and used to + populate the entries in the dict immediately below. If one of + the flag strings begins with a bang (exclamation mark), it is + assumed to be a command and the rest of the string is executed; + the result of that evaluation is then added to the dict. + """ + dict = { + 'ASFLAGS' : SCons.Util.CLVar(''), + 'CFLAGS' : SCons.Util.CLVar(''), + 'CCFLAGS' : SCons.Util.CLVar(''), + 'CPPDEFINES' : [], + 'CPPFLAGS' : SCons.Util.CLVar(''), + 'CPPPATH' : [], + 'FRAMEWORKPATH' : SCons.Util.CLVar(''), + 'FRAMEWORKS' : SCons.Util.CLVar(''), + 'LIBPATH' : [], + 'LIBS' : [], + 'LINKFLAGS' : SCons.Util.CLVar(''), + 'RPATH' : [], + } + + # The use of the "me" parameter to provide our own name for + # recursion is an egregious hack to support Python 2.1 and before. + def do_parse(arg, me, self = self, dict = dict): + # if arg is a sequence, recurse with each element + if not arg: + return + + if not SCons.Util.is_String(arg): + for t in arg: me(t, me) + return + + # if arg is a command, execute it + if arg[0] == '!': + arg = self.backtick(arg[1:]) + + # utility function to deal with -D option + def append_define(name, dict = dict): + t = string.split(name, '=') + if len(t) == 1: + dict['CPPDEFINES'].append(name) + else: + dict['CPPDEFINES'].append([t[0], string.join(t[1:], '=')]) + + # Loop through the flags and add them to the appropriate option. + # This tries to strike a balance between checking for all possible + # flags and keeping the logic to a finite size, so it doesn't + # check for some that don't occur often. It particular, if the + # flag is not known to occur in a config script and there's a way + # of passing the flag to the right place (by wrapping it in a -W + # flag, for example) we don't check for it. Note that most + # preprocessor options are not handled, since unhandled options + # are placed in CCFLAGS, so unless the preprocessor is invoked + # separately, these flags will still get to the preprocessor. 
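A concrete example of the distribution ParseFlags() performs (the flag values are invented; the exact destinations follow from the parsing loop that continues below):

    env = Environment()
    d = env.ParseFlags('-I/opt/foo/include -DFOO=1 -L/opt/foo/lib -lfoo -pthread')
    # d['CPPPATH']    == ['/opt/foo/include']
    # d['CPPDEFINES'] == [['FOO', '1']]
    # d['LIBPATH']    == ['/opt/foo/lib']
    # d['LIBS']       == ['foo']
    # '-pthread' is appended to both d['CCFLAGS'] and d['LINKFLAGS']
    env.MergeFlags(d)   # or hand the flag string straight to MergeFlags()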
+ # Other options not currently handled: + # -iqoutedir (preprocessor search path) + # -u symbol (linker undefined symbol) + # -s (linker strip files) + # -static* (linker static binding) + # -shared* (linker dynamic binding) + # -symbolic (linker global binding) + # -R dir (deprecated linker rpath) + # IBM compilers may also accept -qframeworkdir=foo + + params = shlex.split(arg) + append_next_arg_to = None # for multi-word args + for arg in params: + if append_next_arg_to: + if append_next_arg_to == 'CPPDEFINES': + append_define(arg) + elif append_next_arg_to == '-include': + t = ('-include', self.fs.File(arg)) + dict['CCFLAGS'].append(t) + elif append_next_arg_to == '-isysroot': + t = ('-isysroot', arg) + dict['CCFLAGS'].append(t) + dict['LINKFLAGS'].append(t) + elif append_next_arg_to == '-arch': + t = ('-arch', arg) + dict['CCFLAGS'].append(t) + dict['LINKFLAGS'].append(t) + else: + dict[append_next_arg_to].append(arg) + append_next_arg_to = None + elif not arg[0] in ['-', '+']: + dict['LIBS'].append(self.fs.File(arg)) + elif arg[:2] == '-L': + if arg[2:]: + dict['LIBPATH'].append(arg[2:]) + else: + append_next_arg_to = 'LIBPATH' + elif arg[:2] == '-l': + if arg[2:]: + dict['LIBS'].append(arg[2:]) + else: + append_next_arg_to = 'LIBS' + elif arg[:2] == '-I': + if arg[2:]: + dict['CPPPATH'].append(arg[2:]) + else: + append_next_arg_to = 'CPPPATH' + elif arg[:4] == '-Wa,': + dict['ASFLAGS'].append(arg[4:]) + dict['CCFLAGS'].append(arg) + elif arg[:4] == '-Wl,': + if arg[:11] == '-Wl,-rpath=': + dict['RPATH'].append(arg[11:]) + elif arg[:7] == '-Wl,-R,': + dict['RPATH'].append(arg[7:]) + elif arg[:6] == '-Wl,-R': + dict['RPATH'].append(arg[6:]) + else: + dict['LINKFLAGS'].append(arg) + elif arg[:4] == '-Wp,': + dict['CPPFLAGS'].append(arg) + elif arg[:2] == '-D': + if arg[2:]: + append_define(arg[2:]) + else: + append_next_arg_to = 'CPPDEFINES' + elif arg == '-framework': + append_next_arg_to = 'FRAMEWORKS' + elif arg[:14] == '-frameworkdir=': + dict['FRAMEWORKPATH'].append(arg[14:]) + elif arg[:2] == '-F': + if arg[2:]: + dict['FRAMEWORKPATH'].append(arg[2:]) + else: + append_next_arg_to = 'FRAMEWORKPATH' + elif arg == '-mno-cygwin': + dict['CCFLAGS'].append(arg) + dict['LINKFLAGS'].append(arg) + elif arg == '-mwindows': + dict['LINKFLAGS'].append(arg) + elif arg == '-pthread': + dict['CCFLAGS'].append(arg) + dict['LINKFLAGS'].append(arg) + elif arg[:5] == '-std=': + dict['CFLAGS'].append(arg) # C only + elif arg[0] == '+': + dict['CCFLAGS'].append(arg) + dict['LINKFLAGS'].append(arg) + elif arg in ['-include', '-isysroot', '-arch']: + append_next_arg_to = arg + else: + dict['CCFLAGS'].append(arg) + + for arg in flags: + do_parse(arg, do_parse) + return dict + + def MergeFlags(self, args, unique=1, dict=None): + """ + Merge the dict in args into the construction variables of this + env, or the passed-in dict. If args is not a dict, it is + converted into a dict using ParseFlags. If unique is not set, + the flags are appended rather than merged. + """ + + if dict is None: + dict = self + if not SCons.Util.is_Dict(args): + args = self.ParseFlags(args) + if not unique: + apply(self.Append, (), args) + return self + for key, value in args.items(): + if not value: + continue + try: + orig = self[key] + except KeyError: + orig = value + else: + if not orig: + orig = value + elif value: + # Add orig and value. 
The logic here was lifted from + # part of env.Append() (see there for a lot of comments + # about the order in which things are tried) and is + # used mainly to handle coercion of strings to CLVar to + # "do the right thing" given (e.g.) an original CCFLAGS + # string variable like '-pipe -Wall'. + try: + orig = orig + value + except (KeyError, TypeError): + try: + add_to_orig = orig.append + except AttributeError: + value.insert(0, orig) + orig = value + else: + add_to_orig(value) + t = [] + if key[-4:] == 'PATH': + ### keep left-most occurence + for v in orig: + if v not in t: + t.append(v) + else: + ### keep right-most occurence + orig.reverse() + for v in orig: + if v not in t: + t.insert(0, v) + self[key] = t + return self + +# def MergeShellPaths(self, args, prepend=1): +# """ +# Merge the dict in args into the shell environment in env['ENV']. +# Shell path elements are appended or prepended according to prepend. + +# Uses Pre/AppendENVPath, so it always appends or prepends uniquely. + +# Example: env.MergeShellPaths({'LIBPATH': '/usr/local/lib'}) +# prepends /usr/local/lib to env['ENV']['LIBPATH']. +# """ + +# for pathname, pathval in args.items(): +# if not pathval: +# continue +# if prepend: +# apply(self.PrependENVPath, (pathname, pathval)) +# else: +# apply(self.AppendENVPath, (pathname, pathval)) + + +# Used by the FindSourceFiles() method, below. +# Stuck here for support of pre-2.2 Python versions. +def build_source(ss, result): + for s in ss: + if isinstance(s, SCons.Node.FS.Dir): + build_source(s.all_children(), result) + elif s.has_builder(): + build_source(s.sources, result) + elif isinstance(s.disambiguate(), SCons.Node.FS.File): + result.append(s) + +def default_decide_source(dependency, target, prev_ni): + f = SCons.Defaults.DefaultEnvironment().decide_source + return f(dependency, target, prev_ni) + +def default_decide_target(dependency, target, prev_ni): + f = SCons.Defaults.DefaultEnvironment().decide_target + return f(dependency, target, prev_ni) + +def default_copy_from_cache(src, dst): + f = SCons.Defaults.DefaultEnvironment().copy_from_cache + return f(src, dst) + +class Base(SubstitutionEnvironment): + """Base class for "real" construction Environments. These are the + primary objects used to communicate dependency and construction + information to the build engine. + + Keyword arguments supplied when the construction Environment + is created are construction variables used to initialize the + Environment. + """ + + memoizer_counters = [] + + ####################################################################### + # This is THE class for interacting with the SCons build engine, + # and it contains a lot of stuff, so we're going to try to keep this + # a little organized by grouping the methods. + ####################################################################### + + ####################################################################### + # Methods that make an Environment act like a dictionary. These have + # the expected standard names for Python mapping objects. Note that + # we don't actually make an Environment a subclass of UserDict for + # performance reasons. Note also that we only supply methods for + # dictionary functionality that we actually need and use. 
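As a quick illustration of the mapping-style access described in the comment above (a hypothetical SConscript fragment, not part of the commit):

    env = Environment(CC='gcc')
    env['CCFLAGS'] = '-O2'            # __setitem__ validates the variable name
    flags = env.get('CCFLAGS', '')    # dictionary-style get() with a default
    if 'CXX' in env:                  # __contains__
        del env['CXX']                # __delitem__ (SCANNERS gets special handling)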
+ ####################################################################### + + def __init__(self, + platform=None, + tools=None, + toolpath=None, + variables=None, + parse_flags = None, + **kw): + """ + Initialization of a basic SCons construction environment, + including setting up special construction variables like BUILDER, + PLATFORM, etc., and searching for and applying available Tools. + + Note that we do *not* call the underlying base class + (SubsitutionEnvironment) initialization, because we need to + initialize things in a very specific order that doesn't work + with the much simpler base class initialization. + """ + if __debug__: logInstanceCreation(self, 'Environment.Base') + self._memo = {} + self.fs = SCons.Node.FS.get_default_fs() + self.ans = SCons.Node.Alias.default_ans + self.lookup_list = SCons.Node.arg2nodes_lookups + self._dict = semi_deepcopy(SCons.Defaults.ConstructionEnvironment) + self._init_special() + self.added_methods = [] + + # We don't use AddMethod, or define these as methods in this + # class, because we *don't* want these functions to be bound + # methods. They need to operate independently so that the + # settings will work properly regardless of whether a given + # target ends up being built with a Base environment or an + # OverrideEnvironment or what have you. + self.decide_target = default_decide_target + self.decide_source = default_decide_source + + self.copy_from_cache = default_copy_from_cache + + self._dict['BUILDERS'] = BuilderDict(self._dict['BUILDERS'], self) + + if platform is None: + platform = self._dict.get('PLATFORM', None) + if platform is None: + platform = SCons.Platform.Platform() + if SCons.Util.is_String(platform): + platform = SCons.Platform.Platform(platform) + self._dict['PLATFORM'] = str(platform) + platform(self) + + self._dict['HOST_OS'] = self._dict.get('HOST_OS',None) + self._dict['HOST_ARCH'] = self._dict.get('HOST_ARCH',None) + + # Now set defaults for TARGET_{OS|ARCH} + self._dict['TARGET_OS'] = self._dict.get('HOST_OS',None) + self._dict['TARGET_ARCH'] = self._dict.get('HOST_ARCH',None) + + + # Apply the passed-in and customizable variables to the + # environment before calling the tools, because they may use + # some of them during initialization. + if kw.has_key('options'): + # Backwards compatibility: they may stll be using the + # old "options" keyword. + variables = kw['options'] + del kw['options'] + apply(self.Replace, (), kw) + keys = kw.keys() + if variables: + keys = keys + variables.keys() + variables.Update(self) + + save = {} + for k in keys: + try: + save[k] = self._dict[k] + except KeyError: + # No value may have been set if they tried to pass in a + # reserved variable name like TARGETS. + pass + + SCons.Tool.Initializers(self) + + if tools is None: + tools = self._dict.get('TOOLS', None) + if tools is None: + tools = ['default'] + apply_tools(self, tools, toolpath) + + # Now restore the passed-in and customized variables + # to the environment, since the values the user set explicitly + # should override any values set by the tools. + for key, val in save.items(): + self._dict[key] = val + + # Finally, apply any flags to be merged in + if parse_flags: self.MergeFlags(parse_flags) + + ####################################################################### + # Utility methods that are primarily for internal use by SCons. + # These begin with lower-case letters. 
+ ####################################################################### + + def get_builder(self, name): + """Fetch the builder with the specified name from the environment. + """ + try: + return self._dict['BUILDERS'][name] + except KeyError: + return None + + def get_CacheDir(self): + try: + path = self._CacheDir_path + except AttributeError: + path = SCons.Defaults.DefaultEnvironment()._CacheDir_path + try: + if path == self._last_CacheDir_path: + return self._last_CacheDir + except AttributeError: + pass + cd = SCons.CacheDir.CacheDir(path) + self._last_CacheDir_path = path + self._last_CacheDir = cd + return cd + + def get_factory(self, factory, default='File'): + """Return a factory function for creating Nodes for this + construction environment. + """ + name = default + try: + is_node = issubclass(factory, SCons.Node.FS.Base) + except TypeError: + # The specified factory isn't a Node itself--it's + # most likely None, or possibly a callable. + pass + else: + if is_node: + # The specified factory is a Node (sub)class. Try to + # return the FS method that corresponds to the Node's + # name--that is, we return self.fs.Dir if they want a Dir, + # self.fs.File for a File, etc. + try: name = factory.__name__ + except AttributeError: pass + else: factory = None + if not factory: + # They passed us None, or we picked up a name from a specified + # class, so return the FS method. (Note that we *don't* + # use our own self.{Dir,File} methods because that would + # cause env.subst() to be called twice on the file name, + # interfering with files that have $$ in them.) + factory = getattr(self.fs, name) + return factory + + memoizer_counters.append(SCons.Memoize.CountValue('_gsm')) + + def _gsm(self): + try: + return self._memo['_gsm'] + except KeyError: + pass + + result = {} + + try: + scanners = self._dict['SCANNERS'] + except KeyError: + pass + else: + # Reverse the scanner list so that, if multiple scanners + # claim they can scan the same suffix, earlier scanners + # in the list will overwrite later scanners, so that + # the result looks like a "first match" to the user. + if not SCons.Util.is_List(scanners): + scanners = [scanners] + else: + scanners = scanners[:] # copy so reverse() doesn't mod original + scanners.reverse() + for scanner in scanners: + for k in scanner.get_skeys(self): + if k and self['PLATFORM'] == 'win32': + k = string.lower(k) + result[k] = scanner + + self._memo['_gsm'] = result + + return result + + def get_scanner(self, skey): + """Find the appropriate scanner given a key (usually a file suffix). + """ + if skey and self['PLATFORM'] == 'win32': + skey = string.lower(skey) + return self._gsm().get(skey) + + def scanner_map_delete(self, kw=None): + """Delete the cached scanner map (if we need to). + """ + try: + del self._memo['_gsm'] + except KeyError: + pass + + def _update(self, dict): + """Update an environment's values directly, bypassing the normal + checks that occur when users try to set items. + """ + self._dict.update(dict) + + def get_src_sig_type(self): + try: + return self.src_sig_type + except AttributeError: + t = SCons.Defaults.DefaultEnvironment().src_sig_type + self.src_sig_type = t + return t + + def get_tgt_sig_type(self): + try: + return self.tgt_sig_type + except AttributeError: + t = SCons.Defaults.DefaultEnvironment().tgt_sig_type + self.tgt_sig_type = t + return t + + ####################################################################### + # Public methods for manipulating an Environment. These begin with + # upper-case letters. 
The essential characteristic of methods in + # this section is that they do *not* have corresponding same-named + # global functions. For example, a stand-alone Append() function + # makes no sense, because Append() is all about appending values to + # an Environment's construction variables. + ####################################################################### + + def Append(self, **kw): + """Append values to existing construction variables + in an Environment. + """ + kw = copy_non_reserved_keywords(kw) + for key, val in kw.items(): + # It would be easier on the eyes to write this using + # "continue" statements whenever we finish processing an item, + # but Python 1.5.2 apparently doesn't let you use "continue" + # within try:-except: blocks, so we have to nest our code. + try: + orig = self._dict[key] + except KeyError: + # No existing variable in the environment, so just set + # it to the new value. + self._dict[key] = val + else: + try: + # Check if the original looks like a dictionary. + # If it is, we can't just try adding the value because + # dictionaries don't have __add__() methods, and + # things like UserList will incorrectly coerce the + # original dict to a list (which we don't want). + update_dict = orig.update + except AttributeError: + try: + # Most straightforward: just try to add them + # together. This will work in most cases, when the + # original and new values are of compatible types. + self._dict[key] = orig + val + except (KeyError, TypeError): + try: + # Check if the original is a list. + add_to_orig = orig.append + except AttributeError: + # The original isn't a list, but the new + # value is (by process of elimination), + # so insert the original in the new value + # (if there's one to insert) and replace + # the variable with it. + if orig: + val.insert(0, orig) + self._dict[key] = val + else: + # The original is a list, so append the new + # value to it (if there's a value to append). + if val: + add_to_orig(val) + else: + # The original looks like a dictionary, so update it + # based on what we think the value looks like. + if SCons.Util.is_List(val): + for v in val: + orig[v] = None + else: + try: + update_dict(val) + except (AttributeError, TypeError, ValueError): + if SCons.Util.is_Dict(val): + for k, v in val.items(): + orig[k] = v + else: + orig[val] = None + self.scanner_map_delete(kw) + + # allow Dirs and strings beginning with # for top-relative + # Note this uses the current env's fs (in self). + def _canonicalize(self, path): + if not SCons.Util.is_String(path): # typically a Dir + path = str(path) + if path and path[0] == '#': + path = str(self.fs.Dir(path)) + return path + + def AppendENVPath(self, name, newpath, envname = 'ENV', + sep = os.pathsep, delete_existing=1): + """Append path elements to the path 'name' in the 'ENV' + dictionary for this environment. Will only add any particular + path once, and will normpath and normcase all paths to help + assure this. This can also handle the case where the env + variable is a list instead of a string. + + If delete_existing is 0, a newpath which is already in the path + will not be moved to the end (it will be left where it is). 
+ """ + + orig = '' + if self._dict.has_key(envname) and self._dict[envname].has_key(name): + orig = self._dict[envname][name] + + nv = SCons.Util.AppendPath(orig, newpath, sep, delete_existing, + canonicalize=self._canonicalize) + + if not self._dict.has_key(envname): + self._dict[envname] = {} + + self._dict[envname][name] = nv + + def AppendUnique(self, delete_existing=0, **kw): + """Append values to existing construction variables + in an Environment, if they're not already there. + If delete_existing is 1, removes existing values first, so + values move to end. + """ + kw = copy_non_reserved_keywords(kw) + for key, val in kw.items(): + if SCons.Util.is_List(val): + val = _delete_duplicates(val, delete_existing) + if not self._dict.has_key(key) or self._dict[key] in ('', None): + self._dict[key] = val + elif SCons.Util.is_Dict(self._dict[key]) and \ + SCons.Util.is_Dict(val): + self._dict[key].update(val) + elif SCons.Util.is_List(val): + dk = self._dict[key] + if not SCons.Util.is_List(dk): + dk = [dk] + if delete_existing: + dk = filter(lambda x, val=val: x not in val, dk) + else: + val = filter(lambda x, dk=dk: x not in dk, val) + self._dict[key] = dk + val + else: + dk = self._dict[key] + if SCons.Util.is_List(dk): + # By elimination, val is not a list. Since dk is a + # list, wrap val in a list first. + if delete_existing: + dk = filter(lambda x, val=val: x not in val, dk) + self._dict[key] = dk + [val] + else: + if not val in dk: + self._dict[key] = dk + [val] + else: + if delete_existing: + dk = filter(lambda x, val=val: x not in val, dk) + self._dict[key] = dk + val + self.scanner_map_delete(kw) + + def Clone(self, tools=[], toolpath=None, parse_flags = None, **kw): + """Return a copy of a construction Environment. The + copy is like a Python "deep copy"--that is, independent + copies are made recursively of each objects--except that + a reference is copied when an object is not deep-copyable + (like a function). There are no references to any mutable + objects in the original Environment. + """ + clone = copy.copy(self) + clone._dict = semi_deepcopy(self._dict) + + try: + cbd = clone._dict['BUILDERS'] + except KeyError: + pass + else: + clone._dict['BUILDERS'] = BuilderDict(cbd, clone) + + # Check the methods added via AddMethod() and re-bind them to + # the cloned environment. Only do this if the attribute hasn't + # been overwritten by the user explicitly and still points to + # the added method. + clone.added_methods = [] + for mw in self.added_methods: + if mw == getattr(self, mw.name): + clone.added_methods.append(mw.clone(clone)) + + clone._memo = {} + + # Apply passed-in variables before the tools + # so the tools can use the new variables + kw = copy_non_reserved_keywords(kw) + new = {} + for key, value in kw.items(): + new[key] = SCons.Subst.scons_subst_once(value, self, key) + apply(clone.Replace, (), new) + + apply_tools(clone, tools, toolpath) + + # apply them again in case the tools overwrote them + apply(clone.Replace, (), new) + + # Finally, apply any flags to be merged in + if parse_flags: clone.MergeFlags(parse_flags) + + if __debug__: logInstanceCreation(self, 'Environment.EnvironmentClone') + return clone + + def Copy(self, *args, **kw): + global _warn_copy_deprecated + if _warn_copy_deprecated: + msg = "The env.Copy() method is deprecated; use the env.Clone() method instead." 
+ SCons.Warnings.warn(SCons.Warnings.DeprecatedCopyWarning, msg) + _warn_copy_deprecated = False + return apply(self.Clone, args, kw) + + def _changed_build(self, dependency, target, prev_ni): + if dependency.changed_state(target, prev_ni): + return 1 + return self.decide_source(dependency, target, prev_ni) + + def _changed_content(self, dependency, target, prev_ni): + return dependency.changed_content(target, prev_ni) + + def _changed_source(self, dependency, target, prev_ni): + target_env = dependency.get_build_env() + type = target_env.get_tgt_sig_type() + if type == 'source': + return target_env.decide_source(dependency, target, prev_ni) + else: + return target_env.decide_target(dependency, target, prev_ni) + + def _changed_timestamp_then_content(self, dependency, target, prev_ni): + return dependency.changed_timestamp_then_content(target, prev_ni) + + def _changed_timestamp_newer(self, dependency, target, prev_ni): + return dependency.changed_timestamp_newer(target, prev_ni) + + def _changed_timestamp_match(self, dependency, target, prev_ni): + return dependency.changed_timestamp_match(target, prev_ni) + + def _copy_from_cache(self, src, dst): + return self.fs.copy(src, dst) + + def _copy2_from_cache(self, src, dst): + return self.fs.copy2(src, dst) + + def Decider(self, function): + copy_function = self._copy2_from_cache + if function in ('MD5', 'content'): + if not SCons.Util.md5: + raise UserError, "MD5 signatures are not available in this version of Python." + function = self._changed_content + elif function == 'MD5-timestamp': + function = self._changed_timestamp_then_content + elif function in ('timestamp-newer', 'make'): + function = self._changed_timestamp_newer + copy_function = self._copy_from_cache + elif function == 'timestamp-match': + function = self._changed_timestamp_match + elif not callable(function): + raise UserError, "Unknown Decider value %s" % repr(function) + + # We don't use AddMethod because we don't want to turn the + # function, which only expects three arguments, into a bound + # method, which would add self as an initial, fourth argument. + self.decide_target = function + self.decide_source = function + + self.copy_from_cache = copy_function + + def Detect(self, progs): + """Return the first available program in progs. + """ + if not SCons.Util.is_List(progs): + progs = [ progs ] + for prog in progs: + path = self.WhereIs(prog) + if path: return prog + return None + + def Dictionary(self, *args): + if not args: + return self._dict + dlist = map(lambda x, s=self: s._dict[x], args) + if len(dlist) == 1: + dlist = dlist[0] + return dlist + + def Dump(self, key = None): + """ + Using the standard Python pretty printer, dump the contents of the + scons build environment to stdout. + + If the key passed in is anything other than None, then that will + be used as an index into the build environment dictionary and + whatever is found there will be fed into the pretty printer. Note + that this key is case sensitive. + """ + import pprint + pp = pprint.PrettyPrinter(indent=2) + if key: + dict = self.Dictionary(key) + else: + dict = self.Dictionary() + return pp.pformat(dict) + + def FindIxes(self, paths, prefix, suffix): + """ + Search a list of paths for something that matches the prefix and suffix. + + paths - the list of paths or nodes. + prefix - construction variable for the prefix. + suffix - construction variable for the suffix. 
+ """ + + suffix = self.subst('$'+suffix) + prefix = self.subst('$'+prefix) + + for path in paths: + dir,name = os.path.split(str(path)) + if name[:len(prefix)] == prefix and name[-len(suffix):] == suffix: + return path + + def ParseConfig(self, command, function=None, unique=1): + """ + Use the specified function to parse the output of the command + in order to modify the current environment. The 'command' can + be a string or a list of strings representing a command and + its arguments. 'Function' is an optional argument that takes + the environment, the output of the command, and the unique flag. + If no function is specified, MergeFlags, which treats the output + as the result of a typical 'X-config' command (i.e. gtk-config), + will merge the output into the appropriate variables. + """ + if function is None: + def parse_conf(env, cmd, unique=unique): + return env.MergeFlags(cmd, unique) + function = parse_conf + if SCons.Util.is_List(command): + command = string.join(command) + command = self.subst(command) + return function(self, self.backtick(command)) + + def ParseDepends(self, filename, must_exist=None, only_one=0): + """ + Parse a mkdep-style file for explicit dependencies. This is + completely abusable, and should be unnecessary in the "normal" + case of proper SCons configuration, but it may help make + the transition from a Make hierarchy easier for some people + to swallow. It can also be genuinely useful when using a tool + that can write a .d file, but for which writing a scanner would + be too complicated. + """ + filename = self.subst(filename) + try: + fp = open(filename, 'r') + except IOError: + if must_exist: + raise + return + lines = SCons.Util.LogicalLines(fp).readlines() + lines = filter(lambda l: l[0] != '#', lines) + tdlist = [] + for line in lines: + try: + target, depends = string.split(line, ':', 1) + except (AttributeError, TypeError, ValueError): + # Python 1.5.2 throws TypeError if line isn't a string, + # Python 2.x throws AttributeError because it tries + # to call line.split(). Either can throw ValueError + # if the line doesn't split into two or more elements. + pass + else: + tdlist.append((string.split(target), string.split(depends))) + if only_one: + targets = reduce(lambda x, y: x+y, map(lambda p: p[0], tdlist)) + if len(targets) > 1: + raise SCons.Errors.UserError, "More than one dependency target found in `%s': %s" % (filename, targets) + for target, depends in tdlist: + self.Depends(target, depends) + + def Platform(self, platform): + platform = self.subst(platform) + return SCons.Platform.Platform(platform)(self) + + def Prepend(self, **kw): + """Prepend values to existing construction variables + in an Environment. + """ + kw = copy_non_reserved_keywords(kw) + for key, val in kw.items(): + # It would be easier on the eyes to write this using + # "continue" statements whenever we finish processing an item, + # but Python 1.5.2 apparently doesn't let you use "continue" + # within try:-except: blocks, so we have to nest our code. + try: + orig = self._dict[key] + except KeyError: + # No existing variable in the environment, so just set + # it to the new value. + self._dict[key] = val + else: + try: + # Check if the original looks like a dictionary. + # If it is, we can't just try adding the value because + # dictionaries don't have __add__() methods, and + # things like UserList will incorrectly coerce the + # original dict to a list (which we don't want). 
+ update_dict = orig.update + except AttributeError: + try: + # Most straightforward: just try to add them + # together. This will work in most cases, when the + # original and new values are of compatible types. + self._dict[key] = val + orig + except (KeyError, TypeError): + try: + # Check if the added value is a list. + add_to_val = val.append + except AttributeError: + # The added value isn't a list, but the + # original is (by process of elimination), + # so insert the the new value in the original + # (if there's one to insert). + if val: + orig.insert(0, val) + else: + # The added value is a list, so append + # the original to it (if there's a value + # to append). + if orig: + add_to_val(orig) + self._dict[key] = val + else: + # The original looks like a dictionary, so update it + # based on what we think the value looks like. + if SCons.Util.is_List(val): + for v in val: + orig[v] = None + else: + try: + update_dict(val) + except (AttributeError, TypeError, ValueError): + if SCons.Util.is_Dict(val): + for k, v in val.items(): + orig[k] = v + else: + orig[val] = None + self.scanner_map_delete(kw) + + def PrependENVPath(self, name, newpath, envname = 'ENV', sep = os.pathsep, + delete_existing=1): + """Prepend path elements to the path 'name' in the 'ENV' + dictionary for this environment. Will only add any particular + path once, and will normpath and normcase all paths to help + assure this. This can also handle the case where the env + variable is a list instead of a string. + + If delete_existing is 0, a newpath which is already in the path + will not be moved to the front (it will be left where it is). + """ + + orig = '' + if self._dict.has_key(envname) and self._dict[envname].has_key(name): + orig = self._dict[envname][name] + + nv = SCons.Util.PrependPath(orig, newpath, sep, delete_existing, + canonicalize=self._canonicalize) + + if not self._dict.has_key(envname): + self._dict[envname] = {} + + self._dict[envname][name] = nv + + def PrependUnique(self, delete_existing=0, **kw): + """Prepend values to existing construction variables + in an Environment, if they're not already there. + If delete_existing is 1, removes existing values first, so + values move to front. + """ + kw = copy_non_reserved_keywords(kw) + for key, val in kw.items(): + if SCons.Util.is_List(val): + val = _delete_duplicates(val, not delete_existing) + if not self._dict.has_key(key) or self._dict[key] in ('', None): + self._dict[key] = val + elif SCons.Util.is_Dict(self._dict[key]) and \ + SCons.Util.is_Dict(val): + self._dict[key].update(val) + elif SCons.Util.is_List(val): + dk = self._dict[key] + if not SCons.Util.is_List(dk): + dk = [dk] + if delete_existing: + dk = filter(lambda x, val=val: x not in val, dk) + else: + val = filter(lambda x, dk=dk: x not in dk, val) + self._dict[key] = val + dk + else: + dk = self._dict[key] + if SCons.Util.is_List(dk): + # By elimination, val is not a list. Since dk is a + # list, wrap val in a list first. + if delete_existing: + dk = filter(lambda x, val=val: x not in val, dk) + self._dict[key] = [val] + dk + else: + if not val in dk: + self._dict[key] = [val] + dk + else: + if delete_existing: + dk = filter(lambda x, val=val: x not in val, dk) + self._dict[key] = val + dk + self.scanner_map_delete(kw) + + def Replace(self, **kw): + """Replace existing construction variables in an Environment + with new construction variables and/or values. 
+ """ + try: + kwbd = kw['BUILDERS'] + except KeyError: + pass + else: + kwbd = semi_deepcopy(kwbd) + del kw['BUILDERS'] + self.__setitem__('BUILDERS', kwbd) + kw = copy_non_reserved_keywords(kw) + self._update(semi_deepcopy(kw)) + self.scanner_map_delete(kw) + + def ReplaceIxes(self, path, old_prefix, old_suffix, new_prefix, new_suffix): + """ + Replace old_prefix with new_prefix and old_suffix with new_suffix. + + env - Environment used to interpolate variables. + path - the path that will be modified. + old_prefix - construction variable for the old prefix. + old_suffix - construction variable for the old suffix. + new_prefix - construction variable for the new prefix. + new_suffix - construction variable for the new suffix. + """ + old_prefix = self.subst('$'+old_prefix) + old_suffix = self.subst('$'+old_suffix) + + new_prefix = self.subst('$'+new_prefix) + new_suffix = self.subst('$'+new_suffix) + + dir,name = os.path.split(str(path)) + if name[:len(old_prefix)] == old_prefix: + name = name[len(old_prefix):] + if name[-len(old_suffix):] == old_suffix: + name = name[:-len(old_suffix)] + return os.path.join(dir, new_prefix+name+new_suffix) + + def SetDefault(self, **kw): + for k in kw.keys(): + if self._dict.has_key(k): + del kw[k] + apply(self.Replace, (), kw) + + def _find_toolpath_dir(self, tp): + return self.fs.Dir(self.subst(tp)).srcnode().abspath + + def Tool(self, tool, toolpath=None, **kw): + if SCons.Util.is_String(tool): + tool = self.subst(tool) + if toolpath is None: + toolpath = self.get('toolpath', []) + toolpath = map(self._find_toolpath_dir, toolpath) + tool = apply(SCons.Tool.Tool, (tool, toolpath), kw) + tool(self) + + def WhereIs(self, prog, path=None, pathext=None, reject=[]): + """Find prog in the path. + """ + if path is None: + try: + path = self['ENV']['PATH'] + except KeyError: + pass + elif SCons.Util.is_String(path): + path = self.subst(path) + if pathext is None: + try: + pathext = self['ENV']['PATHEXT'] + except KeyError: + pass + elif SCons.Util.is_String(pathext): + pathext = self.subst(pathext) + prog = self.subst(prog) + path = SCons.Util.WhereIs(prog, path, pathext, reject) + if path: return path + return None + + ####################################################################### + # Public methods for doing real "SCons stuff" (manipulating + # dependencies, setting attributes on targets, etc.). These begin + # with upper-case letters. The essential characteristic of methods + # in this section is that they all *should* have corresponding + # same-named global functions. 
+ ####################################################################### + + def Action(self, *args, **kw): + def subst_string(a, self=self): + if SCons.Util.is_String(a): + a = self.subst(a) + return a + nargs = map(subst_string, args) + nkw = self.subst_kw(kw) + return apply(SCons.Action.Action, nargs, nkw) + + def AddPreAction(self, files, action): + nodes = self.arg2nodes(files, self.fs.Entry) + action = SCons.Action.Action(action) + uniq = {} + for executor in map(lambda n: n.get_executor(), nodes): + uniq[executor] = 1 + for executor in uniq.keys(): + executor.add_pre_action(action) + return nodes + + def AddPostAction(self, files, action): + nodes = self.arg2nodes(files, self.fs.Entry) + action = SCons.Action.Action(action) + uniq = {} + for executor in map(lambda n: n.get_executor(), nodes): + uniq[executor] = 1 + for executor in uniq.keys(): + executor.add_post_action(action) + return nodes + + def Alias(self, target, source=[], action=None, **kw): + tlist = self.arg2nodes(target, self.ans.Alias) + if not SCons.Util.is_List(source): + source = [source] + source = filter(None, source) + + if not action: + if not source: + # There are no source files and no action, so just + # return a target list of classic Alias Nodes, without + # any builder. The externally visible effect is that + # this will make the wrapping Script.BuildTask class + # say that there's "Nothing to be done" for this Alias, + # instead of that it's "up to date." + return tlist + + # No action, but there are sources. Re-call all the target + # builders to add the sources to each target. + result = [] + for t in tlist: + bld = t.get_builder(AliasBuilder) + result.extend(bld(self, t, source)) + return result + + nkw = self.subst_kw(kw) + nkw.update({ + 'action' : SCons.Action.Action(action), + 'source_factory' : self.fs.Entry, + 'multi' : 1, + 'is_explicit' : None, + }) + bld = apply(SCons.Builder.Builder, (), nkw) + + # Apply the Builder separately to each target so that the Aliases + # stay separate. If we did one "normal" Builder call with the + # whole target list, then all of the target Aliases would be + # associated under a single Executor. + result = [] + for t in tlist: + # Calling the convert() method will cause a new Executor to be + # created from scratch, so we have to explicitly initialize + # it with the target's existing sources, plus our new ones, + # so nothing gets lost. 
+ b = t.get_builder() + if b is None or b is AliasBuilder: + b = bld + else: + nkw['action'] = b.action + action + b = apply(SCons.Builder.Builder, (), nkw) + t.convert() + result.extend(b(self, t, t.sources + source)) + return result + + def AlwaysBuild(self, *targets): + tlist = [] + for t in targets: + tlist.extend(self.arg2nodes(t, self.fs.Entry)) + for t in tlist: + t.set_always_build() + return tlist + + def BuildDir(self, *args, **kw): + if kw.has_key('build_dir'): + kw['variant_dir'] = kw['build_dir'] + del kw['build_dir'] + return apply(self.VariantDir, args, kw) + + def Builder(self, **kw): + nkw = self.subst_kw(kw) + return apply(SCons.Builder.Builder, [], nkw) + + def CacheDir(self, path): + import SCons.CacheDir + if path is not None: + path = self.subst(path) + self._CacheDir_path = path + + def Clean(self, targets, files): + global CleanTargets + tlist = self.arg2nodes(targets, self.fs.Entry) + flist = self.arg2nodes(files, self.fs.Entry) + for t in tlist: + try: + CleanTargets[t].extend(flist) + except KeyError: + CleanTargets[t] = flist + + def Configure(self, *args, **kw): + nargs = [self] + if args: + nargs = nargs + self.subst_list(args)[0] + nkw = self.subst_kw(kw) + nkw['_depth'] = kw.get('_depth', 0) + 1 + try: + nkw['custom_tests'] = self.subst_kw(nkw['custom_tests']) + except KeyError: + pass + return apply(SCons.SConf.SConf, nargs, nkw) + + def Command(self, target, source, action, **kw): + """Builds the supplied target files from the supplied + source files using the supplied action. Action may + be any type that the Builder constructor will accept + for an action.""" + bkw = { + 'action' : action, + 'target_factory' : self.fs.Entry, + 'source_factory' : self.fs.Entry, + } + try: bkw['source_scanner'] = kw['source_scanner'] + except KeyError: pass + else: del kw['source_scanner'] + bld = apply(SCons.Builder.Builder, (), bkw) + return apply(bld, (self, target, source), kw) + + def Depends(self, target, dependency): + """Explicity specify that 'target's depend on 'dependency'.""" + tlist = self.arg2nodes(target, self.fs.Entry) + dlist = self.arg2nodes(dependency, self.fs.Entry) + for t in tlist: + t.add_dependency(dlist) + return tlist + + def Dir(self, name, *args, **kw): + """ + """ + s = self.subst(name) + if SCons.Util.is_Sequence(s): + result=[] + for e in s: + result.append(apply(self.fs.Dir, (e,) + args, kw)) + return result + return apply(self.fs.Dir, (s,) + args, kw) + + def NoClean(self, *targets): + """Tags a target so that it will not be cleaned by -c""" + tlist = [] + for t in targets: + tlist.extend(self.arg2nodes(t, self.fs.Entry)) + for t in tlist: + t.set_noclean() + return tlist + + def NoCache(self, *targets): + """Tags a target so that it will not be cached""" + tlist = [] + for t in targets: + tlist.extend(self.arg2nodes(t, self.fs.Entry)) + for t in tlist: + t.set_nocache() + return tlist + + def Entry(self, name, *args, **kw): + """ + """ + s = self.subst(name) + if SCons.Util.is_Sequence(s): + result=[] + for e in s: + result.append(apply(self.fs.Entry, (e,) + args, kw)) + return result + return apply(self.fs.Entry, (s,) + args, kw) + + def Environment(self, **kw): + return apply(SCons.Environment.Environment, [], self.subst_kw(kw)) + + def Execute(self, action, *args, **kw): + """Directly execute an action through an Environment + """ + action = apply(self.Action, (action,) + args, kw) + result = action([], [], self) + if isinstance(result, SCons.Errors.BuildError): + errstr = result.errstr + if result.filename: + errstr = result.filename + 
': ' + errstr + sys.stderr.write("scons: *** %s\n" % errstr) + return result.status + else: + return result + + def File(self, name, *args, **kw): + """ + """ + s = self.subst(name) + if SCons.Util.is_Sequence(s): + result=[] + for e in s: + result.append(apply(self.fs.File, (e,) + args, kw)) + return result + return apply(self.fs.File, (s,) + args, kw) + + def FindFile(self, file, dirs): + file = self.subst(file) + nodes = self.arg2nodes(dirs, self.fs.Dir) + return SCons.Node.FS.find_file(file, tuple(nodes)) + + def Flatten(self, sequence): + return SCons.Util.flatten(sequence) + + def GetBuildPath(self, files): + result = map(str, self.arg2nodes(files, self.fs.Entry)) + if SCons.Util.is_List(files): + return result + else: + return result[0] + + def Glob(self, pattern, ondisk=True, source=False, strings=False): + return self.fs.Glob(self.subst(pattern), ondisk, source, strings) + + def Ignore(self, target, dependency): + """Ignore a dependency.""" + tlist = self.arg2nodes(target, self.fs.Entry) + dlist = self.arg2nodes(dependency, self.fs.Entry) + for t in tlist: + t.add_ignore(dlist) + return tlist + + def Literal(self, string): + return SCons.Subst.Literal(string) + + def Local(self, *targets): + ret = [] + for targ in targets: + if isinstance(targ, SCons.Node.Node): + targ.set_local() + ret.append(targ) + else: + for t in self.arg2nodes(targ, self.fs.Entry): + t.set_local() + ret.append(t) + return ret + + def Precious(self, *targets): + tlist = [] + for t in targets: + tlist.extend(self.arg2nodes(t, self.fs.Entry)) + for t in tlist: + t.set_precious() + return tlist + + def Repository(self, *dirs, **kw): + dirs = self.arg2nodes(list(dirs), self.fs.Dir) + apply(self.fs.Repository, dirs, kw) + + def Requires(self, target, prerequisite): + """Specify that 'prerequisite' must be built before 'target', + (but 'target' does not actually depend on 'prerequisite' + and need not be rebuilt if it changes).""" + tlist = self.arg2nodes(target, self.fs.Entry) + plist = self.arg2nodes(prerequisite, self.fs.Entry) + for t in tlist: + t.add_prerequisite(plist) + return tlist + + def Scanner(self, *args, **kw): + nargs = [] + for arg in args: + if SCons.Util.is_String(arg): + arg = self.subst(arg) + nargs.append(arg) + nkw = self.subst_kw(kw) + return apply(SCons.Scanner.Base, nargs, nkw) + + def SConsignFile(self, name=".sconsign", dbm_module=None): + if name is not None: + name = self.subst(name) + if not os.path.isabs(name): + name = os.path.join(str(self.fs.SConstruct_dir), name) + if name: + name = os.path.normpath(name) + sconsign_dir = os.path.dirname(name) + if sconsign_dir and not os.path.exists(sconsign_dir): + self.Execute(SCons.Defaults.Mkdir(sconsign_dir)) + SCons.SConsign.File(name, dbm_module) + + def SideEffect(self, side_effect, target): + """Tell scons that side_effects are built as side + effects of building targets.""" + side_effects = self.arg2nodes(side_effect, self.fs.Entry) + targets = self.arg2nodes(target, self.fs.Entry) + + for side_effect in side_effects: + if side_effect.multiple_side_effect_has_builder(): + raise SCons.Errors.UserError, "Multiple ways to build the same target were specified for: %s" % str(side_effect) + side_effect.add_source(targets) + side_effect.side_effect = 1 + self.Precious(side_effect) + for target in targets: + target.side_effects.append(side_effect) + return side_effects + + def SourceCode(self, entry, builder): + """Arrange for a source code builder for (part of) a tree.""" + entries = self.arg2nodes(entry, self.fs.Entry) + for entry in 
entries: + entry.set_src_builder(builder) + return entries + + def SourceSignatures(self, type): + global _warn_source_signatures_deprecated + if _warn_source_signatures_deprecated: + msg = "The env.SourceSignatures() method is deprecated;\n" + \ + "\tconvert your build to use the env.Decider() method instead." + SCons.Warnings.warn(SCons.Warnings.DeprecatedSourceSignaturesWarning, msg) + _warn_source_signatures_deprecated = False + type = self.subst(type) + self.src_sig_type = type + if type == 'MD5': + if not SCons.Util.md5: + raise UserError, "MD5 signatures are not available in this version of Python." + self.decide_source = self._changed_content + elif type == 'timestamp': + self.decide_source = self._changed_timestamp_match + else: + raise UserError, "Unknown source signature type '%s'" % type + + def Split(self, arg): + """This function converts a string or list into a list of strings + or Nodes. This makes things easier for users by allowing files to + be specified as a white-space separated list to be split. + The input rules are: + - A single string containing names separated by spaces. These will be + split apart at the spaces. + - A single Node instance + - A list containing either strings or Node instances. Any strings + in the list are not split at spaces. + In all cases, the function returns a list of Nodes and strings.""" + if SCons.Util.is_List(arg): + return map(self.subst, arg) + elif SCons.Util.is_String(arg): + return string.split(self.subst(arg)) + else: + return [self.subst(arg)] + + def TargetSignatures(self, type): + global _warn_target_signatures_deprecated + if _warn_target_signatures_deprecated: + msg = "The env.TargetSignatures() method is deprecated;\n" + \ + "\tconvert your build to use the env.Decider() method instead." + SCons.Warnings.warn(SCons.Warnings.DeprecatedTargetSignaturesWarning, msg) + _warn_target_signatures_deprecated = False + type = self.subst(type) + self.tgt_sig_type = type + if type in ('MD5', 'content'): + if not SCons.Util.md5: + raise UserError, "MD5 signatures are not available in this version of Python." + self.decide_target = self._changed_content + elif type == 'timestamp': + self.decide_target = self._changed_timestamp_match + elif type == 'build': + self.decide_target = self._changed_build + elif type == 'source': + self.decide_target = self._changed_source + else: + raise UserError, "Unknown target signature type '%s'"%type + + def Value(self, value, built_value=None): + """ + """ + return SCons.Node.Python.Value(value, built_value) + + def VariantDir(self, variant_dir, src_dir, duplicate=1): + variant_dir = self.arg2nodes(variant_dir, self.fs.Dir)[0] + src_dir = self.arg2nodes(src_dir, self.fs.Dir)[0] + self.fs.VariantDir(variant_dir, src_dir, duplicate) + + def FindSourceFiles(self, node='.'): + """ returns a list of all source files. + """ + node = self.arg2nodes(node, self.fs.Entry)[0] + + sources = [] + # Uncomment this and get rid of the global definition when we + # drop support for pre-2.2 Python versions. 
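+        # (Editor's sketch, not part of the upstream patch: given something
+        #  like
+        #      env.Program('hello', ['hello.c'])
+        #  env.FindSourceFiles('.') walks the children of '.' and collects
+        #  the builder-less leaf File nodes, here essentially the node for
+        #  hello.c.)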
+ #def build_source(ss, result): + # for s in ss: + # if isinstance(s, SCons.Node.FS.Dir): + # build_source(s.all_children(), result) + # elif s.has_builder(): + # build_source(s.sources, result) + # elif isinstance(s.disambiguate(), SCons.Node.FS.File): + # result.append(s) + build_source(node.all_children(), sources) + + # THIS CODE APPEARS TO HAVE NO EFFECT + # # get the final srcnode for all nodes, this means stripping any + # # attached build node by calling the srcnode function + # for file in sources: + # srcnode = file.srcnode() + # while srcnode != file.srcnode(): + # srcnode = file.srcnode() + + # remove duplicates + return list(set(sources)) + + def FindInstalledFiles(self): + """ returns the list of all targets of the Install and InstallAs Builder. + """ + from SCons.Tool import install + if install._UNIQUE_INSTALLED_FILES is None: + install._UNIQUE_INSTALLED_FILES = SCons.Util.uniquer_hashables(install._INSTALLED_FILES) + return install._UNIQUE_INSTALLED_FILES + +class OverrideEnvironment(Base): + """A proxy that overrides variables in a wrapped construction + environment by returning values from an overrides dictionary in + preference to values from the underlying subject environment. + + This is a lightweight (I hope) proxy that passes through most use of + attributes to the underlying Environment.Base class, but has just + enough additional methods defined to act like a real construction + environment with overridden values. It can wrap either a Base + construction environment, or another OverrideEnvironment, which + can in turn nest arbitrary OverrideEnvironments... + + Note that we do *not* call the underlying base class + (SubsitutionEnvironment) initialization, because we get most of those + from proxying the attributes of the subject construction environment. + But because we subclass SubstitutionEnvironment, this class also + has inherited arg2nodes() and subst*() methods; those methods can't + be proxied because they need *this* object's methods to fetch the + values from the overrides dictionary. + """ + + def __init__(self, subject, overrides={}): + if __debug__: logInstanceCreation(self, 'Environment.OverrideEnvironment') + self.__dict__['__subject'] = subject + self.__dict__['overrides'] = overrides + + # Methods that make this class act like a proxy. + def __getattr__(self, name): + return getattr(self.__dict__['__subject'], name) + def __setattr__(self, name, value): + setattr(self.__dict__['__subject'], name, value) + + # Methods that make this class act like a dictionary. 
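+    # (Editor's illustration, not part of the upstream patch: with
+    #      oenv = OverrideEnvironment(env, {'CC': 'clang'})
+    #  oenv['CC'] is answered from the overrides dictionary, while any
+    #  other key falls through to the wrapped environment, as the methods
+    #  below implement.)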
+ def __getitem__(self, key): + try: + return self.__dict__['overrides'][key] + except KeyError: + return self.__dict__['__subject'].__getitem__(key) + def __setitem__(self, key, value): + if not is_valid_construction_var(key): + raise SCons.Errors.UserError, "Illegal construction variable `%s'" % key + self.__dict__['overrides'][key] = value + def __delitem__(self, key): + try: + del self.__dict__['overrides'][key] + except KeyError: + deleted = 0 + else: + deleted = 1 + try: + result = self.__dict__['__subject'].__delitem__(key) + except KeyError: + if not deleted: + raise + result = None + return result + def get(self, key, default=None): + """Emulates the get() method of dictionaries.""" + try: + return self.__dict__['overrides'][key] + except KeyError: + return self.__dict__['__subject'].get(key, default) + def has_key(self, key): + try: + self.__dict__['overrides'][key] + return 1 + except KeyError: + return self.__dict__['__subject'].has_key(key) + def __contains__(self, key): + if self.__dict__['overrides'].__contains__(key): + return 1 + return self.__dict__['__subject'].__contains__(key) + def Dictionary(self): + """Emulates the items() method of dictionaries.""" + d = self.__dict__['__subject'].Dictionary().copy() + d.update(self.__dict__['overrides']) + return d + def items(self): + """Emulates the items() method of dictionaries.""" + return self.Dictionary().items() + + # Overridden private construction environment methods. + def _update(self, dict): + """Update an environment's values directly, bypassing the normal + checks that occur when users try to set items. + """ + self.__dict__['overrides'].update(dict) + + def gvars(self): + return self.__dict__['__subject'].gvars() + + def lvars(self): + lvars = self.__dict__['__subject'].lvars() + lvars.update(self.__dict__['overrides']) + return lvars + + # Overridden public construction environment methods. + def Replace(self, **kw): + kw = copy_non_reserved_keywords(kw) + self.__dict__['overrides'].update(semi_deepcopy(kw)) + +# The entry point that will be used by the external world +# to refer to a construction environment. This allows the wrapper +# interface to extend a construction environment for its own purposes +# by subclassing SCons.Environment.Base and then assigning the +# class to SCons.Environment.Environment. + +Environment = Base + +# An entry point for returning a proxy subclass instance that overrides +# the subst*() methods so they don't actually perform construction +# variable substitution. This is specifically intended to be the shim +# layer in between global function calls (which don't want construction +# variable substitution) and the DefaultEnvironment() (which would +# substitute variables if left to its own devices).""" +# +# We have to wrap this in a function that allows us to delay definition of +# the class until it's necessary, so that when it subclasses Environment +# it will pick up whatever Environment subclass the wrapper interface +# might have assigned to SCons.Environment.Environment. 
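+# (Editor's illustration, not part of the upstream patch: for an existing
+#  construction environment `env',
+#      proxy = NoSubstitutionProxy(env)
+#      proxy.subst('$CC $CCFLAGS')   # returned unchanged, no expansion
+#  while plain attribute access still reaches the wrapped environment.)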
+ +def NoSubstitutionProxy(subject): + class _NoSubstitutionProxy(Environment): + def __init__(self, subject): + self.__dict__['__subject'] = subject + def __getattr__(self, name): + return getattr(self.__dict__['__subject'], name) + def __setattr__(self, name, value): + return setattr(self.__dict__['__subject'], name, value) + def raw_to_mode(self, dict): + try: + raw = dict['raw'] + except KeyError: + pass + else: + del dict['raw'] + dict['mode'] = raw + def subst(self, string, *args, **kwargs): + return string + def subst_kw(self, kw, *args, **kwargs): + return kw + def subst_list(self, string, *args, **kwargs): + nargs = (string, self,) + args + nkw = kwargs.copy() + nkw['gvars'] = {} + self.raw_to_mode(nkw) + return apply(SCons.Subst.scons_subst_list, nargs, nkw) + def subst_target_source(self, string, *args, **kwargs): + nargs = (string, self,) + args + nkw = kwargs.copy() + nkw['gvars'] = {} + self.raw_to_mode(nkw) + return apply(SCons.Subst.scons_subst, nargs, nkw) + return _NoSubstitutionProxy(subject) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Errors.py b/engine/SCons/Errors.py new file mode 100644 index 0000000..07e976c --- /dev/null +++ b/engine/SCons/Errors.py @@ -0,0 +1,207 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +"""SCons.Errors + +This file contains the exception classes used to handle internal +and user errors in SCons. + +""" + +__revision__ = "src/engine/SCons/Errors.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Util + +import exceptions + +class BuildError(Exception): + """ Errors occuring while building. + + BuildError have the following attributes: + + Information about the cause of the build error: + ----------------------------------------------- + + errstr : a description of the error message + + status : the return code of the action that caused the build + error. Must be set to a non-zero value even if the + build error is not due to an action returning a + non-zero returned code. + + exitstatus : SCons exit status due to this build error. + Must be nonzero unless due to an explicit Exit() + call. Not always the same as status, since + actions return a status code that should be + respected, but SCons typically exits with 2 + irrespective of the return value of the failed + action. 
+
+    filename : The name of the file or directory that caused the
+               build error. Set to None if no files are associated with
+               this error. This might be different from the target
+               being built. For example, failure to create the
+               directory in which the target file will appear. It
+               can be None if the error is not due to a particular
+               filename.
+
+    exc_info : Info about the exception that caused the build
+               error. Set to (None, None, None) if this build
+               error is not due to an exception.
+
+
+    Information about the location of the error:
+    --------------------------------------------
+
+    node : the error occurred while building this target node(s)
+
+    executor : the executor that caused the build to fail (might
+               be None if the build failure is not due to the
+               executor failing)
+
+    action : the action that caused the build to fail (might be
+             None if the build failure is not due to an
+             action failure)
+
+    command : the command line for the action that caused the
+              build to fail (might be None if the build failure
+              is not due to an action failure)
+    """
+
+    def __init__(self,
+                 node=None, errstr="Unknown error", status=2, exitstatus=2,
+                 filename=None, executor=None, action=None, command=None,
+                 exc_info=(None, None, None)):
+
+        self.errstr = errstr
+        self.status = status
+        self.exitstatus = exitstatus
+        self.filename = filename
+        self.exc_info = exc_info
+
+        self.node = node
+        self.executor = executor
+        self.action = action
+        self.command = command
+
+        Exception.__init__(self, node, errstr, status, exitstatus, filename,
+                           executor, action, command, exc_info)
+
+    def __str__(self):
+        if self.filename:
+            return self.filename + ': ' + self.errstr
+        else:
+            return self.errstr
+
+class InternalError(Exception):
+    pass
+
+class UserError(Exception):
+    pass
+
+class StopError(Exception):
+    pass
+
+class EnvironmentError(Exception):
+    pass
+
+class MSVCError(IOError):
+    pass
+
+class ExplicitExit(Exception):
+    def __init__(self, node=None, status=None, *args):
+        self.node = node
+        self.status = status
+        self.exitstatus = status
+        apply(Exception.__init__, (self,) + args)
+
+def convert_to_BuildError(status, exc_info=None):
+    """
+    Convert any return code to a BuildError exception.
+
+    `status' can either be a return code or an Exception.
+    The buildError.status we set here will normally be
+    used as the exit status of the "scons" process.
+    """
+    if not exc_info and isinstance(status, Exception):
+        exc_info = (status.__class__, status, None)
+
+    if isinstance(status, BuildError):
+        buildError = status
+        buildError.exitstatus = 2   # always exit with 2 on build errors
+    elif isinstance(status, ExplicitExit):
+        status = status.status
+        errstr = 'Explicit exit, status %s' % status
+        buildError = BuildError(
+            errstr=errstr,
+            status=status,      # might be 0, OK here
+            exitstatus=status,  # might be 0, OK here
+            exc_info=exc_info)
+    # TODO(1.5):
+    #elif isinstance(status, (StopError, UserError)):
+    elif isinstance(status, StopError) or isinstance(status, UserError):
+        buildError = BuildError(
+            errstr=str(status),
+            status=2,
+            exitstatus=2,
+            exc_info=exc_info)
+    elif isinstance(status, exceptions.EnvironmentError):
+        # If an IOError/OSError happens, raise a BuildError.
+        # Report the name of the file or directory that caused the
+        # error, which might be different from the target being built
+        # (for example, failure to create the directory in which the
+        # target file will appear).
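+        # (Editor's illustration, not part of the upstream patch: an
+        #      OSError(2, 'No such file or directory', 'missing.c')
+        #  arriving here becomes BuildError(errstr='No such file or directory',
+        #  status=2, exitstatus=2, filename='missing.c').)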
+ try: filename = status.filename + except AttributeError: filename = None + buildError = BuildError( + errstr=status.strerror, + status=status.errno, + exitstatus=2, + filename=filename, + exc_info=exc_info) + elif isinstance(status, Exception): + buildError = BuildError( + errstr='%s : %s' % (status.__class__.__name__, status), + status=2, + exitstatus=2, + exc_info=exc_info) + elif SCons.Util.is_String(status): + buildError = BuildError( + errstr=status, + status=2, + exitstatus=2) + else: + buildError = BuildError( + errstr="Error %s" % status, + status=status, + exitstatus=2) + + #import sys + #sys.stderr.write("convert_to_BuildError: status %s => (errstr %s, status %s)"%(status,buildError.errstr, buildError.status)) + return buildError + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Executor.py b/engine/SCons/Executor.py new file mode 100644 index 0000000..1098968 --- /dev/null +++ b/engine/SCons/Executor.py @@ -0,0 +1,636 @@ +"""SCons.Executor + +A module for executing actions with specific lists of target and source +Nodes. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Executor.py 4577 2009/12/27 19:43:56 scons" + +import string +import UserList + +from SCons.Debug import logInstanceCreation +import SCons.Errors +import SCons.Memoize + + +class Batch: + """Remembers exact association between targets + and sources of executor.""" + def __init__(self, targets=[], sources=[]): + self.targets = targets + self.sources = sources + + + +class TSList(UserList.UserList): + """A class that implements $TARGETS or $SOURCES expansions by wrapping + an executor Method. This class is used in the Executor.lvars() + to delay creation of NodeList objects until they're needed. + + Note that we subclass UserList.UserList purely so that the + is_Sequence() function will identify an object of this class as + a list during variable expansion. We're not really using any + UserList.UserList methods in practice. 
+ """ + def __init__(self, func): + self.func = func + def __getattr__(self, attr): + nl = self.func() + return getattr(nl, attr) + def __getitem__(self, i): + nl = self.func() + return nl[i] + def __getslice__(self, i, j): + nl = self.func() + i = max(i, 0); j = max(j, 0) + return nl[i:j] + def __str__(self): + nl = self.func() + return str(nl) + def __repr__(self): + nl = self.func() + return repr(nl) + +class TSObject: + """A class that implements $TARGET or $SOURCE expansions by wrapping + an Executor method. + """ + def __init__(self, func): + self.func = func + def __getattr__(self, attr): + n = self.func() + return getattr(n, attr) + def __str__(self): + n = self.func() + if n: + return str(n) + return '' + def __repr__(self): + n = self.func() + if n: + return repr(n) + return '' + +def rfile(node): + """ + A function to return the results of a Node's rfile() method, + if it exists, and the Node itself otherwise (if it's a Value + Node, e.g.). + """ + try: + rfile = node.rfile + except AttributeError: + return node + else: + return rfile() + + +class Executor: + """A class for controlling instances of executing an action. + + This largely exists to hold a single association of an action, + environment, list of environment override dictionaries, targets + and sources for later processing as needed. + """ + + if SCons.Memoize.use_memoizer: + __metaclass__ = SCons.Memoize.Memoized_Metaclass + + memoizer_counters = [] + + def __init__(self, action, env=None, overridelist=[{}], + targets=[], sources=[], builder_kw={}): + if __debug__: logInstanceCreation(self, 'Executor.Executor') + self.set_action_list(action) + self.pre_actions = [] + self.post_actions = [] + self.env = env + self.overridelist = overridelist + if targets or sources: + self.batches = [Batch(targets[:], sources[:])] + else: + self.batches = [] + self.builder_kw = builder_kw + self._memo = {} + + def get_lvars(self): + try: + return self.lvars + except AttributeError: + self.lvars = { + 'CHANGED_SOURCES' : TSList(self._get_changed_sources), + 'CHANGED_TARGETS' : TSList(self._get_changed_targets), + 'SOURCE' : TSObject(self._get_source), + 'SOURCES' : TSList(self._get_sources), + 'TARGET' : TSObject(self._get_target), + 'TARGETS' : TSList(self._get_targets), + 'UNCHANGED_SOURCES' : TSList(self._get_unchanged_sources), + 'UNCHANGED_TARGETS' : TSList(self._get_unchanged_targets), + } + return self.lvars + + def _get_changes(self): + cs = [] + ct = [] + us = [] + ut = [] + for b in self.batches: + if b.targets[0].is_up_to_date(): + us.extend(map(rfile, b.sources)) + ut.extend(b.targets) + else: + cs.extend(map(rfile, b.sources)) + ct.extend(b.targets) + self._changed_sources_list = SCons.Util.NodeList(cs) + self._changed_targets_list = SCons.Util.NodeList(ct) + self._unchanged_sources_list = SCons.Util.NodeList(us) + self._unchanged_targets_list = SCons.Util.NodeList(ut) + + def _get_changed_sources(self, *args, **kw): + try: + return self._changed_sources_list + except AttributeError: + self._get_changes() + return self._changed_sources_list + + def _get_changed_targets(self, *args, **kw): + try: + return self._changed_targets_list + except AttributeError: + self._get_changes() + return self._changed_targets_list + + def _get_source(self, *args, **kw): + #return SCons.Util.NodeList([rfile(self.batches[0].sources[0]).get_subst_proxy()]) + return rfile(self.batches[0].sources[0]).get_subst_proxy() + + def _get_sources(self, *args, **kw): + return SCons.Util.NodeList(map(lambda n: rfile(n).get_subst_proxy(), 
self.get_all_sources())) + + def _get_target(self, *args, **kw): + #return SCons.Util.NodeList([self.batches[0].targets[0].get_subst_proxy()]) + return self.batches[0].targets[0].get_subst_proxy() + + def _get_targets(self, *args, **kw): + return SCons.Util.NodeList(map(lambda n: n.get_subst_proxy(), self.get_all_targets())) + + def _get_unchanged_sources(self, *args, **kw): + try: + return self._unchanged_sources_list + except AttributeError: + self._get_changes() + return self._unchanged_sources_list + + def _get_unchanged_targets(self, *args, **kw): + try: + return self._unchanged_targets_list + except AttributeError: + self._get_changes() + return self._unchanged_targets_list + + def get_action_targets(self): + if not self.action_list: + return [] + targets_string = self.action_list[0].get_targets(self.env, self) + if targets_string[0] == '$': + targets_string = targets_string[1:] + return self.get_lvars()[targets_string] + + def set_action_list(self, action): + import SCons.Util + if not SCons.Util.is_List(action): + if not action: + import SCons.Errors + raise SCons.Errors.UserError, "Executor must have an action." + action = [action] + self.action_list = action + + def get_action_list(self): + return self.pre_actions + self.action_list + self.post_actions + + def get_all_targets(self): + """Returns all targets for all batches of this Executor.""" + result = [] + for batch in self.batches: + # TODO(1.5): remove the list() cast + result.extend(list(batch.targets)) + return result + + def get_all_sources(self): + """Returns all sources for all batches of this Executor.""" + result = [] + for batch in self.batches: + # TODO(1.5): remove the list() cast + result.extend(list(batch.sources)) + return result + + def get_all_children(self): + """Returns all unique children (dependencies) for all batches + of this Executor. + + The Taskmaster can recognize when it's already evaluated a + Node, so we don't have to make this list unique for its intended + canonical use case, but we expect there to be a lot of redundancy + (long lists of batched .cc files #including the same .h files + over and over), so removing the duplicates once up front should + save the Taskmaster a lot of work. + """ + result = SCons.Util.UniqueList([]) + for target in self.get_all_targets(): + result.extend(target.children()) + return result + + def get_all_prerequisites(self): + """Returns all unique (order-only) prerequisites for all batches + of this Executor. + """ + result = SCons.Util.UniqueList([]) + for target in self.get_all_targets(): + # TODO(1.5): remove the list() cast + result.extend(list(target.prerequisites)) + return result + + def get_action_side_effects(self): + + """Returns all side effects for all batches of this + Executor used by the underlying Action. + """ + result = SCons.Util.UniqueList([]) + for target in self.get_action_targets(): + result.extend(target.side_effects) + return result + + memoizer_counters.append(SCons.Memoize.CountValue('get_build_env')) + + def get_build_env(self): + """Fetch or create the appropriate build Environment + for this Executor. + """ + try: + return self._memo['get_build_env'] + except KeyError: + pass + + # Create the build environment instance with appropriate + # overrides. These get evaluated against the current + # environment's construction variables so that users can + # add to existing values by referencing the variable in + # the expansion. 
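+        # (Editor's illustration, not part of the upstream patch: a builder
+        #  call such as
+        #      env.Program('foo', 'foo.c', CCFLAGS='$CCFLAGS -g')
+        #  lands its keyword arguments in self.overridelist, and the
+        #  '$CCFLAGS' reference is expanded against the wrapped environment
+        #  when the command line is finally substituted.)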
+ overrides = {} + for odict in self.overridelist: + overrides.update(odict) + + import SCons.Defaults + env = self.env or SCons.Defaults.DefaultEnvironment() + build_env = env.Override(overrides) + + self._memo['get_build_env'] = build_env + + return build_env + + def get_build_scanner_path(self, scanner): + """Fetch the scanner path for this executor's targets and sources. + """ + env = self.get_build_env() + try: + cwd = self.batches[0].targets[0].cwd + except (IndexError, AttributeError): + cwd = None + return scanner.path(env, cwd, + self.get_all_targets(), + self.get_all_sources()) + + def get_kw(self, kw={}): + result = self.builder_kw.copy() + result.update(kw) + result['executor'] = self + return result + + def do_nothing(self, target, kw): + return 0 + + def do_execute(self, target, kw): + """Actually execute the action list.""" + env = self.get_build_env() + kw = self.get_kw(kw) + status = 0 + for act in self.get_action_list(): + #args = (self.get_all_targets(), self.get_all_sources(), env) + args = ([], [], env) + status = apply(act, args, kw) + if isinstance(status, SCons.Errors.BuildError): + status.executor = self + raise status + elif status: + msg = "Error %s" % status + raise SCons.Errors.BuildError( + errstr=msg, + node=self.batches[0].targets, + executor=self, + action=act) + return status + + # use extra indirection because with new-style objects (Python 2.2 + # and above) we can't override special methods, and nullify() needs + # to be able to do this. + + def __call__(self, target, **kw): + return self.do_execute(target, kw) + + def cleanup(self): + self._memo = {} + + def add_sources(self, sources): + """Add source files to this Executor's list. This is necessary + for "multi" Builders that can be called repeatedly to build up + a source file list for a given target.""" + # TODO(batch): extend to multiple batches + assert (len(self.batches) == 1) + # TODO(batch): remove duplicates? + sources = filter(lambda x, s=self.batches[0].sources: x not in s, sources) + self.batches[0].sources.extend(sources) + + def get_sources(self): + return self.batches[0].sources + + def add_batch(self, targets, sources): + """Add pair of associated target and source to this Executor's list. + This is necessary for "batch" Builders that can be called repeatedly + to build up a list of matching target and source files that will be + used in order to update multiple target files at once from multiple + corresponding source files, for tools like MSVC that support it.""" + self.batches.append(Batch(targets, sources)) + + def prepare(self): + """ + Preparatory checks for whether this Executor can go ahead + and (try to) build its targets. + """ + for s in self.get_all_sources(): + if s.missing(): + msg = "Source `%s' not found, needed by target `%s'." + raise SCons.Errors.StopError, msg % (s, self.batches[0].targets[0]) + + def add_pre_action(self, action): + self.pre_actions.append(action) + + def add_post_action(self, action): + self.post_actions.append(action) + + # another extra indirection for new-style objects and nullify... 
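+    # (Editor's note, not part of the upstream patch: after nullify(),
+    #      str(executor)     # -> ''  via the my_str indirection
+    #      executor(target)  # -> 0   via do_nothing()
+    #  which is why __str__ and __call__ go through these extra hops.)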
+ + def my_str(self): + env = self.get_build_env() + get = lambda action, t=self.get_all_targets(), s=self.get_all_sources(), e=env: \ + action.genstring(t, s, e) + return string.join(map(get, self.get_action_list()), "\n") + + + def __str__(self): + return self.my_str() + + def nullify(self): + self.cleanup() + self.do_execute = self.do_nothing + self.my_str = lambda S=self: '' + + memoizer_counters.append(SCons.Memoize.CountValue('get_contents')) + + def get_contents(self): + """Fetch the signature contents. This is the main reason this + class exists, so we can compute this once and cache it regardless + of how many target or source Nodes there are. + """ + try: + return self._memo['get_contents'] + except KeyError: + pass + env = self.get_build_env() + get = lambda action, t=self.get_all_targets(), s=self.get_all_sources(), e=env: \ + action.get_contents(t, s, e) + result = string.join(map(get, self.get_action_list()), "") + self._memo['get_contents'] = result + return result + + def get_timestamp(self): + """Fetch a time stamp for this Executor. We don't have one, of + course (only files do), but this is the interface used by the + timestamp module. + """ + return 0 + + def scan_targets(self, scanner): + # TODO(batch): scan by batches + self.scan(scanner, self.get_all_targets()) + + def scan_sources(self, scanner): + # TODO(batch): scan by batches + if self.batches[0].sources: + self.scan(scanner, self.get_all_sources()) + + def scan(self, scanner, node_list): + """Scan a list of this Executor's files (targets or sources) for + implicit dependencies and update all of the targets with them. + This essentially short-circuits an N*M scan of the sources for + each individual target, which is a hell of a lot more efficient. + """ + env = self.get_build_env() + + # TODO(batch): scan by batches) + deps = [] + if scanner: + for node in node_list: + node.disambiguate() + s = scanner.select(node) + if not s: + continue + path = self.get_build_scanner_path(s) + deps.extend(node.get_implicit_deps(env, s, path)) + else: + kw = self.get_kw() + for node in node_list: + node.disambiguate() + scanner = node.get_env_scanner(env, kw) + if not scanner: + continue + scanner = scanner.select(node) + if not scanner: + continue + path = self.get_build_scanner_path(scanner) + deps.extend(node.get_implicit_deps(env, scanner, path)) + + deps.extend(self.get_implicit_deps()) + + for tgt in self.get_all_targets(): + tgt.add_to_implicit(deps) + + def _get_unignored_sources_key(self, node, ignore=()): + return (node,) + tuple(ignore) + + memoizer_counters.append(SCons.Memoize.CountDict('get_unignored_sources', _get_unignored_sources_key)) + + def get_unignored_sources(self, node, ignore=()): + key = (node,) + tuple(ignore) + try: + memo_dict = self._memo['get_unignored_sources'] + except KeyError: + memo_dict = {} + self._memo['get_unignored_sources'] = memo_dict + else: + try: + return memo_dict[key] + except KeyError: + pass + + if node: + # TODO: better way to do this (it's a linear search, + # but it may not be critical path)? + sourcelist = [] + for b in self.batches: + if node in b.targets: + sourcelist = b.sources + break + else: + sourcelist = self.get_all_sources() + if ignore: + idict = {} + for i in ignore: + idict[i] = 1 + sourcelist = filter(lambda s, i=idict: not i.has_key(s), sourcelist) + + memo_dict[key] = sourcelist + + return sourcelist + + def get_implicit_deps(self): + """Return the executor's implicit dependencies, i.e. 
the nodes of + the commands to be executed.""" + result = [] + build_env = self.get_build_env() + for act in self.get_action_list(): + deps = act.get_implicit_deps(self.get_all_targets(), + self.get_all_sources(), + build_env) + result.extend(deps) + return result + + + +_batch_executors = {} + +def GetBatchExecutor(key): + return _batch_executors[key] + +def AddBatchExecutor(key, executor): + assert not _batch_executors.has_key(key) + _batch_executors[key] = executor + +nullenv = None + + +def get_NullEnvironment(): + """Use singleton pattern for Null Environments.""" + global nullenv + + import SCons.Util + class NullEnvironment(SCons.Util.Null): + import SCons.CacheDir + _CacheDir_path = None + _CacheDir = SCons.CacheDir.CacheDir(None) + def get_CacheDir(self): + return self._CacheDir + + if not nullenv: + nullenv = NullEnvironment() + return nullenv + +class Null: + """A null Executor, with a null build Environment, that does + nothing when the rest of the methods call it. + + This might be able to disapper when we refactor things to + disassociate Builders from Nodes entirely, so we're not + going to worry about unit tests for this--at least for now. + """ + def __init__(self, *args, **kw): + if __debug__: logInstanceCreation(self, 'Executor.Null') + self.batches = [Batch(kw['targets'][:], [])] + def get_build_env(self): + return get_NullEnvironment() + def get_build_scanner_path(self): + return None + def cleanup(self): + pass + def prepare(self): + pass + def get_unignored_sources(self, *args, **kw): + return tuple(()) + def get_action_targets(self): + return [] + def get_action_list(self): + return [] + def get_all_targets(self): + return self.batches[0].targets + def get_all_sources(self): + return self.batches[0].targets[0].sources + def get_all_children(self): + return self.get_all_sources() + def get_all_prerequisites(self): + return [] + def get_action_side_effects(self): + return [] + def __call__(self, *args, **kw): + return 0 + def get_contents(self): + return '' + def _morph(self): + """Morph this Null executor to a real Executor object.""" + batches = self.batches + self.__class__ = Executor + self.__init__([]) + self.batches = batches + + # The following methods require morphing this Null Executor to a + # real Executor object. + + def add_pre_action(self, action): + self._morph() + self.add_pre_action(action) + def add_post_action(self, action): + self._morph() + self.add_post_action(action) + def set_action_list(self, action): + self._morph() + self.set_action_list(action) + + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Job.py b/engine/SCons/Job.py new file mode 100644 index 0000000..6bfc288 --- /dev/null +++ b/engine/SCons/Job.py @@ -0,0 +1,435 @@ +"""SCons.Job + +This module defines the Serial and Parallel classes that execute tasks to +complete a build. The Jobs class provides a higher level interface to start, +stop, and wait on jobs. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Job.py 4577 2009/12/27 19:43:56 scons" + +import os +import signal + +import SCons.Errors + +# The default stack size (in kilobytes) of the threads used to execute +# jobs in parallel. +# +# We use a stack size of 256 kilobytes. The default on some platforms +# is too large and prevents us from creating enough threads to fully +# parallelized the build. For example, the default stack size on linux +# is 8 MBytes. + +explicit_stack_size = None +default_stack_size = 256 + +interrupt_msg = 'Build interrupted.' + + +class InterruptState: + def __init__(self): + self.interrupted = False + + def set(self): + self.interrupted = True + + def __call__(self): + return self.interrupted + + +class Jobs: + """An instance of this class initializes N jobs, and provides + methods for starting, stopping, and waiting on all N jobs. + """ + + def __init__(self, num, taskmaster): + """ + create 'num' jobs using the given taskmaster. + + If 'num' is 1 or less, then a serial job will be used, + otherwise a parallel job with 'num' worker threads will + be used. + + The 'num_jobs' attribute will be set to the actual number of jobs + allocated. If more than one job is requested but the Parallel + class can't do it, it gets reset to 1. Wrapping interfaces that + care should check the value of 'num_jobs' after initialization. + """ + + self.job = None + if num > 1: + stack_size = explicit_stack_size + if stack_size is None: + stack_size = default_stack_size + + try: + self.job = Parallel(taskmaster, num, stack_size) + self.num_jobs = num + except NameError: + pass + if self.job is None: + self.job = Serial(taskmaster) + self.num_jobs = 1 + + def run(self, postfunc=lambda: None): + """Run the jobs. + + postfunc() will be invoked after the jobs has run. It will be + invoked even if the jobs are interrupted by a keyboard + interrupt (well, in fact by a signal such as either SIGINT, + SIGTERM or SIGHUP). 
The execution of postfunc() is protected + against keyboard interrupts and is guaranteed to run to + completion.""" + self._setup_sig_handler() + try: + self.job.start() + finally: + postfunc() + self._reset_sig_handler() + + def were_interrupted(self): + """Returns whether the jobs were interrupted by a signal.""" + return self.job.interrupted() + + def _setup_sig_handler(self): + """Setup an interrupt handler so that SCons can shutdown cleanly in + various conditions: + + a) SIGINT: Keyboard interrupt + b) SIGTERM: kill or system shutdown + c) SIGHUP: Controlling shell exiting + + We handle all of these cases by stopping the taskmaster. It + turns out that it very difficult to stop the build process + by throwing asynchronously an exception such as + KeyboardInterrupt. For example, the python Condition + variables (threading.Condition) and Queue's do not seem to + asynchronous-exception-safe. It would require adding a whole + bunch of try/finally block and except KeyboardInterrupt all + over the place. + + Note also that we have to be careful to handle the case when + SCons forks before executing another process. In that case, we + want the child to exit immediately. + """ + def handler(signum, stack, self=self, parentpid=os.getpid()): + if os.getpid() == parentpid: + self.job.taskmaster.stop() + self.job.interrupted.set() + else: + os._exit(2) + + self.old_sigint = signal.signal(signal.SIGINT, handler) + self.old_sigterm = signal.signal(signal.SIGTERM, handler) + try: + self.old_sighup = signal.signal(signal.SIGHUP, handler) + except AttributeError: + pass + + def _reset_sig_handler(self): + """Restore the signal handlers to their previous state (before the + call to _setup_sig_handler().""" + + signal.signal(signal.SIGINT, self.old_sigint) + signal.signal(signal.SIGTERM, self.old_sigterm) + try: + signal.signal(signal.SIGHUP, self.old_sighup) + except AttributeError: + pass + +class Serial: + """This class is used to execute tasks in series, and is more efficient + than Parallel, but is only appropriate for non-parallel builds. Only + one instance of this class should be in existence at a time. + + This class is not thread safe. + """ + + def __init__(self, taskmaster): + """Create a new serial job given a taskmaster. + + The taskmaster's next_task() method should return the next task + that needs to be executed, or None if there are no more tasks. The + taskmaster's executed() method will be called for each task when it + is successfully executed or failed() will be called if it failed to + execute (e.g. execute() raised an exception).""" + + self.taskmaster = taskmaster + self.interrupted = InterruptState() + + def start(self): + """Start the job. This will begin pulling tasks from the taskmaster + and executing them, and return when there are no more tasks. If a task + fails to execute (i.e. execute() raises an exception), then the job will + stop.""" + + while 1: + task = self.taskmaster.next_task() + + if task is None: + break + + try: + task.prepare() + if task.needs_execute(): + task.execute() + except: + if self.interrupted(): + try: + raise SCons.Errors.BuildError( + task.targets[0], errstr=interrupt_msg) + except: + task.exception_set() + else: + task.exception_set() + + # Let the failed() callback function arrange for the + # build to stop if that's appropriate. 
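+                # (Editor's sketch, not part of the upstream patch: the usual
+                #  driver is along the lines of
+                #      jobs = Jobs(1, taskmaster)  # num <= 1 selects Serial
+                #      jobs.run()
+                #  and the failed() call below lets that taskmaster decide
+                #  whether to keep handing out tasks.)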
+ task.failed() + else: + task.executed() + + task.postprocess() + self.taskmaster.cleanup() + + +# Trap import failure so that everything in the Job module but the +# Parallel class (and its dependent classes) will work if the interpreter +# doesn't support threads. +try: + import Queue + import threading +except ImportError: + pass +else: + class Worker(threading.Thread): + """A worker thread waits on a task to be posted to its request queue, + dequeues the task, executes it, and posts a tuple including the task + and a boolean indicating whether the task executed successfully. """ + + def __init__(self, requestQueue, resultsQueue, interrupted): + threading.Thread.__init__(self) + self.setDaemon(1) + self.requestQueue = requestQueue + self.resultsQueue = resultsQueue + self.interrupted = interrupted + self.start() + + def run(self): + while 1: + task = self.requestQueue.get() + + if task is None: + # The "None" value is used as a sentinel by + # ThreadPool.cleanup(). This indicates that there + # are no more tasks, so we should quit. + break + + try: + if self.interrupted(): + raise SCons.Errors.BuildError( + task.targets[0], errstr=interrupt_msg) + task.execute() + except: + task.exception_set() + ok = False + else: + ok = True + + self.resultsQueue.put((task, ok)) + + class ThreadPool: + """This class is responsible for spawning and managing worker threads.""" + + def __init__(self, num, stack_size, interrupted): + """Create the request and reply queues, and 'num' worker threads. + + One must specify the stack size of the worker threads. The + stack size is specified in kilobytes. + """ + self.requestQueue = Queue.Queue(0) + self.resultsQueue = Queue.Queue(0) + + try: + prev_size = threading.stack_size(stack_size*1024) + except AttributeError, e: + # Only print a warning if the stack size has been + # explicitly set. + if not explicit_stack_size is None: + msg = "Setting stack size is unsupported by this version of Python:\n " + \ + e.args[0] + SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg) + except ValueError, e: + msg = "Setting stack size failed:\n " + str(e) + SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg) + + # Create worker threads + self.workers = [] + for _ in range(num): + worker = Worker(self.requestQueue, self.resultsQueue, interrupted) + self.workers.append(worker) + + # Once we drop Python 1.5 we can change the following to: + #if 'prev_size' in locals(): + if 'prev_size' in locals().keys(): + threading.stack_size(prev_size) + + def put(self, task): + """Put task into request queue.""" + self.requestQueue.put(task) + + def get(self): + """Remove and return a result tuple from the results queue.""" + return self.resultsQueue.get() + + def preparation_failed(self, task): + self.resultsQueue.put((task, False)) + + def cleanup(self): + """ + Shuts down the thread pool, giving each worker thread a + chance to shut down gracefully. + """ + # For each worker thread, put a sentinel "None" value + # on the requestQueue (indicating that there's no work + # to be done) so that each worker thread will get one and + # terminate gracefully. + for _ in self.workers: + self.requestQueue.put(None) + + # Wait for all of the workers to terminate. + # + # If we don't do this, later Python versions (2.4, 2.5) often + # seem to raise exceptions during shutdown. 
This happens + # in requestQueue.get(), as an assertion failure that + # requestQueue.not_full is notified while not acquired, + # seemingly because the main thread has shut down (or is + # in the process of doing so) while the workers are still + # trying to pull sentinels off the requestQueue. + # + # Normally these terminations should happen fairly quickly, + # but we'll stick a one-second timeout on here just in case + # someone gets hung. + for worker in self.workers: + worker.join(1.0) + self.workers = [] + + class Parallel: + """This class is used to execute tasks in parallel, and is somewhat + less efficient than Serial, but is appropriate for parallel builds. + + This class is thread safe. + """ + + def __init__(self, taskmaster, num, stack_size): + """Create a new parallel job given a taskmaster. + + The taskmaster's next_task() method should return the next + task that needs to be executed, or None if there are no more + tasks. The taskmaster's executed() method will be called + for each task when it is successfully executed or failed() + will be called if the task failed to execute (i.e. execute() + raised an exception). + + Note: calls to taskmaster are serialized, but calls to + execute() on distinct tasks are not serialized, because + that is the whole point of parallel jobs: they can execute + multiple tasks simultaneously. """ + + self.taskmaster = taskmaster + self.interrupted = InterruptState() + self.tp = ThreadPool(num, stack_size, self.interrupted) + + self.maxjobs = num + + def start(self): + """Start the job. This will begin pulling tasks from the + taskmaster and executing them, and return when there are no + more tasks. If a task fails to execute (i.e. execute() raises + an exception), then the job will stop.""" + + jobs = 0 + + while 1: + # Start up as many available tasks as we're + # allowed to. + while jobs < self.maxjobs: + task = self.taskmaster.next_task() + if task is None: + break + + try: + # prepare task for execution + task.prepare() + except: + task.exception_set() + task.failed() + task.postprocess() + else: + if task.needs_execute(): + # dispatch task + self.tp.put(task) + jobs = jobs + 1 + else: + task.executed() + task.postprocess() + + if not task and not jobs: break + + # Let any/all completed tasks finish up before we go + # back and put the next batch of tasks on the queue. + while 1: + task, ok = self.tp.get() + jobs = jobs - 1 + + if ok: + task.executed() + else: + if self.interrupted(): + try: + raise SCons.Errors.BuildError( + task.targets[0], errstr=interrupt_msg) + except: + task.exception_set() + + # Let the failed() callback function arrange + # for the build to stop if that's appropriate. 
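# A stripped-down sketch of the dispatch/drain structure of Parallel.start()
# above: keep at most maxjobs tasks outstanding, then collect at least one
# result before topping the pool up again.  'pool' is assumed to provide the
# put()/get() interface and resultsQueue attribute of the ThreadPool class
# above; the tasks are illustrative objects with executed()/failed() methods,
# and prepare()/needs_execute() handling is omitted for brevity.
def run_parallel(taskmaster, pool, maxjobs):
    jobs = 0
    while 1:
        while jobs < maxjobs:
            task = taskmaster.next_task()
            if task is None:
                break
            pool.put(task)          # dispatch; a worker will call execute()
            jobs = jobs + 1
        if not task and not jobs:
            break
        while 1:
            task, ok = pool.get()   # blocks until one result is available
            jobs = jobs - 1
            if ok:
                task.executed()
            else:
                task.failed()
            if pool.resultsQueue.empty():
                break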
+ task.failed() + + task.postprocess() + + if self.tp.resultsQueue.empty(): + break + + self.tp.cleanup() + self.taskmaster.cleanup() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Memoize.py b/engine/SCons/Memoize.py new file mode 100644 index 0000000..61e9557 --- /dev/null +++ b/engine/SCons/Memoize.py @@ -0,0 +1,292 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Memoize.py 4577 2009/12/27 19:43:56 scons" + +__doc__ = """Memoizer + +A metaclass implementation to count hits and misses of the computed +values that various methods cache in memory. + +Use of this modules assumes that wrapped methods be coded to cache their +values in a consistent way. Here is an example of wrapping a method +that returns a computed value, with no input parameters: + + memoizer_counters = [] # Memoization + + memoizer_counters.append(SCons.Memoize.CountValue('foo')) # Memoization + + def foo(self): + + try: # Memoization + return self._memo['foo'] # Memoization + except KeyError: # Memoization + pass # Memoization + + result = self.compute_foo_value() + + self._memo['foo'] = result # Memoization + + return result + +Here is an example of wrapping a method that will return different values +based on one or more input arguments: + + def _bar_key(self, argument): # Memoization + return argument # Memoization + + memoizer_counters.append(SCons.Memoize.CountDict('bar', _bar_key)) # Memoization + + def bar(self, argument): + + memo_key = argument # Memoization + try: # Memoization + memo_dict = self._memo['bar'] # Memoization + except KeyError: # Memoization + memo_dict = {} # Memoization + self._memo['dict'] = memo_dict # Memoization + else: # Memoization + try: # Memoization + return memo_dict[memo_key] # Memoization + except KeyError: # Memoization + pass # Memoization + + result = self.compute_bar_value(argument) + + memo_dict[memo_key] = result # Memoization + + return result + +At one point we avoided replicating this sort of logic in all the methods +by putting it right into this module, but we've moved away from that at +present (see the "Historical Note," below.). + +Deciding what to cache is tricky, because different configurations +can have radically different performance tradeoffs, and because the +tradeoffs involved are often so non-obvious. 
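# A compact, self-contained instance of the _memo convention illustrated in
# the docstring above, for a method whose result depends on one argument:
# the per-method cache dictionary is stored in self._memo under the method's
# own name.  PathCache and normalize() are illustrative names only.
class PathCache:
    def __init__(self):
        self._memo = {}                           # per-instance memo store

    def normalize(self, path):
        memo_key = path
        try:
            memo_dict = self._memo['normalize']
        except KeyError:
            memo_dict = {}
            self._memo['normalize'] = memo_dict   # keyed by the method name
        else:
            try:
                return memo_dict[memo_key]        # hit
            except KeyError:
                pass                              # miss: fall through
        result = '/'.join(p for p in path.split('/') if p and p != '.')
        memo_dict[memo_key] = result
        return result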
Consequently, deciding +whether or not to cache a given method will likely be more of an art than +a science, but should still be based on available data from this module. +Here are some VERY GENERAL guidelines about deciding whether or not to +cache return values from a method that's being called a lot: + + -- The first question to ask is, "Can we change the calling code + so this method isn't called so often?" Sometimes this can be + done by changing the algorithm. Sometimes the *caller* should + be memoized, not the method you're looking at. + + -- The memoized function should be timed with multiple configurations + to make sure it doesn't inadvertently slow down some other + configuration. + + -- When memoizing values based on a dictionary key composed of + input arguments, you don't need to use all of the arguments + if some of them don't affect the return values. + +Historical Note: The initial Memoizer implementation actually handled +the caching of values for the wrapped methods, based on a set of generic +algorithms for computing hashable values based on the method's arguments. +This collected caching logic nicely, but had two drawbacks: + + Running arguments through a generic key-conversion mechanism is slower + (and less flexible) than just coding these things directly. Since the + methods that need memoized values are generally performance-critical, + slowing them down in order to collect the logic isn't the right + tradeoff. + + Use of the memoizer really obscured what was being called, because + all the memoized methods were wrapped with re-used generic methods. + This made it more difficult, for example, to use the Python profiler + to figure out how to optimize the underlying methods. +""" + +import new + +# A flag controlling whether or not we actually use memoization. +use_memoizer = None + +CounterList = [] + +class Counter: + """ + Base class for counting memoization hits and misses. + + We expect that the metaclass initialization will have filled in + the .name attribute that represents the name of the function + being counted. + """ + def __init__(self, method_name): + """ + """ + self.method_name = method_name + self.hit = 0 + self.miss = 0 + CounterList.append(self) + def display(self): + fmt = " %7d hits %7d misses %s()" + print fmt % (self.hit, self.miss, self.name) + def __cmp__(self, other): + try: + return cmp(self.name, other.name) + except AttributeError: + return 0 + +class CountValue(Counter): + """ + A counter class for simple, atomic memoized values. + + A CountValue object should be instantiated in a class for each of + the class's methods that memoizes its return value by simply storing + the return value in its _memo dictionary. + + We expect that the metaclass initialization will fill in the + .underlying_method attribute with the method that we're wrapping. + We then call the underlying_method method after counting whether + its memoized value has already been set (a hit) or not (a miss). + """ + def __call__(self, *args, **kw): + obj = args[0] + if obj._memo.has_key(self.method_name): + self.hit = self.hit + 1 + else: + self.miss = self.miss + 1 + return apply(self.underlying_method, args, kw) + +class CountDict(Counter): + """ + A counter class for memoized values stored in a dictionary, with + keys based on the method's input arguments. 
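# A hand-rolled version of the hit/miss bookkeeping that CountValue performs
# above, written without the metaclass machinery so the counting is explicit.
# HitMissCounter and cached_stat() are illustrative names; 'node' is assumed
# to carry a _memo dictionary and a do_stat() method that does the real work.
class HitMissCounter:
    def __init__(self, name):
        self.name = name
        self.hit = 0
        self.miss = 0
    def display(self):
        print("  %7d hits %7d misses %s()" % (self.hit, self.miss, self.name))

stat_counter = HitMissCounter('cached_stat')

def cached_stat(node):
    # Count a hit if the value is already memoized, a miss otherwise,
    # then fall through to the normal _memo lookup.
    if 'stat' in node._memo:
        stat_counter.hit = stat_counter.hit + 1
    else:
        stat_counter.miss = stat_counter.miss + 1
    try:
        return node._memo['stat']
    except KeyError:
        pass
    result = node.do_stat()
    node._memo['stat'] = result
    return result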
+ + A CountDict object is instantiated in a class for each of the + class's methods that memoizes its return value in a dictionary, + indexed by some key that can be computed from one or more of + its input arguments. + + We expect that the metaclass initialization will fill in the + .underlying_method attribute with the method that we're wrapping. + We then call the underlying_method method after counting whether the + computed key value is already present in the memoization dictionary + (a hit) or not (a miss). + """ + def __init__(self, method_name, keymaker): + """ + """ + Counter.__init__(self, method_name) + self.keymaker = keymaker + def __call__(self, *args, **kw): + obj = args[0] + try: + memo_dict = obj._memo[self.method_name] + except KeyError: + self.miss = self.miss + 1 + else: + key = apply(self.keymaker, args, kw) + if memo_dict.has_key(key): + self.hit = self.hit + 1 + else: + self.miss = self.miss + 1 + return apply(self.underlying_method, args, kw) + +class Memoizer: + """Object which performs caching of method calls for its 'primary' + instance.""" + + def __init__(self): + pass + +# Find out if we support metaclasses (Python 2.2 and later). + +class M: + def __init__(cls, name, bases, cls_dict): + cls.use_metaclass = 1 + def fake_method(self): + pass + new.instancemethod(fake_method, None, cls) + +try: + class A: + __metaclass__ = M + + use_metaclass = A.use_metaclass +except AttributeError: + use_metaclass = None + reason = 'no metaclasses' +except TypeError: + use_metaclass = None + reason = 'new.instancemethod() bug' +else: + del A + +del M + +if not use_metaclass: + + def Dump(title): + pass + + try: + class Memoized_Metaclass(type): + # Just a place-holder so pre-metaclass Python versions don't + # have to have special code for the Memoized classes. + pass + except TypeError: + class Memoized_Metaclass: + # A place-holder so pre-metaclass Python versions don't + # have to have special code for the Memoized classes. + pass + + def EnableMemoization(): + import SCons.Warnings + msg = 'memoization is not supported in this version of Python (%s)' + raise SCons.Warnings.NoMetaclassSupportWarning, msg % reason + +else: + + def Dump(title=None): + if title: + print title + CounterList.sort() + for counter in CounterList: + counter.display() + + class Memoized_Metaclass(type): + def __init__(cls, name, bases, cls_dict): + super(Memoized_Metaclass, cls).__init__(name, bases, cls_dict) + + for counter in cls_dict.get('memoizer_counters', []): + method_name = counter.method_name + + counter.name = cls.__name__ + '.' + method_name + counter.underlying_method = cls_dict[method_name] + + replacement_method = new.instancemethod(counter, None, cls) + setattr(cls, method_name, replacement_method) + + def EnableMemoization(): + global use_memoizer + use_memoizer = 1 + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Node/Alias.py b/engine/SCons/Node/Alias.py new file mode 100644 index 0000000..da0391f --- /dev/null +++ b/engine/SCons/Node/Alias.py @@ -0,0 +1,153 @@ + +"""scons.Node.Alias + +Alias nodes. + +This creates a hash of global Aliases (dummy targets). 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Node/Alias.py 4577 2009/12/27 19:43:56 scons" + +import string +import UserDict + +import SCons.Errors +import SCons.Node +import SCons.Util + +class AliasNameSpace(UserDict.UserDict): + def Alias(self, name, **kw): + if isinstance(name, SCons.Node.Alias.Alias): + return name + try: + a = self[name] + except KeyError: + a = apply(SCons.Node.Alias.Alias, (name,), kw) + self[name] = a + return a + + def lookup(self, name, **kw): + try: + return self[name] + except KeyError: + return None + +class AliasNodeInfo(SCons.Node.NodeInfoBase): + current_version_id = 1 + field_list = ['csig'] + def str_to_node(self, s): + return default_ans.Alias(s) + +class AliasBuildInfo(SCons.Node.BuildInfoBase): + current_version_id = 1 + +class Alias(SCons.Node.Node): + + NodeInfo = AliasNodeInfo + BuildInfo = AliasBuildInfo + + def __init__(self, name): + SCons.Node.Node.__init__(self) + self.name = name + + def str_for_display(self): + return '"' + self.__str__() + '"' + + def __str__(self): + return self.name + + def make_ready(self): + self.get_csig() + + really_build = SCons.Node.Node.build + is_up_to_date = SCons.Node.Node.children_are_up_to_date + + def is_under(self, dir): + # Make Alias nodes get built regardless of + # what directory scons was run from. Alias nodes + # are outside the filesystem: + return 1 + + def get_contents(self): + """The contents of an alias is the concatenation + of the content signatures of all its sources.""" + childsigs = map(lambda n: n.get_csig(), self.children()) + return string.join(childsigs, '') + + def sconsign(self): + """An Alias is not recorded in .sconsign files""" + pass + + # + # + # + + def changed_since_last_build(self, target, prev_ni): + cur_csig = self.get_csig() + try: + return cur_csig != prev_ni.csig + except AttributeError: + return 1 + + def build(self): + """A "builder" for aliases.""" + pass + + def convert(self): + try: del self.builder + except AttributeError: pass + self.reset_executor() + self.build = self.really_build + + def get_csig(self): + """ + Generate a node's content signature, the digested signature + of its content. 
+ + node - the node + cache - alternate node to use for the signature cache + returns - the content signature + """ + try: + return self.ninfo.csig + except AttributeError: + pass + + contents = self.get_contents() + csig = SCons.Util.MD5signature(contents) + self.get_ninfo().csig = csig + return csig + +default_ans = AliasNameSpace() + +SCons.Node.arg2nodes_lookups.append(default_ans.lookup) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Node/FS.py b/engine/SCons/Node/FS.py new file mode 100644 index 0000000..fd21e73 --- /dev/null +++ b/engine/SCons/Node/FS.py @@ -0,0 +1,3220 @@ +"""scons.Node.FS + +File system nodes. + +These Nodes represent the canonical external objects that people think +of when they think of building software: files and directories. + +This holds a "default_fs" variable that should be initialized with an FS +that can be used by scripts or modules looking for the canonical default. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Node/FS.py 4577 2009/12/27 19:43:56 scons" + +from itertools import izip +import cStringIO +import fnmatch +import os +import os.path +import re +import shutil +import stat +import string +import sys +import time + +try: + import codecs +except ImportError: + pass +else: + # TODO(2.2): Remove when 2.3 becomes the minimal supported version. + try: + codecs.BOM_UTF8 + except AttributeError: + codecs.BOM_UTF8 = '\xef\xbb\xbf' + try: + codecs.BOM_UTF16_LE + codecs.BOM_UTF16_BE + except AttributeError: + codecs.BOM_UTF16_LE = '\xff\xfe' + codecs.BOM_UTF16_BE = '\xfe\xff' + + # Provide a wrapper function to handle decoding differences in + # different versions of Python. Normally, we'd try to do this in the + # compat layer (and maybe it still makes sense to move there?) but + # that doesn't provide a way to supply the string class used in + # pre-2.3 Python versions with a .decode() method that all strings + # naturally have. Plus, the 2.[01] encodings behave differently + # enough that we have to settle for a lowest-common-denominator + # wrapper approach. 
+ # + # Note that the 2.[012] implementations below may be inefficient + # because they perform an explicit look up of the encoding for every + # decode, but they're old enough (and we want to stop supporting + # them soon enough) that it's not worth complicating the interface. + # Think of it as additional incentive for people to upgrade... + try: + ''.decode + except AttributeError: + # 2.0 through 2.2: strings have no .decode() method + try: + codecs.lookup('ascii').decode + except AttributeError: + # 2.0 and 2.1: encodings are a tuple of functions, and the + # decode() function returns a (result, length) tuple. + def my_decode(contents, encoding): + return codecs.lookup(encoding)[1](contents)[0] + else: + # 2.2: encodings are an object with methods, and the + # .decode() method returns just the decoded bytes. + def my_decode(contents, encoding): + return codecs.lookup(encoding).decode(contents) + else: + # 2.3 or later: use the .decode() string method + def my_decode(contents, encoding): + return contents.decode(encoding) + +import SCons.Action +from SCons.Debug import logInstanceCreation +import SCons.Errors +import SCons.Memoize +import SCons.Node +import SCons.Node.Alias +import SCons.Subst +import SCons.Util +import SCons.Warnings + +from SCons.Debug import Trace + +do_store_info = True + + +class EntryProxyAttributeError(AttributeError): + """ + An AttributeError subclass for recording and displaying the name + of the underlying Entry involved in an AttributeError exception. + """ + def __init__(self, entry_proxy, attribute): + AttributeError.__init__(self) + self.entry_proxy = entry_proxy + self.attribute = attribute + def __str__(self): + entry = self.entry_proxy.get() + fmt = "%s instance %s has no attribute %s" + return fmt % (entry.__class__.__name__, + repr(entry.name), + repr(self.attribute)) + +# The max_drift value: by default, use a cached signature value for +# any file that's been untouched for more than two days. +default_max_drift = 2*24*60*60 + +# +# We stringify these file system Nodes a lot. Turning a file system Node +# into a string is non-trivial, because the final string representation +# can depend on a lot of factors: whether it's a derived target or not, +# whether it's linked to a repository or source directory, and whether +# there's duplication going on. The normal technique for optimizing +# calculations like this is to memoize (cache) the string value, so you +# only have to do the calculation once. +# +# A number of the above factors, however, can be set after we've already +# been asked to return a string for a Node, because a Repository() or +# VariantDir() call or the like may not occur until later in SConscript +# files. So this variable controls whether we bother trying to save +# string values for Nodes. The wrapper interface can set this whenever +# they're done mucking with Repository and VariantDir and the other stuff, +# to let this module know it can start returning saved string values +# for Nodes. +# +Save_Strings = None + +def save_strings(val): + global Save_Strings + Save_Strings = val + +# +# Avoid unnecessary function calls by recording a Boolean value that +# tells us whether or not os.path.splitdrive() actually does anything +# on this system, and therefore whether we need to bother calling it +# when looking up path names in various methods below. 
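# The my_decode selection above is an example of import-time feature
# detection: probe once, bind the best available implementation to a single
# name, and call it unconditionally afterwards.  The same idea reduced to a
# small sketch; intern_if_possible is an illustrative name.
import sys

try:
    sys.intern                      # Python 3 location
except AttributeError:
    try:
        intern                      # Python 2 builtin
    except NameError:
        def intern_if_possible(s):
            return s                # no interning available: identity
    else:
        def intern_if_possible(s):
            return intern(s)
else:
    def intern_if_possible(s):
        return sys.intern(s)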
+# + +do_splitdrive = None + +def initialize_do_splitdrive(): + global do_splitdrive + drive, path = os.path.splitdrive('X:/foo') + do_splitdrive = not not drive + +initialize_do_splitdrive() + +# + +needs_normpath_check = None + +def initialize_normpath_check(): + """ + Initialize the normpath_check regular expression. + + This function is used by the unit tests to re-initialize the pattern + when testing for behavior with different values of os.sep. + """ + global needs_normpath_check + if os.sep == '/': + pattern = r'.*/|\.$|\.\.$' + else: + pattern = r'.*[/%s]|\.$|\.\.$' % re.escape(os.sep) + needs_normpath_check = re.compile(pattern) + +initialize_normpath_check() + +# +# SCons.Action objects for interacting with the outside world. +# +# The Node.FS methods in this module should use these actions to +# create and/or remove files and directories; they should *not* use +# os.{link,symlink,unlink,mkdir}(), etc., directly. +# +# Using these SCons.Action objects ensures that descriptions of these +# external activities are properly displayed, that the displays are +# suppressed when the -s (silent) option is used, and (most importantly) +# the actions are disabled when the the -n option is used, in which case +# there should be *no* changes to the external file system(s)... +# + +if hasattr(os, 'link'): + def _hardlink_func(fs, src, dst): + # If the source is a symlink, we can't just hard-link to it + # because a relative symlink may point somewhere completely + # different. We must disambiguate the symlink and then + # hard-link the final destination file. + while fs.islink(src): + link = fs.readlink(src) + if not os.path.isabs(link): + src = link + else: + src = os.path.join(os.path.dirname(src), link) + fs.link(src, dst) +else: + _hardlink_func = None + +if hasattr(os, 'symlink'): + def _softlink_func(fs, src, dst): + fs.symlink(src, dst) +else: + _softlink_func = None + +def _copy_func(fs, src, dest): + shutil.copy2(src, dest) + st = fs.stat(src) + fs.chmod(dest, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) + + +Valid_Duplicates = ['hard-soft-copy', 'soft-hard-copy', + 'hard-copy', 'soft-copy', 'copy'] + +Link_Funcs = [] # contains the callables of the specified duplication style + +def set_duplicate(duplicate): + # Fill in the Link_Funcs list according to the argument + # (discarding those not available on the platform). + + # Set up the dictionary that maps the argument names to the + # underlying implementations. We do this inside this function, + # not in the top-level module code, so that we can remap os.link + # and os.symlink for testing purposes. + link_dict = { + 'hard' : _hardlink_func, + 'soft' : _softlink_func, + 'copy' : _copy_func + } + + if not duplicate in Valid_Duplicates: + raise SCons.Errors.InternalError, ("The argument of set_duplicate " + "should be in Valid_Duplicates") + global Link_Funcs + Link_Funcs = [] + for func in string.split(duplicate,'-'): + if link_dict[func]: + Link_Funcs.append(link_dict[func]) + +def LinkFunc(target, source, env): + # Relative paths cause problems with symbolic links, so + # we use absolute paths, which may be a problem for people + # who want to move their soft-linked src-trees around. Those + # people should use the 'hard-copy' mode, softlinks cannot be + # used for that; at least I have no idea how ... + src = source[0].abspath + dest = target[0].abspath + dir, file = os.path.split(dest) + if dir and not target[0].fs.isdir(dir): + os.makedirs(dir) + if not Link_Funcs: + # Set a default order of link functions. 
+ set_duplicate('hard-soft-copy') + fs = source[0].fs + # Now link the files with the previously specified order. + for func in Link_Funcs: + try: + func(fs, src, dest) + break + except (IOError, OSError): + # An OSError indicates something happened like a permissions + # problem or an attempt to symlink across file-system + # boundaries. An IOError indicates something like the file + # not existing. In either case, keeping trying additional + # functions in the list and only raise an error if the last + # one failed. + if func == Link_Funcs[-1]: + # exception of the last link method (copy) are fatal + raise + return 0 + +Link = SCons.Action.Action(LinkFunc, None) +def LocalString(target, source, env): + return 'Local copy of %s from %s' % (target[0], source[0]) + +LocalCopy = SCons.Action.Action(LinkFunc, LocalString) + +def UnlinkFunc(target, source, env): + t = target[0] + t.fs.unlink(t.abspath) + return 0 + +Unlink = SCons.Action.Action(UnlinkFunc, None) + +def MkdirFunc(target, source, env): + t = target[0] + if not t.exists(): + t.fs.mkdir(t.abspath) + return 0 + +Mkdir = SCons.Action.Action(MkdirFunc, None, presub=None) + +MkdirBuilder = None + +def get_MkdirBuilder(): + global MkdirBuilder + if MkdirBuilder is None: + import SCons.Builder + import SCons.Defaults + # "env" will get filled in by Executor.get_build_env() + # calling SCons.Defaults.DefaultEnvironment() when necessary. + MkdirBuilder = SCons.Builder.Builder(action = Mkdir, + env = None, + explain = None, + is_explicit = None, + target_scanner = SCons.Defaults.DirEntryScanner, + name = "MkdirBuilder") + return MkdirBuilder + +class _Null: + pass + +_null = _Null() + +DefaultSCCSBuilder = None +DefaultRCSBuilder = None + +def get_DefaultSCCSBuilder(): + global DefaultSCCSBuilder + if DefaultSCCSBuilder is None: + import SCons.Builder + # "env" will get filled in by Executor.get_build_env() + # calling SCons.Defaults.DefaultEnvironment() when necessary. + act = SCons.Action.Action('$SCCSCOM', '$SCCSCOMSTR') + DefaultSCCSBuilder = SCons.Builder.Builder(action = act, + env = None, + name = "DefaultSCCSBuilder") + return DefaultSCCSBuilder + +def get_DefaultRCSBuilder(): + global DefaultRCSBuilder + if DefaultRCSBuilder is None: + import SCons.Builder + # "env" will get filled in by Executor.get_build_env() + # calling SCons.Defaults.DefaultEnvironment() when necessary. + act = SCons.Action.Action('$RCS_COCOM', '$RCS_COCOMSTR') + DefaultRCSBuilder = SCons.Builder.Builder(action = act, + env = None, + name = "DefaultRCSBuilder") + return DefaultRCSBuilder + +# Cygwin's os.path.normcase pretends it's on a case-sensitive filesystem. +_is_cygwin = sys.platform == "cygwin" +if os.path.normcase("TeSt") == os.path.normpath("TeSt") and not _is_cygwin: + def _my_normcase(x): + return x +else: + def _my_normcase(x): + return string.upper(x) + + + +class DiskChecker: + def __init__(self, type, do, ignore): + self.type = type + self.do = do + self.ignore = ignore + self.set_do() + def set_do(self): + self.__call__ = self.do + def set_ignore(self): + self.__call__ = self.ignore + def set(self, list): + if self.type in list: + self.set_do() + else: + self.set_ignore() + +def do_diskcheck_match(node, predicate, errorfmt): + result = predicate() + try: + # If calling the predicate() cached a None value from stat(), + # remove it so it doesn't interfere with later attempts to + # build this Node as we walk the DAG. 
(This isn't a great way + # to do this, we're reaching into an interface that doesn't + # really belong to us, but it's all about performance, so + # for now we'll just document the dependency...) + if node._memo['stat'] is None: + del node._memo['stat'] + except (AttributeError, KeyError): + pass + if result: + raise TypeError, errorfmt % node.abspath + +def ignore_diskcheck_match(node, predicate, errorfmt): + pass + +def do_diskcheck_rcs(node, name): + try: + rcs_dir = node.rcs_dir + except AttributeError: + if node.entry_exists_on_disk('RCS'): + rcs_dir = node.Dir('RCS') + else: + rcs_dir = None + node.rcs_dir = rcs_dir + if rcs_dir: + return rcs_dir.entry_exists_on_disk(name+',v') + return None + +def ignore_diskcheck_rcs(node, name): + return None + +def do_diskcheck_sccs(node, name): + try: + sccs_dir = node.sccs_dir + except AttributeError: + if node.entry_exists_on_disk('SCCS'): + sccs_dir = node.Dir('SCCS') + else: + sccs_dir = None + node.sccs_dir = sccs_dir + if sccs_dir: + return sccs_dir.entry_exists_on_disk('s.'+name) + return None + +def ignore_diskcheck_sccs(node, name): + return None + +diskcheck_match = DiskChecker('match', do_diskcheck_match, ignore_diskcheck_match) +diskcheck_rcs = DiskChecker('rcs', do_diskcheck_rcs, ignore_diskcheck_rcs) +diskcheck_sccs = DiskChecker('sccs', do_diskcheck_sccs, ignore_diskcheck_sccs) + +diskcheckers = [ + diskcheck_match, + diskcheck_rcs, + diskcheck_sccs, +] + +def set_diskcheck(list): + for dc in diskcheckers: + dc.set(list) + +def diskcheck_types(): + return map(lambda dc: dc.type, diskcheckers) + + + +class EntryProxy(SCons.Util.Proxy): + def __get_abspath(self): + entry = self.get() + return SCons.Subst.SpecialAttrWrapper(entry.get_abspath(), + entry.name + "_abspath") + + def __get_filebase(self): + name = self.get().name + return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[0], + name + "_filebase") + + def __get_suffix(self): + name = self.get().name + return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[1], + name + "_suffix") + + def __get_file(self): + name = self.get().name + return SCons.Subst.SpecialAttrWrapper(name, name + "_file") + + def __get_base_path(self): + """Return the file's directory and file name, with the + suffix stripped.""" + entry = self.get() + return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(entry.get_path())[0], + entry.name + "_base") + + def __get_posix_path(self): + """Return the path with / as the path separator, + regardless of platform.""" + if os.sep == '/': + return self + else: + entry = self.get() + r = string.replace(entry.get_path(), os.sep, '/') + return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_posix") + + def __get_windows_path(self): + """Return the path with \ as the path separator, + regardless of platform.""" + if os.sep == '\\': + return self + else: + entry = self.get() + r = string.replace(entry.get_path(), os.sep, '\\') + return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_windows") + + def __get_srcnode(self): + return EntryProxy(self.get().srcnode()) + + def __get_srcdir(self): + """Returns the directory containing the source node linked to this + node via VariantDir(), or the directory of this node if not linked.""" + return EntryProxy(self.get().srcnode().dir) + + def __get_rsrcnode(self): + return EntryProxy(self.get().srcnode().rfile()) + + def __get_rsrcdir(self): + """Returns the directory containing the source node linked to this + node via VariantDir(), or the directory of this node if not linked.""" + return 
EntryProxy(self.get().srcnode().rfile().dir) + + def __get_dir(self): + return EntryProxy(self.get().dir) + + dictSpecialAttrs = { "base" : __get_base_path, + "posix" : __get_posix_path, + "windows" : __get_windows_path, + "win32" : __get_windows_path, + "srcpath" : __get_srcnode, + "srcdir" : __get_srcdir, + "dir" : __get_dir, + "abspath" : __get_abspath, + "filebase" : __get_filebase, + "suffix" : __get_suffix, + "file" : __get_file, + "rsrcpath" : __get_rsrcnode, + "rsrcdir" : __get_rsrcdir, + } + + def __getattr__(self, name): + # This is how we implement the "special" attributes + # such as base, posix, srcdir, etc. + try: + attr_function = self.dictSpecialAttrs[name] + except KeyError: + try: + attr = SCons.Util.Proxy.__getattr__(self, name) + except AttributeError, e: + # Raise our own AttributeError subclass with an + # overridden __str__() method that identifies the + # name of the entry that caused the exception. + raise EntryProxyAttributeError(self, name) + return attr + else: + return attr_function(self) + +class Base(SCons.Node.Node): + """A generic class for file system entries. This class is for + when we don't know yet whether the entry being looked up is a file + or a directory. Instances of this class can morph into either + Dir or File objects by a later, more precise lookup. + + Note: this class does not define __cmp__ and __hash__ for + efficiency reasons. SCons does a lot of comparing of + Node.FS.{Base,Entry,File,Dir} objects, so those operations must be + as fast as possible, which means we want to use Python's built-in + object identity comparisons. + """ + + memoizer_counters = [] + + def __init__(self, name, directory, fs): + """Initialize a generic Node.FS.Base object. + + Call the superclass initialization, take care of setting up + our relative and absolute paths, identify our parent + directory, and indicate that this node should use + signatures.""" + if __debug__: logInstanceCreation(self, 'Node.FS.Base') + SCons.Node.Node.__init__(self) + + # Filenames and paths are probably reused and are intern'ed to + # save some memory. + self.name = SCons.Util.silent_intern(name) + self.suffix = SCons.Util.silent_intern(SCons.Util.splitext(name)[1]) + self.fs = fs + + assert directory, "A directory must be provided" + + self.abspath = SCons.Util.silent_intern(directory.entry_abspath(name)) + self.labspath = SCons.Util.silent_intern(directory.entry_labspath(name)) + if directory.path == '.': + self.path = SCons.Util.silent_intern(name) + else: + self.path = SCons.Util.silent_intern(directory.entry_path(name)) + if directory.tpath == '.': + self.tpath = SCons.Util.silent_intern(name) + else: + self.tpath = SCons.Util.silent_intern(directory.entry_tpath(name)) + self.path_elements = directory.path_elements + [self] + + self.dir = directory + self.cwd = None # will hold the SConscript directory for target nodes + self.duplicate = directory.duplicate + + def str_for_display(self): + return '"' + self.__str__() + '"' + + def must_be_same(self, klass): + """ + This node, which already existed, is being looked up as the + specified klass. Raise an exception if it isn't. + """ + if isinstance(self, klass) or klass is Entry: + return + raise TypeError, "Tried to lookup %s '%s' as a %s." 
%\ + (self.__class__.__name__, self.path, klass.__name__) + + def get_dir(self): + return self.dir + + def get_suffix(self): + return self.suffix + + def rfile(self): + return self + + def __str__(self): + """A Node.FS.Base object's string representation is its path + name.""" + global Save_Strings + if Save_Strings: + return self._save_str() + return self._get_str() + + memoizer_counters.append(SCons.Memoize.CountValue('_save_str')) + + def _save_str(self): + try: + return self._memo['_save_str'] + except KeyError: + pass + result = intern(self._get_str()) + self._memo['_save_str'] = result + return result + + def _get_str(self): + global Save_Strings + if self.duplicate or self.is_derived(): + return self.get_path() + srcnode = self.srcnode() + if srcnode.stat() is None and self.stat() is not None: + result = self.get_path() + else: + result = srcnode.get_path() + if not Save_Strings: + # We're not at the point where we're saving the string string + # representations of FS Nodes (because we haven't finished + # reading the SConscript files and need to have str() return + # things relative to them). That also means we can't yet + # cache values returned (or not returned) by stat(), since + # Python code in the SConscript files might still create + # or otherwise affect the on-disk file. So get rid of the + # values that the underlying stat() method saved. + try: del self._memo['stat'] + except KeyError: pass + if self is not srcnode: + try: del srcnode._memo['stat'] + except KeyError: pass + return result + + rstr = __str__ + + memoizer_counters.append(SCons.Memoize.CountValue('stat')) + + def stat(self): + try: return self._memo['stat'] + except KeyError: pass + try: result = self.fs.stat(self.abspath) + except os.error: result = None + self._memo['stat'] = result + return result + + def exists(self): + return self.stat() is not None + + def rexists(self): + return self.rfile().exists() + + def getmtime(self): + st = self.stat() + if st: return st[stat.ST_MTIME] + else: return None + + def getsize(self): + st = self.stat() + if st: return st[stat.ST_SIZE] + else: return None + + def isdir(self): + st = self.stat() + return st is not None and stat.S_ISDIR(st[stat.ST_MODE]) + + def isfile(self): + st = self.stat() + return st is not None and stat.S_ISREG(st[stat.ST_MODE]) + + if hasattr(os, 'symlink'): + def islink(self): + try: st = self.fs.lstat(self.abspath) + except os.error: return 0 + return stat.S_ISLNK(st[stat.ST_MODE]) + else: + def islink(self): + return 0 # no symlinks + + def is_under(self, dir): + if self is dir: + return 1 + else: + return self.dir.is_under(dir) + + def set_local(self): + self._local = 1 + + def srcnode(self): + """If this node is in a build path, return the node + corresponding to its source file. Otherwise, return + ourself. + """ + srcdir_list = self.dir.srcdir_list() + if srcdir_list: + srcnode = srcdir_list[0].Entry(self.name) + srcnode.must_be_same(self.__class__) + return srcnode + return self + + def get_path(self, dir=None): + """Return path relative to the current working directory of the + Node.FS.Base object that owns us.""" + if not dir: + dir = self.fs.getcwd() + if self == dir: + return '.' 
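# The EntryProxy.__getattr__ above dispatches "special" attribute names
# through the dictSpecialAttrs table before falling back to the wrapped
# object.  The same table-driven technique in miniature; AttrProxy, Thing
# and the attribute names are illustrative only.
class AttrProxy(object):
    def __init__(self, wrapped):
        self.__dict__['_wrapped'] = wrapped

    def _get_upper(self):
        return self._wrapped.name.upper()

    def _get_length(self):
        return len(self._wrapped.name)

    _special = {'upper': _get_upper, 'length': _get_length}

    def __getattr__(self, name):
        try:
            handler = self._special[name]
        except KeyError:
            return getattr(self._wrapped, name)   # normal fallback
        return handler(self)

class Thing(object):
    def __init__(self, name):
        self.name = name

p = AttrProxy(Thing('node'))
assert p.upper == 'NODE' and p.length == 4 and p.name == 'node'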
+ path_elems = self.path_elements + try: i = path_elems.index(dir) + except ValueError: pass + else: path_elems = path_elems[i+1:] + path_elems = map(lambda n: n.name, path_elems) + return string.join(path_elems, os.sep) + + def set_src_builder(self, builder): + """Set the source code builder for this node.""" + self.sbuilder = builder + if not self.has_builder(): + self.builder_set(builder) + + def src_builder(self): + """Fetch the source code builder for this node. + + If there isn't one, we cache the source code builder specified + for the directory (which in turn will cache the value from its + parent directory, and so on up to the file system root). + """ + try: + scb = self.sbuilder + except AttributeError: + scb = self.dir.src_builder() + self.sbuilder = scb + return scb + + def get_abspath(self): + """Get the absolute path of the file.""" + return self.abspath + + def for_signature(self): + # Return just our name. Even an absolute path would not work, + # because that can change thanks to symlinks or remapped network + # paths. + return self.name + + def get_subst_proxy(self): + try: + return self._proxy + except AttributeError: + ret = EntryProxy(self) + self._proxy = ret + return ret + + def target_from_source(self, prefix, suffix, splitext=SCons.Util.splitext): + """ + + Generates a target entry that corresponds to this entry (usually + a source file) with the specified prefix and suffix. + + Note that this method can be overridden dynamically for generated + files that need different behavior. See Tool/swig.py for + an example. + """ + return self.dir.Entry(prefix + splitext(self.name)[0] + suffix) + + def _Rfindalldirs_key(self, pathlist): + return pathlist + + memoizer_counters.append(SCons.Memoize.CountDict('Rfindalldirs', _Rfindalldirs_key)) + + def Rfindalldirs(self, pathlist): + """ + Return all of the directories for a given path list, including + corresponding "backing" directories in any repositories. + + The Node lookups are relative to this Node (typically a + directory), so memoizing result saves cycles from looking + up the same path for each target in a given directory. + """ + try: + memo_dict = self._memo['Rfindalldirs'] + except KeyError: + memo_dict = {} + self._memo['Rfindalldirs'] = memo_dict + else: + try: + return memo_dict[pathlist] + except KeyError: + pass + + create_dir_relative_to_self = self.Dir + result = [] + for path in pathlist: + if isinstance(path, SCons.Node.Node): + result.append(path) + else: + dir = create_dir_relative_to_self(path) + result.extend(dir.get_all_rdirs()) + + memo_dict[pathlist] = result + + return result + + def RDirs(self, pathlist): + """Search for a list of directories in the Repository list.""" + cwd = self.cwd or self.fs._cwd + return cwd.Rfindalldirs(pathlist) + + memoizer_counters.append(SCons.Memoize.CountValue('rentry')) + + def rentry(self): + try: + return self._memo['rentry'] + except KeyError: + pass + result = self + if not self.exists(): + norm_name = _my_normcase(self.name) + for dir in self.dir.get_all_rdirs(): + try: + node = dir.entries[norm_name] + except KeyError: + if dir.entry_exists_on_disk(self.name): + result = dir.Entry(self.name) + break + self._memo['rentry'] = result + return result + + def _glob1(self, pattern, ondisk=True, source=False, strings=False): + return [] + +class Entry(Base): + """This is the class for generic Node.FS entries--that is, things + that could be a File or a Dir, but we're just not sure yet. 
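# A reduced model of the stat() caching in the Base class above: the result
# of one os.stat() call is memoized per node, a failed stat is cached as
# None (a "known missing" marker), and the cached value can be dropped while
# the file may still be created, as _get_str() does.  CachedPath is an
# illustrative stand-in, not an SCons class.
import os

class CachedPath:
    def __init__(self, path):
        self.path = path
        self._memo = {}

    def stat(self):
        try:
            return self._memo['stat']
        except KeyError:
            pass
        try:
            result = os.stat(self.path)
        except os.error:
            result = None               # cache the miss too
        self._memo['stat'] = result
        return result

    def exists(self):
        return self.stat() is not None

    def clear_stat(self):
        # Invalidate the cache when the file may have been created or
        # removed since the last lookup.
        self._memo.pop('stat', None)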
+ Consequently, the methods in this class really exist just to + transform their associated object into the right class when the + time comes, and then call the same-named method in the transformed + class.""" + + def diskcheck_match(self): + pass + + def disambiguate(self, must_exist=None): + """ + """ + if self.isdir(): + self.__class__ = Dir + self._morph() + elif self.isfile(): + self.__class__ = File + self._morph() + self.clear() + else: + # There was nothing on-disk at this location, so look in + # the src directory. + # + # We can't just use self.srcnode() straight away because + # that would create an actual Node for this file in the src + # directory, and there might not be one. Instead, use the + # dir_on_disk() method to see if there's something on-disk + # with that name, in which case we can go ahead and call + # self.srcnode() to create the right type of entry. + srcdir = self.dir.srcnode() + if srcdir != self.dir and \ + srcdir.entry_exists_on_disk(self.name) and \ + self.srcnode().isdir(): + self.__class__ = Dir + self._morph() + elif must_exist: + msg = "No such file or directory: '%s'" % self.abspath + raise SCons.Errors.UserError, msg + else: + self.__class__ = File + self._morph() + self.clear() + return self + + def rfile(self): + """We're a generic Entry, but the caller is actually looking for + a File at this point, so morph into one.""" + self.__class__ = File + self._morph() + self.clear() + return File.rfile(self) + + def scanner_key(self): + return self.get_suffix() + + def get_contents(self): + """Fetch the contents of the entry. Returns the exact binary + contents of the file.""" + try: + self = self.disambiguate(must_exist=1) + except SCons.Errors.UserError: + # There was nothing on disk with which to disambiguate + # this entry. Leave it as an Entry, but return a null + # string so calls to get_contents() in emitters and the + # like (e.g. in qt.py) don't have to disambiguate by hand + # or catch the exception. + return '' + else: + return self.get_contents() + + def get_text_contents(self): + """Fetch the decoded text contents of a Unicode encoded Entry. + + Since this should return the text contents from the file + system, we check to see into what sort of subclass we should + morph this Entry.""" + try: + self = self.disambiguate(must_exist=1) + except SCons.Errors.UserError: + # There was nothing on disk with which to disambiguate + # this entry. Leave it as an Entry, but return a null + # string so calls to get_text_contents() in emitters and + # the like (e.g. in qt.py) don't have to disambiguate by + # hand or catch the exception. + return '' + else: + return self.get_text_contents() + + def must_be_same(self, klass): + """Called to make sure a Node is a Dir. Since we're an + Entry, we can morph into one.""" + if self.__class__ is not klass: + self.__class__ = klass + self._morph() + self.clear() + + # The following methods can get called before the Taskmaster has + # had a chance to call disambiguate() directly to see if this Entry + # should really be a Dir or a File. We therefore use these to call + # disambiguate() transparently (from our caller's point of view). + # + # Right now, this minimal set of methods has been derived by just + # looking at some of the methods that will obviously be called early + # in any of the various Taskmasters' calling sequences, and then + # empirically figuring out which additional methods are necessary + # to make various tests pass. + + def exists(self): + """Return if the Entry exists. 
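# The disambiguation described above works by reassigning __class__ on a
# live instance, so later method lookups use the more specific type without
# building a new object.  A toy version with illustrative classes:
class Shape(object):
    def describe(self):
        return 'unknown shape'

class Circle(Shape):
    def describe(self):
        return 'circle'

class Square(Shape):
    def describe(self):
        return 'square'

def disambiguate(shape, sides):
    # Morph the generic object in place, as Entry.disambiguate() does.
    shape.__class__ = Circle if sides == 0 else Square
    return shape

s = Shape()
disambiguate(s, 0)
assert s.describe() == 'circle'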
Check the file system to see + what we should turn into first. Assume a file if there's no + directory.""" + return self.disambiguate().exists() + + def rel_path(self, other): + d = self.disambiguate() + if d.__class__ is Entry: + raise "rel_path() could not disambiguate File/Dir" + return d.rel_path(other) + + def new_ninfo(self): + return self.disambiguate().new_ninfo() + + def changed_since_last_build(self, target, prev_ni): + return self.disambiguate().changed_since_last_build(target, prev_ni) + + def _glob1(self, pattern, ondisk=True, source=False, strings=False): + return self.disambiguate()._glob1(pattern, ondisk, source, strings) + + def get_subst_proxy(self): + return self.disambiguate().get_subst_proxy() + +# This is for later so we can differentiate between Entry the class and Entry +# the method of the FS class. +_classEntry = Entry + + +class LocalFS: + + if SCons.Memoize.use_memoizer: + __metaclass__ = SCons.Memoize.Memoized_Metaclass + + # This class implements an abstraction layer for operations involving + # a local file system. Essentially, this wraps any function in + # the os, os.path or shutil modules that we use to actually go do + # anything with or to the local file system. + # + # Note that there's a very good chance we'll refactor this part of + # the architecture in some way as we really implement the interface(s) + # for remote file system Nodes. For example, the right architecture + # might be to have this be a subclass instead of a base class. + # Nevertheless, we're using this as a first step in that direction. + # + # We're not using chdir() yet because the calling subclass method + # needs to use os.chdir() directly to avoid recursion. Will we + # really need this one? + #def chdir(self, path): + # return os.chdir(path) + def chmod(self, path, mode): + return os.chmod(path, mode) + def copy(self, src, dst): + return shutil.copy(src, dst) + def copy2(self, src, dst): + return shutil.copy2(src, dst) + def exists(self, path): + return os.path.exists(path) + def getmtime(self, path): + return os.path.getmtime(path) + def getsize(self, path): + return os.path.getsize(path) + def isdir(self, path): + return os.path.isdir(path) + def isfile(self, path): + return os.path.isfile(path) + def link(self, src, dst): + return os.link(src, dst) + def lstat(self, path): + return os.lstat(path) + def listdir(self, path): + return os.listdir(path) + def makedirs(self, path): + return os.makedirs(path) + def mkdir(self, path): + return os.mkdir(path) + def rename(self, old, new): + return os.rename(old, new) + def stat(self, path): + return os.stat(path) + def symlink(self, src, dst): + return os.symlink(src, dst) + def open(self, path): + return open(path) + def unlink(self, path): + return os.unlink(path) + + if hasattr(os, 'symlink'): + def islink(self, path): + return os.path.islink(path) + else: + def islink(self, path): + return 0 # no symlinks + + if hasattr(os, 'readlink'): + def readlink(self, file): + return os.readlink(file) + else: + def readlink(self, file): + return '' + + +#class RemoteFS: +# # Skeleton for the obvious methods we might need from the +# # abstraction layer for a remote filesystem. +# def upload(self, local_src, remote_dst): +# pass +# def download(self, remote_src, local_dst): +# pass + + +class FS(LocalFS): + + memoizer_counters = [] + + def __init__(self, path = None): + """Initialize the Node.FS subsystem. + + The supplied path is the top of the source tree, where we + expect to find the top-level build file. 
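# The comment above explains that LocalFS exists so every disk operation can
# be swapped out (for example, for a remote or in-memory file system).  A
# minimal sketch of that seam with an in-memory stand-in for tests;
# MiniLocalFS, RecordingFS and ensure_dir() are purely illustrative.
import os

class MiniLocalFS:
    def exists(self, path):
        return os.path.exists(path)
    def mkdir(self, path):
        return os.mkdir(path)

class RecordingFS:
    """In-memory double: records calls instead of touching the disk."""
    def __init__(self):
        self.dirs = set()
    def exists(self, path):
        return path in self.dirs
    def mkdir(self, path):
        self.dirs.add(path)

def ensure_dir(fs, path):
    # Code written against the abstraction works with either backend.
    if not fs.exists(path):
        fs.mkdir(path)

fake = RecordingFS()
ensure_dir(fake, '/tmp/build')
assert fake.exists('/tmp/build')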
If no path is + supplied, the current directory is the default. + + The path argument must be a valid absolute path. + """ + if __debug__: logInstanceCreation(self, 'Node.FS') + + self._memo = {} + + self.Root = {} + self.SConstruct_dir = None + self.max_drift = default_max_drift + + self.Top = None + if path is None: + self.pathTop = os.getcwd() + else: + self.pathTop = path + self.defaultDrive = _my_normcase(os.path.splitdrive(self.pathTop)[0]) + + self.Top = self.Dir(self.pathTop) + self.Top.path = '.' + self.Top.tpath = '.' + self._cwd = self.Top + + DirNodeInfo.fs = self + FileNodeInfo.fs = self + + def set_SConstruct_dir(self, dir): + self.SConstruct_dir = dir + + def get_max_drift(self): + return self.max_drift + + def set_max_drift(self, max_drift): + self.max_drift = max_drift + + def getcwd(self): + return self._cwd + + def chdir(self, dir, change_os_dir=0): + """Change the current working directory for lookups. + If change_os_dir is true, we will also change the "real" cwd + to match. + """ + curr=self._cwd + try: + if dir is not None: + self._cwd = dir + if change_os_dir: + os.chdir(dir.abspath) + except OSError: + self._cwd = curr + raise + + def get_root(self, drive): + """ + Returns the root directory for the specified drive, creating + it if necessary. + """ + drive = _my_normcase(drive) + try: + return self.Root[drive] + except KeyError: + root = RootDir(drive, self) + self.Root[drive] = root + if not drive: + self.Root[self.defaultDrive] = root + elif drive == self.defaultDrive: + self.Root[''] = root + return root + + def _lookup(self, p, directory, fsclass, create=1): + """ + The generic entry point for Node lookup with user-supplied data. + + This translates arbitrary input into a canonical Node.FS object + of the specified fsclass. The general approach for strings is + to turn it into a fully normalized absolute path and then call + the root directory's lookup_abs() method for the heavy lifting. + + If the path name begins with '#', it is unconditionally + interpreted relative to the top-level directory of this FS. '#' + is treated as a synonym for the top-level SConstruct directory, + much like '~' is treated as a synonym for the user's home + directory in a UNIX shell. So both '#foo' and '#/foo' refer + to the 'foo' subdirectory underneath the top-level SConstruct + directory. + + If the path name is relative, then the path is looked up relative + to the specified directory, or the current directory (self._cwd, + typically the SConscript directory) if the specified directory + is None. + """ + if isinstance(p, Base): + # It's already a Node.FS object. Make sure it's the right + # class and return. + p.must_be_same(fsclass) + return p + # str(p) in case it's something like a proxy object + p = str(p) + + initial_hash = (p[0:1] == '#') + if initial_hash: + # There was an initial '#', so we strip it and override + # whatever directory they may have specified with the + # top-level SConstruct directory. + p = p[1:] + directory = self.Top + + if directory and not isinstance(directory, Dir): + directory = self.Dir(directory) + + if do_splitdrive: + drive, p = os.path.splitdrive(p) + else: + drive = '' + if drive and not p: + # This causes a naked drive letter to be treated as a synonym + # for the root directory on that drive. 
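# A small model of the '#' convention described in the _lookup() docstring
# above: '#foo' and '#/foo' resolve against the top-level directory, other
# relative names resolve against the current directory.  resolve() is an
# illustrative helper, not the SCons implementation; the example assumes
# POSIX-style paths.
import os.path

def resolve(p, top, cwd):
    if p[0:1] == '#':
        p = p.lstrip('#/')           # '#foo' and '#/foo' are equivalent
        base = top
    elif os.path.isabs(p):
        return os.path.normpath(p)
    else:
        base = cwd
    return os.path.normpath(os.path.join(base, p))

assert resolve('#src/foo.c', '/work', '/work/build') == '/work/src/foo.c'
assert resolve('foo.c', '/work', '/work/build') == '/work/build/foo.c'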
+ p = os.sep + absolute = os.path.isabs(p) + + needs_normpath = needs_normpath_check.match(p) + + if initial_hash or not absolute: + # This is a relative lookup, either to the top-level + # SConstruct directory (because of the initial '#') or to + # the current directory (the path name is not absolute). + # Add the string to the appropriate directory lookup path, + # after which the whole thing gets normalized. + if not directory: + directory = self._cwd + if p: + p = directory.labspath + '/' + p + else: + p = directory.labspath + + if needs_normpath: + p = os.path.normpath(p) + + if drive or absolute: + root = self.get_root(drive) + else: + if not directory: + directory = self._cwd + root = directory.root + + if os.sep != '/': + p = string.replace(p, os.sep, '/') + return root._lookup_abs(p, fsclass, create) + + def Entry(self, name, directory = None, create = 1): + """Look up or create a generic Entry node with the specified name. + If the name is a relative path (begins with ./, ../, or a file + name), then it is looked up relative to the supplied directory + node, or to the top level directory of the FS (supplied at + construction time) if no directory is supplied. + """ + return self._lookup(name, directory, Entry, create) + + def File(self, name, directory = None, create = 1): + """Look up or create a File node with the specified name. If + the name is a relative path (begins with ./, ../, or a file name), + then it is looked up relative to the supplied directory node, + or to the top level directory of the FS (supplied at construction + time) if no directory is supplied. + + This method will raise TypeError if a directory is found at the + specified path. + """ + return self._lookup(name, directory, File, create) + + def Dir(self, name, directory = None, create = True): + """Look up or create a Dir node with the specified name. If + the name is a relative path (begins with ./, ../, or a file name), + then it is looked up relative to the supplied directory node, + or to the top level directory of the FS (supplied at construction + time) if no directory is supplied. + + This method will raise TypeError if a normal file is found at the + specified path. + """ + return self._lookup(name, directory, Dir, create) + + def VariantDir(self, variant_dir, src_dir, duplicate=1): + """Link the supplied variant directory to the source directory + for purposes of building files.""" + + if not isinstance(src_dir, SCons.Node.Node): + src_dir = self.Dir(src_dir) + if not isinstance(variant_dir, SCons.Node.Node): + variant_dir = self.Dir(variant_dir) + if src_dir.is_under(variant_dir): + raise SCons.Errors.UserError, "Source directory cannot be under variant directory." + if variant_dir.srcdir: + if variant_dir.srcdir == src_dir: + return # We already did this. + raise SCons.Errors.UserError, "'%s' already has a source directory: '%s'."%(variant_dir, variant_dir.srcdir) + variant_dir.link(src_dir, duplicate) + + def Repository(self, *dirs): + """Specify Repository directories to search.""" + for d in dirs: + if not isinstance(d, SCons.Node.Node): + d = self.Dir(d) + self.Top.addRepository(d) + + def variant_dir_target_climb(self, orig, dir, tail): + """Create targets in corresponding variant directories + + Climb the directory tree, and look up path names + relative to any linked variant directories we find. + + Even though this loops and walks up the tree, we don't memoize + the return value because this is really only used to process + the command-line targets. 
+ """ + targets = [] + message = None + fmt = "building associated VariantDir targets: %s" + start_dir = dir + while dir: + for bd in dir.variant_dirs: + if start_dir.is_under(bd): + # If already in the build-dir location, don't reflect + return [orig], fmt % str(orig) + p = apply(os.path.join, [bd.path] + tail) + targets.append(self.Entry(p)) + tail = [dir.name] + tail + dir = dir.up() + if targets: + message = fmt % string.join(map(str, targets)) + return targets, message + + def Glob(self, pathname, ondisk=True, source=True, strings=False, cwd=None): + """ + Globs + + This is mainly a shim layer + """ + if cwd is None: + cwd = self.getcwd() + return cwd.glob(pathname, ondisk, source, strings) + +class DirNodeInfo(SCons.Node.NodeInfoBase): + # This should get reset by the FS initialization. + current_version_id = 1 + + fs = None + + def str_to_node(self, s): + top = self.fs.Top + root = top.root + if do_splitdrive: + drive, s = os.path.splitdrive(s) + if drive: + root = self.fs.get_root(drive) + if not os.path.isabs(s): + s = top.labspath + '/' + s + return root._lookup_abs(s, Entry) + +class DirBuildInfo(SCons.Node.BuildInfoBase): + current_version_id = 1 + +glob_magic_check = re.compile('[*?[]') + +def has_glob_magic(s): + return glob_magic_check.search(s) is not None + +class Dir(Base): + """A class for directories in a file system. + """ + + memoizer_counters = [] + + NodeInfo = DirNodeInfo + BuildInfo = DirBuildInfo + + def __init__(self, name, directory, fs): + if __debug__: logInstanceCreation(self, 'Node.FS.Dir') + Base.__init__(self, name, directory, fs) + self._morph() + + def _morph(self): + """Turn a file system Node (either a freshly initialized directory + object or a separate Entry object) into a proper directory object. + + Set up this directory's entries and hook it into the file + system tree. Specify that directories (this Node) don't use + signatures for calculating whether they're current. + """ + + self.repositories = [] + self.srcdir = None + + self.entries = {} + self.entries['.'] = self + self.entries['..'] = self.dir + self.cwd = self + self.searched = 0 + self._sconsign = None + self.variant_dirs = [] + self.root = self.dir.root + + # Don't just reset the executor, replace its action list, + # because it might have some pre-or post-actions that need to + # be preserved. + self.builder = get_MkdirBuilder() + self.get_executor().set_action_list(self.builder.action) + + def diskcheck_match(self): + diskcheck_match(self, self.isfile, + "File %s found where directory expected.") + + def __clearRepositoryCache(self, duplicate=None): + """Called when we change the repository(ies) for a directory. + This clears any cached information that is invalidated by changing + the repository.""" + + for node in self.entries.values(): + if node != self.dir: + if node != self and isinstance(node, Dir): + node.__clearRepositoryCache(duplicate) + else: + node.clear() + try: + del node._srcreps + except AttributeError: + pass + if duplicate is not None: + node.duplicate=duplicate + + def __resetDuplicate(self, node): + if node != self: + node.duplicate = node.get_dir().duplicate + + def Entry(self, name): + """ + Looks up or creates an entry node named 'name' relative to + this directory. + """ + return self.fs.Entry(name, self) + + def Dir(self, name, create=True): + """ + Looks up or creates a directory node named 'name' relative to + this directory. 
+ """ + return self.fs.Dir(name, self, create) + + def File(self, name): + """ + Looks up or creates a file node named 'name' relative to + this directory. + """ + return self.fs.File(name, self) + + def _lookup_rel(self, name, klass, create=1): + """ + Looks up a *normalized* relative path name, relative to this + directory. + + This method is intended for use by internal lookups with + already-normalized path data. For general-purpose lookups, + use the Entry(), Dir() and File() methods above. + + This method does *no* input checking and will die or give + incorrect results if it's passed a non-normalized path name (e.g., + a path containing '..'), an absolute path name, a top-relative + ('#foo') path name, or any kind of object. + """ + name = self.entry_labspath(name) + return self.root._lookup_abs(name, klass, create) + + def link(self, srcdir, duplicate): + """Set this directory as the variant directory for the + supplied source directory.""" + self.srcdir = srcdir + self.duplicate = duplicate + self.__clearRepositoryCache(duplicate) + srcdir.variant_dirs.append(self) + + def getRepositories(self): + """Returns a list of repositories for this directory. + """ + if self.srcdir and not self.duplicate: + return self.srcdir.get_all_rdirs() + self.repositories + return self.repositories + + memoizer_counters.append(SCons.Memoize.CountValue('get_all_rdirs')) + + def get_all_rdirs(self): + try: + return list(self._memo['get_all_rdirs']) + except KeyError: + pass + + result = [self] + fname = '.' + dir = self + while dir: + for rep in dir.getRepositories(): + result.append(rep.Dir(fname)) + if fname == '.': + fname = dir.name + else: + fname = dir.name + os.sep + fname + dir = dir.up() + + self._memo['get_all_rdirs'] = list(result) + + return result + + def addRepository(self, dir): + if dir != self and not dir in self.repositories: + self.repositories.append(dir) + dir.tpath = '.' + self.__clearRepositoryCache() + + def up(self): + return self.entries['..'] + + def _rel_path_key(self, other): + return str(other) + + memoizer_counters.append(SCons.Memoize.CountDict('rel_path', _rel_path_key)) + + def rel_path(self, other): + """Return a path to "other" relative to this directory. + """ + + # This complicated and expensive method, which constructs relative + # paths between arbitrary Node.FS objects, is no longer used + # by SCons itself. It was introduced to store dependency paths + # in .sconsign files relative to the target, but that ended up + # being significantly inefficient. + # + # We're continuing to support the method because some SConstruct + # files out there started using it when it was available, and + # we're all about backwards compatibility.. + + try: + memo_dict = self._memo['rel_path'] + except KeyError: + memo_dict = {} + self._memo['rel_path'] = memo_dict + else: + try: + return memo_dict[other] + except KeyError: + pass + + if self is other: + result = '.' 
+ + elif not other in self.path_elements: + try: + other_dir = other.get_dir() + except AttributeError: + result = str(other) + else: + if other_dir is None: + result = other.name + else: + dir_rel_path = self.rel_path(other_dir) + if dir_rel_path == '.': + result = other.name + else: + result = dir_rel_path + os.sep + other.name + else: + i = self.path_elements.index(other) + 1 + + path_elems = ['..'] * (len(self.path_elements) - i) \ + + map(lambda n: n.name, other.path_elements[i:]) + + result = string.join(path_elems, os.sep) + + memo_dict[other] = result + + return result + + def get_env_scanner(self, env, kw={}): + import SCons.Defaults + return SCons.Defaults.DirEntryScanner + + def get_target_scanner(self): + import SCons.Defaults + return SCons.Defaults.DirEntryScanner + + def get_found_includes(self, env, scanner, path): + """Return this directory's implicit dependencies. + + We don't bother caching the results because the scan typically + shouldn't be requested more than once (as opposed to scanning + .h file contents, which can be requested as many times as the + files is #included by other files). + """ + if not scanner: + return [] + # Clear cached info for this Dir. If we already visited this + # directory on our walk down the tree (because we didn't know at + # that point it was being used as the source for another Node) + # then we may have calculated build signature before realizing + # we had to scan the disk. Now that we have to, though, we need + # to invalidate the old calculated signature so that any node + # dependent on our directory structure gets one that includes + # info about everything on disk. + self.clear() + return scanner(self, env, path) + + # + # Taskmaster interface subsystem + # + + def prepare(self): + pass + + def build(self, **kw): + """A null "builder" for directories.""" + global MkdirBuilder + if self.builder is not MkdirBuilder: + apply(SCons.Node.Node.build, [self,], kw) + + # + # + # + + def _create(self): + """Create this directory, silently and without worrying about + whether the builder is the default or not.""" + listDirs = [] + parent = self + while parent: + if parent.exists(): + break + listDirs.append(parent) + p = parent.up() + if p is None: + # Don't use while: - else: for this condition because + # if so, then parent is None and has no .path attribute. + raise SCons.Errors.StopError, parent.path + parent = p + listDirs.reverse() + for dirnode in listDirs: + try: + # Don't call dirnode.build(), call the base Node method + # directly because we definitely *must* create this + # directory. The dirnode.build() method will suppress + # the build if it's the default builder. + SCons.Node.Node.build(dirnode) + dirnode.get_executor().nullify() + # The build() action may or may not have actually + # created the directory, depending on whether the -n + # option was used or not. Delete the _exists and + # _rexists attributes so they can be reevaluated. + dirnode.clear() + except OSError: + pass + + def multiple_side_effect_has_builder(self): + global MkdirBuilder + return self.builder is not MkdirBuilder and self.has_builder() + + def alter_targets(self): + """Return any corresponding targets in a variant directory. 
+ """ + return self.fs.variant_dir_target_climb(self, self, []) + + def scanner_key(self): + """A directory does not get scanned.""" + return None + + def get_text_contents(self): + """We already emit things in text, so just return the binary + version.""" + return self.get_contents() + + def get_contents(self): + """Return content signatures and names of all our children + separated by new-lines. Ensure that the nodes are sorted.""" + contents = [] + name_cmp = lambda a, b: cmp(a.name, b.name) + sorted_children = self.children()[:] + sorted_children.sort(name_cmp) + for node in sorted_children: + contents.append('%s %s\n' % (node.get_csig(), node.name)) + return string.join(contents, '') + + def get_csig(self): + """Compute the content signature for Directory nodes. In + general, this is not needed and the content signature is not + stored in the DirNodeInfo. However, if get_contents on a Dir + node is called which has a child directory, the child + directory should return the hash of its contents.""" + contents = self.get_contents() + return SCons.Util.MD5signature(contents) + + def do_duplicate(self, src): + pass + + changed_since_last_build = SCons.Node.Node.state_has_changed + + def is_up_to_date(self): + """If any child is not up-to-date, then this directory isn't, + either.""" + if self.builder is not MkdirBuilder and not self.exists(): + return 0 + up_to_date = SCons.Node.up_to_date + for kid in self.children(): + if kid.get_state() > up_to_date: + return 0 + return 1 + + def rdir(self): + if not self.exists(): + norm_name = _my_normcase(self.name) + for dir in self.dir.get_all_rdirs(): + try: node = dir.entries[norm_name] + except KeyError: node = dir.dir_on_disk(self.name) + if node and node.exists() and \ + (isinstance(dir, Dir) or isinstance(dir, Entry)): + return node + return self + + def sconsign(self): + """Return the .sconsign file info for this directory, + creating it first if necessary.""" + if not self._sconsign: + import SCons.SConsign + self._sconsign = SCons.SConsign.ForDirectory(self) + return self._sconsign + + def srcnode(self): + """Dir has a special need for srcnode()...if we + have a srcdir attribute set, then that *is* our srcnode.""" + if self.srcdir: + return self.srcdir + return Base.srcnode(self) + + def get_timestamp(self): + """Return the latest timestamp from among our children""" + stamp = 0 + for kid in self.children(): + if kid.get_timestamp() > stamp: + stamp = kid.get_timestamp() + return stamp + + def entry_abspath(self, name): + return self.abspath + os.sep + name + + def entry_labspath(self, name): + return self.labspath + '/' + name + + def entry_path(self, name): + return self.path + os.sep + name + + def entry_tpath(self, name): + return self.tpath + os.sep + name + + def entry_exists_on_disk(self, name): + try: + d = self.on_disk_entries + except AttributeError: + d = {} + try: + entries = os.listdir(self.abspath) + except OSError: + pass + else: + for entry in map(_my_normcase, entries): + d[entry] = True + self.on_disk_entries = d + if sys.platform == 'win32': + name = _my_normcase(name) + result = d.get(name) + if result is None: + # Belt-and-suspenders for Windows: check directly for + # 8.3 file names that don't show up in os.listdir(). 
+ result = os.path.exists(self.abspath + os.sep + name) + d[name] = result + return result + else: + return d.has_key(name) + + memoizer_counters.append(SCons.Memoize.CountValue('srcdir_list')) + + def srcdir_list(self): + try: + return self._memo['srcdir_list'] + except KeyError: + pass + + result = [] + + dirname = '.' + dir = self + while dir: + if dir.srcdir: + result.append(dir.srcdir.Dir(dirname)) + dirname = dir.name + os.sep + dirname + dir = dir.up() + + self._memo['srcdir_list'] = result + + return result + + def srcdir_duplicate(self, name): + for dir in self.srcdir_list(): + if self.is_under(dir): + # We shouldn't source from something in the build path; + # variant_dir is probably under src_dir, in which case + # we are reflecting. + break + if dir.entry_exists_on_disk(name): + srcnode = dir.Entry(name).disambiguate() + if self.duplicate: + node = self.Entry(name).disambiguate() + node.do_duplicate(srcnode) + return node + else: + return srcnode + return None + + def _srcdir_find_file_key(self, filename): + return filename + + memoizer_counters.append(SCons.Memoize.CountDict('srcdir_find_file', _srcdir_find_file_key)) + + def srcdir_find_file(self, filename): + try: + memo_dict = self._memo['srcdir_find_file'] + except KeyError: + memo_dict = {} + self._memo['srcdir_find_file'] = memo_dict + else: + try: + return memo_dict[filename] + except KeyError: + pass + + def func(node): + if (isinstance(node, File) or isinstance(node, Entry)) and \ + (node.is_derived() or node.exists()): + return node + return None + + norm_name = _my_normcase(filename) + + for rdir in self.get_all_rdirs(): + try: node = rdir.entries[norm_name] + except KeyError: node = rdir.file_on_disk(filename) + else: node = func(node) + if node: + result = (node, self) + memo_dict[filename] = result + return result + + for srcdir in self.srcdir_list(): + for rdir in srcdir.get_all_rdirs(): + try: node = rdir.entries[norm_name] + except KeyError: node = rdir.file_on_disk(filename) + else: node = func(node) + if node: + result = (File(filename, self, self.fs), srcdir) + memo_dict[filename] = result + return result + + result = (None, None) + memo_dict[filename] = result + return result + + def dir_on_disk(self, name): + if self.entry_exists_on_disk(name): + try: return self.Dir(name) + except TypeError: pass + node = self.srcdir_duplicate(name) + if isinstance(node, File): + return None + return node + + def file_on_disk(self, name): + if self.entry_exists_on_disk(name) or \ + diskcheck_rcs(self, name) or \ + diskcheck_sccs(self, name): + try: return self.File(name) + except TypeError: pass + node = self.srcdir_duplicate(name) + if isinstance(node, Dir): + return None + return node + + def walk(self, func, arg): + """ + Walk this directory tree by calling the specified function + for each directory in the tree. + + This behaves like the os.path.walk() function, but for in-memory + Node.FS.Dir objects. The function takes the same arguments as + the functions passed to os.path.walk(): + + func(arg, dirname, fnames) + + Except that "dirname" will actually be the directory *Node*, + not the string. The '.' and '..' entries are excluded from + fnames. The fnames list may be modified in-place to filter the + subdirectories visited or otherwise impose a specific order. + The "arg" argument is always passed to func() and may be used + in any way (or ignored, passing None is common). 
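# ---- Editor's illustrative sketch (not part of the upstream diff) ----
# entry_exists_on_disk() above answers "does this directory contain an
# entry called name?" by listing the directory once, caching the
# normalized names, and consulting the cache afterwards.  Below is a
# stand-alone rendering of that idea over plain strings; the module-level
# cache and function name are hypothetical, and case folding is applied
# uniformly here, whereas the real method special-cases win32.
import os

_dir_listing_cache = {}

def entry_exists(dirpath, name):
    try:
        entries = _dir_listing_cache[dirpath]
    except KeyError:
        try:
            listed = os.listdir(dirpath)
        except OSError:
            listed = []
        entries = set(os.path.normcase(n) for n in listed)
        _dir_listing_cache[dirpath] = entries
    return os.path.normcase(name) in entries
# ---- end of sketch ----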
+ """ + entries = self.entries + names = entries.keys() + names.remove('.') + names.remove('..') + func(arg, self, names) + select_dirs = lambda n, e=entries: isinstance(e[n], Dir) + for dirname in filter(select_dirs, names): + entries[dirname].walk(func, arg) + + def glob(self, pathname, ondisk=True, source=False, strings=False): + """ + Returns a list of Nodes (or strings) matching a specified + pathname pattern. + + Pathname patterns follow UNIX shell semantics: * matches + any-length strings of any characters, ? matches any character, + and [] can enclose lists or ranges of characters. Matches do + not span directory separators. + + The matches take into account Repositories, returning local + Nodes if a corresponding entry exists in a Repository (either + an in-memory Node or something on disk). + + By defafult, the glob() function matches entries that exist + on-disk, in addition to in-memory Nodes. Setting the "ondisk" + argument to False (or some other non-true value) causes the glob() + function to only match in-memory Nodes. The default behavior is + to return both the on-disk and in-memory Nodes. + + The "source" argument, when true, specifies that corresponding + source Nodes must be returned if you're globbing in a build + directory (initialized with VariantDir()). The default behavior + is to return Nodes local to the VariantDir(). + + The "strings" argument, when true, returns the matches as strings, + not Nodes. The strings are path names relative to this directory. + + The underlying algorithm is adapted from the glob.glob() function + in the Python library (but heavily modified), and uses fnmatch() + under the covers. + """ + dirname, basename = os.path.split(pathname) + if not dirname: + result = self._glob1(basename, ondisk, source, strings) + result.sort(lambda a, b: cmp(str(a), str(b))) + return result + if has_glob_magic(dirname): + list = self.glob(dirname, ondisk, source, strings=False) + else: + list = [self.Dir(dirname, create=True)] + result = [] + for dir in list: + r = dir._glob1(basename, ondisk, source, strings) + if strings: + r = map(lambda x, d=str(dir): os.path.join(d, x), r) + result.extend(r) + result.sort(lambda a, b: cmp(str(a), str(b))) + return result + + def _glob1(self, pattern, ondisk=True, source=False, strings=False): + """ + Globs for and returns a list of entry names matching a single + pattern in this directory. + + This searches any repositories and source directories for + corresponding entries and returns a Node (or string) relative + to the current directory if an entry is found anywhere. + + TODO: handle pattern with no wildcard + """ + search_dir_list = self.get_all_rdirs() + for srcdir in self.srcdir_list(): + search_dir_list.extend(srcdir.get_all_rdirs()) + + selfEntry = self.Entry + names = [] + for dir in search_dir_list: + # We use the .name attribute from the Node because the keys of + # the dir.entries dictionary are normalized (that is, all upper + # case) on case-insensitive systems like Windows. + #node_names = [ v.name for k, v in dir.entries.items() if k not in ('.', '..') ] + entry_names = filter(lambda n: n not in ('.', '..'), dir.entries.keys()) + node_names = map(lambda n, e=dir.entries: e[n].name, entry_names) + names.extend(node_names) + if not strings: + # Make sure the working directory (self) actually has + # entries for all Nodes in repositories or variant dirs. 
+ for name in node_names: selfEntry(name) + if ondisk: + try: + disk_names = os.listdir(dir.abspath) + except os.error: + continue + names.extend(disk_names) + if not strings: + # We're going to return corresponding Nodes in + # the local directory, so we need to make sure + # those Nodes exist. We only want to create + # Nodes for the entries that will match the + # specified pattern, though, which means we + # need to filter the list here, even though + # the overall list will also be filtered later, + # after we exit this loop. + if pattern[0] != '.': + #disk_names = [ d for d in disk_names if d[0] != '.' ] + disk_names = filter(lambda x: x[0] != '.', disk_names) + disk_names = fnmatch.filter(disk_names, pattern) + dirEntry = dir.Entry + for name in disk_names: + # Add './' before disk filename so that '#' at + # beginning of filename isn't interpreted. + name = './' + name + node = dirEntry(name).disambiguate() + n = selfEntry(name) + if n.__class__ != node.__class__: + n.__class__ = node.__class__ + n._morph() + + names = set(names) + if pattern[0] != '.': + #names = [ n for n in names if n[0] != '.' ] + names = filter(lambda x: x[0] != '.', names) + names = fnmatch.filter(names, pattern) + + if strings: + return names + + #return [ self.entries[_my_normcase(n)] for n in names ] + return map(lambda n, e=self.entries: e[_my_normcase(n)], names) + +class RootDir(Dir): + """A class for the root directory of a file system. + + This is the same as a Dir class, except that the path separator + ('/' or '\\') is actually part of the name, so we don't need to + add a separator when creating the path names of entries within + this directory. + """ + def __init__(self, name, fs): + if __debug__: logInstanceCreation(self, 'Node.FS.RootDir') + # We're going to be our own parent directory (".." entry and .dir + # attribute) so we have to set up some values so Base.__init__() + # won't gag won't it calls some of our methods. + self.abspath = '' + self.labspath = '' + self.path = '' + self.tpath = '' + self.path_elements = [] + self.duplicate = 0 + self.root = self + Base.__init__(self, name, self, fs) + + # Now set our paths to what we really want them to be: the + # initial drive letter (the name) plus the directory separator, + # except for the "lookup abspath," which does not have the + # drive letter. + self.abspath = name + os.sep + self.labspath = '' + self.path = name + os.sep + self.tpath = name + os.sep + self._morph() + + self._lookupDict = {} + + # The // and os.sep + os.sep entries are necessary because + # os.path.normpath() seems to preserve double slashes at the + # beginning of a path (presumably for UNC path names), but + # collapses triple slashes to a single slash. + self._lookupDict[''] = self + self._lookupDict['/'] = self + self._lookupDict['//'] = self + self._lookupDict[os.sep] = self + self._lookupDict[os.sep + os.sep] = self + + def must_be_same(self, klass): + if klass is Dir: + return + Base.must_be_same(self, klass) + + def _lookup_abs(self, p, klass, create=1): + """ + Fast (?) lookup of a *normalized* absolute path. + + This method is intended for use by internal lookups with + already-normalized path data. For general-purpose lookups, + use the FS.Entry(), FS.Dir() or FS.File() methods. + + The caller is responsible for making sure we're passed a + normalized absolute path; we merely let Python's dictionary look + up and return the One True Node.FS object for the path. 
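# ---- Editor's illustrative sketch (not part of the upstream diff) ----
# The heart of _glob1() above is plain fnmatch filtering plus the UNIX
# convention that a pattern not beginning with '.' never matches
# dot-files.  Here is just that matching step over a list of names
# (no repositories, variant dirs or Node creation); the function name
# is hypothetical.
import fnmatch

def match_entries(names, pattern):
    names = set(names)
    if not pattern.startswith('.'):
        names = [n for n in names if not n.startswith('.')]
    return sorted(fnmatch.filter(names, pattern))

# Example: match_entries(['foo.c', 'bar.c', '.git', 'foo.h'], '*.c')
# returns ['bar.c', 'foo.c'].
# ---- end of sketch ----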
+ + If no Node for the specified "p" doesn't already exist, and + "create" is specified, the Node may be created after recursive + invocation to find or create the parent directory or directories. + """ + k = _my_normcase(p) + try: + result = self._lookupDict[k] + except KeyError: + if not create: + msg = "No such file or directory: '%s' in '%s' (and create is False)" % (p, str(self)) + raise SCons.Errors.UserError, msg + # There is no Node for this path name, and we're allowed + # to create it. + dir_name, file_name = os.path.split(p) + dir_node = self._lookup_abs(dir_name, Dir) + result = klass(file_name, dir_node, self.fs) + + # Double-check on disk (as configured) that the Node we + # created matches whatever is out there in the real world. + result.diskcheck_match() + + self._lookupDict[k] = result + dir_node.entries[_my_normcase(file_name)] = result + dir_node.implicit = None + else: + # There is already a Node for this path name. Allow it to + # complain if we were looking for an inappropriate type. + result.must_be_same(klass) + return result + + def __str__(self): + return self.abspath + + def entry_abspath(self, name): + return self.abspath + name + + def entry_labspath(self, name): + return '/' + name + + def entry_path(self, name): + return self.path + name + + def entry_tpath(self, name): + return self.tpath + name + + def is_under(self, dir): + if self is dir: + return 1 + else: + return 0 + + def up(self): + return None + + def get_dir(self): + return None + + def src_builder(self): + return _null + +class FileNodeInfo(SCons.Node.NodeInfoBase): + current_version_id = 1 + + field_list = ['csig', 'timestamp', 'size'] + + # This should get reset by the FS initialization. + fs = None + + def str_to_node(self, s): + top = self.fs.Top + root = top.root + if do_splitdrive: + drive, s = os.path.splitdrive(s) + if drive: + root = self.fs.get_root(drive) + if not os.path.isabs(s): + s = top.labspath + '/' + s + return root._lookup_abs(s, Entry) + +class FileBuildInfo(SCons.Node.BuildInfoBase): + current_version_id = 1 + + def convert_to_sconsign(self): + """ + Converts this FileBuildInfo object for writing to a .sconsign file + + This replaces each Node in our various dependency lists with its + usual string representation: relative to the top-level SConstruct + directory, or an absolute path if it's outside. + """ + if os.sep == '/': + node_to_str = str + else: + def node_to_str(n): + try: + s = n.path + except AttributeError: + s = str(n) + else: + s = string.replace(s, os.sep, '/') + return s + for attr in ['bsources', 'bdepends', 'bimplicit']: + try: + val = getattr(self, attr) + except AttributeError: + pass + else: + setattr(self, attr, map(node_to_str, val)) + def convert_from_sconsign(self, dir, name): + """ + Converts a newly-read FileBuildInfo object for in-SCons use + + For normal up-to-date checking, we don't have any conversion to + perform--but we're leaving this method here to make that clear. + """ + pass + def prepare_dependencies(self): + """ + Prepares a FileBuildInfo object for explaining what changed + + The bsources, bdepends and bimplicit lists have all been + stored on disk as paths relative to the top-level SConstruct + directory. Convert the strings to actual Nodes (for use by the + --debug=explain code and --implicit-cache). 
+ """ + attrs = [ + ('bsources', 'bsourcesigs'), + ('bdepends', 'bdependsigs'), + ('bimplicit', 'bimplicitsigs'), + ] + for (nattr, sattr) in attrs: + try: + strings = getattr(self, nattr) + nodeinfos = getattr(self, sattr) + except AttributeError: + continue + nodes = [] + for s, ni in izip(strings, nodeinfos): + if not isinstance(s, SCons.Node.Node): + s = ni.str_to_node(s) + nodes.append(s) + setattr(self, nattr, nodes) + def format(self, names=0): + result = [] + bkids = self.bsources + self.bdepends + self.bimplicit + bkidsigs = self.bsourcesigs + self.bdependsigs + self.bimplicitsigs + for bkid, bkidsig in izip(bkids, bkidsigs): + result.append(str(bkid) + ': ' + + string.join(bkidsig.format(names=names), ' ')) + result.append('%s [%s]' % (self.bactsig, self.bact)) + return string.join(result, '\n') + +class File(Base): + """A class for files in a file system. + """ + + memoizer_counters = [] + + NodeInfo = FileNodeInfo + BuildInfo = FileBuildInfo + + md5_chunksize = 64 + + def diskcheck_match(self): + diskcheck_match(self, self.isdir, + "Directory %s found where file expected.") + + def __init__(self, name, directory, fs): + if __debug__: logInstanceCreation(self, 'Node.FS.File') + Base.__init__(self, name, directory, fs) + self._morph() + + def Entry(self, name): + """Create an entry node named 'name' relative to + the directory of this file.""" + return self.dir.Entry(name) + + def Dir(self, name, create=True): + """Create a directory node named 'name' relative to + the directory of this file.""" + return self.dir.Dir(name, create=create) + + def Dirs(self, pathlist): + """Create a list of directories relative to the SConscript + directory of this file.""" + # TODO(1.5) + # return [self.Dir(p) for p in pathlist] + return map(lambda p, s=self: s.Dir(p), pathlist) + + def File(self, name): + """Create a file node named 'name' relative to + the directory of this file.""" + return self.dir.File(name) + + #def generate_build_dict(self): + # """Return an appropriate dictionary of values for building + # this File.""" + # return {'Dir' : self.Dir, + # 'File' : self.File, + # 'RDirs' : self.RDirs} + + def _morph(self): + """Turn a file system node into a File object.""" + self.scanner_paths = {} + if not hasattr(self, '_local'): + self._local = 0 + + # If there was already a Builder set on this entry, then + # we need to make sure we call the target-decider function, + # not the source-decider. Reaching in and doing this by hand + # is a little bogus. We'd prefer to handle this by adding + # an Entry.builder_set() method that disambiguates like the + # other methods, but that starts running into problems with the + # fragile way we initialize Dir Nodes with their Mkdir builders, + # yet still allow them to be overridden by the user. Since it's + # not clear right now how to fix that, stick with what works + # until it becomes clear... + if self.has_builder(): + self.changed_since_last_build = self.decide_target + + def scanner_key(self): + return self.get_suffix() + + def get_contents(self): + if not self.rexists(): + return '' + fname = self.rfile().abspath + try: + contents = open(fname, "rb").read() + except EnvironmentError, e: + if not e.filename: + e.filename = fname + raise + return contents + + try: + import codecs + except ImportError: + get_text_contents = get_contents + else: + # This attempts to figure out what the encoding of the text is + # based upon the BOM bytes, and then decodes the contents so that + # it's a valid python string. 
+ def get_text_contents(self): + contents = self.get_contents() + # The behavior of various decode() methods and functions + # w.r.t. the initial BOM bytes is different for different + # encodings and/or Python versions. ('utf-8' does not strip + # them, but has a 'utf-8-sig' which does; 'utf-16' seems to + # strip them; etc.) Just side step all the complication by + # explicitly stripping the BOM before we decode(). + if contents.startswith(codecs.BOM_UTF8): + contents = contents[len(codecs.BOM_UTF8):] + # TODO(2.2): Remove when 2.3 becomes floor. + #contents = contents.decode('utf-8') + contents = my_decode(contents, 'utf-8') + elif contents.startswith(codecs.BOM_UTF16_LE): + contents = contents[len(codecs.BOM_UTF16_LE):] + # TODO(2.2): Remove when 2.3 becomes floor. + #contents = contents.decode('utf-16-le') + contents = my_decode(contents, 'utf-16-le') + elif contents.startswith(codecs.BOM_UTF16_BE): + contents = contents[len(codecs.BOM_UTF16_BE):] + # TODO(2.2): Remove when 2.3 becomes floor. + #contents = contents.decode('utf-16-be') + contents = my_decode(contents, 'utf-16-be') + return contents + + def get_content_hash(self): + """ + Compute and return the MD5 hash for this file. + """ + if not self.rexists(): + return SCons.Util.MD5signature('') + fname = self.rfile().abspath + try: + cs = SCons.Util.MD5filesignature(fname, + chunksize=SCons.Node.FS.File.md5_chunksize*1024) + except EnvironmentError, e: + if not e.filename: + e.filename = fname + raise + return cs + + + memoizer_counters.append(SCons.Memoize.CountValue('get_size')) + + def get_size(self): + try: + return self._memo['get_size'] + except KeyError: + pass + + if self.rexists(): + size = self.rfile().getsize() + else: + size = 0 + + self._memo['get_size'] = size + + return size + + memoizer_counters.append(SCons.Memoize.CountValue('get_timestamp')) + + def get_timestamp(self): + try: + return self._memo['get_timestamp'] + except KeyError: + pass + + if self.rexists(): + timestamp = self.rfile().getmtime() + else: + timestamp = 0 + + self._memo['get_timestamp'] = timestamp + + return timestamp + + def store_info(self): + # Merge our build information into the already-stored entry. + # This accomodates "chained builds" where a file that's a target + # in one build (SConstruct file) is a source in a different build. + # See test/chained-build.py for the use case. + if do_store_info: + self.dir.sconsign().store_info(self.name, self) + + convert_copy_attrs = [ + 'bsources', + 'bimplicit', + 'bdepends', + 'bact', + 'bactsig', + 'ninfo', + ] + + + convert_sig_attrs = [ + 'bsourcesigs', + 'bimplicitsigs', + 'bdependsigs', + ] + + def convert_old_entry(self, old_entry): + # Convert a .sconsign entry from before the Big Signature + # Refactoring, doing what we can to convert its information + # to the new .sconsign entry format. 
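# ---- Editor's illustrative sketch (not part of the upstream diff) ----
# get_text_contents() above strips a UTF-8 or UTF-16 byte-order mark by
# hand before decoding, because different codecs and Python versions
# disagree about whether decode() removes the BOM itself.  A compact
# stand-alone version of that dispatch (bytes in, text out; the helper
# name and table are hypothetical):
import codecs

_BOM_ENCODINGS = [
    (codecs.BOM_UTF8, 'utf-8'),
    (codecs.BOM_UTF16_LE, 'utf-16-le'),
    (codecs.BOM_UTF16_BE, 'utf-16-be'),
]

def decode_with_bom(raw):
    for bom, encoding in _BOM_ENCODINGS:
        if raw.startswith(bom):
            return raw[len(bom):].decode(encoding)
    return raw  # no recognized BOM: return the raw contents unchanged
# ---- end of sketch ----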
+ # + # The old format looked essentially like this: + # + # BuildInfo + # .ninfo (NodeInfo) + # .bsig + # .csig + # .timestamp + # .size + # .bsources + # .bsourcesigs ("signature" list) + # .bdepends + # .bdependsigs ("signature" list) + # .bimplicit + # .bimplicitsigs ("signature" list) + # .bact + # .bactsig + # + # The new format looks like this: + # + # .ninfo (NodeInfo) + # .bsig + # .csig + # .timestamp + # .size + # .binfo (BuildInfo) + # .bsources + # .bsourcesigs (NodeInfo list) + # .bsig + # .csig + # .timestamp + # .size + # .bdepends + # .bdependsigs (NodeInfo list) + # .bsig + # .csig + # .timestamp + # .size + # .bimplicit + # .bimplicitsigs (NodeInfo list) + # .bsig + # .csig + # .timestamp + # .size + # .bact + # .bactsig + # + # The basic idea of the new structure is that a NodeInfo always + # holds all available information about the state of a given Node + # at a certain point in time. The various .b*sigs lists can just + # be a list of pointers to the .ninfo attributes of the different + # dependent nodes, without any copying of information until it's + # time to pickle it for writing out to a .sconsign file. + # + # The complicating issue is that the *old* format only stored one + # "signature" per dependency, based on however the *last* build + # was configured. We don't know from just looking at it whether + # it was a build signature, a content signature, or a timestamp + # "signature". Since we no longer use build signatures, the + # best we can do is look at the length and if it's thirty two, + # assume that it was (or might have been) a content signature. + # If it was actually a build signature, then it will cause a + # rebuild anyway when it doesn't match the new content signature, + # but that's probably the best we can do. + import SCons.SConsign + new_entry = SCons.SConsign.SConsignEntry() + new_entry.binfo = self.new_binfo() + binfo = new_entry.binfo + for attr in self.convert_copy_attrs: + try: + value = getattr(old_entry, attr) + except AttributeError: + continue + setattr(binfo, attr, value) + delattr(old_entry, attr) + for attr in self.convert_sig_attrs: + try: + sig_list = getattr(old_entry, attr) + except AttributeError: + continue + value = [] + for sig in sig_list: + ninfo = self.new_ninfo() + if len(sig) == 32: + ninfo.csig = sig + else: + ninfo.timestamp = sig + value.append(ninfo) + setattr(binfo, attr, value) + delattr(old_entry, attr) + return new_entry + + memoizer_counters.append(SCons.Memoize.CountValue('get_stored_info')) + + def get_stored_info(self): + try: + return self._memo['get_stored_info'] + except KeyError: + pass + + try: + sconsign_entry = self.dir.sconsign().get_entry(self.name) + except (KeyError, EnvironmentError): + import SCons.SConsign + sconsign_entry = SCons.SConsign.SConsignEntry() + sconsign_entry.binfo = self.new_binfo() + sconsign_entry.ninfo = self.new_ninfo() + else: + if isinstance(sconsign_entry, FileBuildInfo): + # This is a .sconsign file from before the Big Signature + # Refactoring; convert it as best we can. 
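# ---- Editor's illustrative sketch (not part of the upstream diff) ----
# As the comment above explains, the old .sconsign format stored a single
# opaque "signature" per dependency, so convert_old_entry() has to guess
# its meaning: a 32-character value is assumed to be an MD5 content
# signature, anything else is treated as a timestamp.  That heuristic in
# isolation (hypothetical function name, a plain dict standing in for a
# NodeInfo object):
def classify_old_signature(sig):
    if len(str(sig)) == 32:
        return {'csig': sig}
    return {'timestamp': sig}

# Example: classify_old_signature('d41d8cd98f00b204e9800998ecf8427e')
# yields {'csig': ...}, while classify_old_signature(1262031337)
# yields {'timestamp': ...}.
# ---- end of sketch ----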
+ sconsign_entry = self.convert_old_entry(sconsign_entry) + try: + delattr(sconsign_entry.ninfo, 'bsig') + except AttributeError: + pass + + self._memo['get_stored_info'] = sconsign_entry + + return sconsign_entry + + def get_stored_implicit(self): + binfo = self.get_stored_info().binfo + binfo.prepare_dependencies() + try: return binfo.bimplicit + except AttributeError: return None + + def rel_path(self, other): + return self.dir.rel_path(other) + + def _get_found_includes_key(self, env, scanner, path): + return (id(env), id(scanner), path) + + memoizer_counters.append(SCons.Memoize.CountDict('get_found_includes', _get_found_includes_key)) + + def get_found_includes(self, env, scanner, path): + """Return the included implicit dependencies in this file. + Cache results so we only scan the file once per path + regardless of how many times this information is requested. + """ + memo_key = (id(env), id(scanner), path) + try: + memo_dict = self._memo['get_found_includes'] + except KeyError: + memo_dict = {} + self._memo['get_found_includes'] = memo_dict + else: + try: + return memo_dict[memo_key] + except KeyError: + pass + + if scanner: + # result = [n.disambiguate() for n in scanner(self, env, path)] + result = scanner(self, env, path) + result = map(lambda N: N.disambiguate(), result) + else: + result = [] + + memo_dict[memo_key] = result + + return result + + def _createDir(self): + # ensure that the directories for this node are + # created. + self.dir._create() + + def push_to_cache(self): + """Try to push the node into a cache + """ + # This should get called before the Nodes' .built() method is + # called, which would clear the build signature if the file has + # a source scanner. + # + # We have to clear the local memoized values *before* we push + # the node to cache so that the memoization of the self.exists() + # return value doesn't interfere. + if self.nocache: + return + self.clear_memoized_values() + if self.exists(): + self.get_build_env().get_CacheDir().push(self) + + def retrieve_from_cache(self): + """Try to retrieve the node's content from a cache + + This method is called from multiple threads in a parallel build, + so only do thread safe stuff here. Do thread unsafe stuff in + built(). + + Returns true iff the node was successfully retrieved. + """ + if self.nocache: + return None + if not self.is_derived(): + return None + return self.get_build_env().get_CacheDir().retrieve(self) + + def visited(self): + if self.exists(): + self.get_build_env().get_CacheDir().push_if_forced(self) + + ninfo = self.get_ninfo() + + csig = self.get_max_drift_csig() + if csig: + ninfo.csig = csig + + ninfo.timestamp = self.get_timestamp() + ninfo.size = self.get_size() + + if not self.has_builder(): + # This is a source file, but it might have been a target file + # in another build that included more of the DAG. Copy + # any build information that's stored in the .sconsign file + # into our binfo object so it doesn't get lost. 
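# ---- Editor's illustrative sketch (not part of the upstream diff) ----
# get_stored_info() and get_found_includes() above follow the memoization
# discipline used throughout this file: look in self._memo, fall through
# on KeyError, compute, store, return.  A stripped-down class showing the
# keyed (CountDict-style) variant of that pattern; the names and the
# stand-in computation are hypothetical.
class MemoizedExample:
    def __init__(self):
        self._memo = {}

    def lookup(self, key):
        try:
            memo_dict = self._memo['lookup']
        except KeyError:
            memo_dict = {}
            self._memo['lookup'] = memo_dict
        else:
            try:
                return memo_dict[key]
            except KeyError:
                pass
        result = key * 2          # stand-in for the expensive computation
        memo_dict[key] = result
        return result

# Clearing self._memo (as clear_memoized_values() does elsewhere) is all
# it takes to invalidate every cached answer at once.
# ---- end of sketch ----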
+ old = self.get_stored_info() + self.get_binfo().__dict__.update(old.binfo.__dict__) + + self.store_info() + + def find_src_builder(self): + if self.rexists(): + return None + scb = self.dir.src_builder() + if scb is _null: + if diskcheck_sccs(self.dir, self.name): + scb = get_DefaultSCCSBuilder() + elif diskcheck_rcs(self.dir, self.name): + scb = get_DefaultRCSBuilder() + else: + scb = None + if scb is not None: + try: + b = self.builder + except AttributeError: + b = None + if b is None: + self.builder_set(scb) + return scb + + def has_src_builder(self): + """Return whether this Node has a source builder or not. + + If this Node doesn't have an explicit source code builder, this + is where we figure out, on the fly, if there's a transparent + source code builder for it. + + Note that if we found a source builder, we also set the + self.builder attribute, so that all of the methods that actually + *build* this file don't have to do anything different. + """ + try: + scb = self.sbuilder + except AttributeError: + scb = self.sbuilder = self.find_src_builder() + return scb is not None + + def alter_targets(self): + """Return any corresponding targets in a variant directory. + """ + if self.is_derived(): + return [], None + return self.fs.variant_dir_target_climb(self, self.dir, [self.name]) + + def _rmv_existing(self): + self.clear_memoized_values() + e = Unlink(self, [], None) + if isinstance(e, SCons.Errors.BuildError): + raise e + + # + # Taskmaster interface subsystem + # + + def make_ready(self): + self.has_src_builder() + self.get_binfo() + + def prepare(self): + """Prepare for this file to be created.""" + SCons.Node.Node.prepare(self) + + if self.get_state() != SCons.Node.up_to_date: + if self.exists(): + if self.is_derived() and not self.precious: + self._rmv_existing() + else: + try: + self._createDir() + except SCons.Errors.StopError, drive: + desc = "No drive `%s' for target `%s'." % (drive, self) + raise SCons.Errors.StopError, desc + + # + # + # + + def remove(self): + """Remove this file.""" + if self.exists() or self.islink(): + self.fs.unlink(self.path) + return 1 + return None + + def do_duplicate(self, src): + self._createDir() + Unlink(self, None, None) + e = Link(self, src, None) + if isinstance(e, SCons.Errors.BuildError): + desc = "Cannot duplicate `%s' in `%s': %s." % (src.path, self.dir.path, e.errstr) + raise SCons.Errors.StopError, desc + self.linked = 1 + # The Link() action may or may not have actually + # created the file, depending on whether the -n + # option was used or not. Delete the _exists and + # _rexists attributes so they can be reevaluated. + self.clear() + + memoizer_counters.append(SCons.Memoize.CountValue('exists')) + + def exists(self): + try: + return self._memo['exists'] + except KeyError: + pass + # Duplicate from source path if we are set up to do this. + if self.duplicate and not self.is_derived() and not self.linked: + src = self.srcnode() + if src is not self: + # At this point, src is meant to be copied in a variant directory. + src = src.rfile() + if src.abspath != self.abspath: + if src.exists(): + self.do_duplicate(src) + # Can't return 1 here because the duplication might + # not actually occur if the -n option is being used. + else: + # The source file does not exist. Make sure no old + # copy remains in the variant directory. + if Base.exists(self) or self.islink(): + self.fs.unlink(self.path) + # Return None explicitly because the Base.exists() call + # above will have cached its value if the file existed. 
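# ---- Editor's illustrative sketch (not part of the upstream diff) ----
# The duplication branch of exists() above keeps a variant directory in
# step with its source directory: copy the source file in when it exists,
# and remove a stale copy when the source has disappeared.  A greatly
# simplified, string-path version of that behaviour (the real code goes
# through Link/Unlink actions and honours the -n option and the
# `duplicate` setting); the function name is hypothetical.
import os
import shutil

def sync_variant_copy(src_path, variant_path):
    if os.path.exists(src_path):
        if not os.path.exists(variant_path):
            d = os.path.dirname(variant_path)
            if d and not os.path.isdir(d):
                os.makedirs(d)
            shutil.copy2(src_path, variant_path)
        return True
    if os.path.exists(variant_path):
        os.unlink(variant_path)        # stale copy left from an old build
    return False
# ---- end of sketch ----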
+ self._memo['exists'] = None + return None + result = Base.exists(self) + self._memo['exists'] = result + return result + + # + # SIGNATURE SUBSYSTEM + # + + def get_max_drift_csig(self): + """ + Returns the content signature currently stored for this node + if it's been unmodified longer than the max_drift value, or the + max_drift value is 0. Returns None otherwise. + """ + old = self.get_stored_info() + mtime = self.get_timestamp() + + max_drift = self.fs.max_drift + if max_drift > 0: + if (time.time() - mtime) > max_drift: + try: + n = old.ninfo + if n.timestamp and n.csig and n.timestamp == mtime: + return n.csig + except AttributeError: + pass + elif max_drift == 0: + try: + return old.ninfo.csig + except AttributeError: + pass + + return None + + def get_csig(self): + """ + Generate a node's content signature, the digested signature + of its content. + + node - the node + cache - alternate node to use for the signature cache + returns - the content signature + """ + ninfo = self.get_ninfo() + try: + return ninfo.csig + except AttributeError: + pass + + csig = self.get_max_drift_csig() + if csig is None: + + try: + if self.get_size() < SCons.Node.FS.File.md5_chunksize: + contents = self.get_contents() + else: + csig = self.get_content_hash() + except IOError: + # This can happen if there's actually a directory on-disk, + # which can be the case if they've disabled disk checks, + # or if an action with a File target actually happens to + # create a same-named directory by mistake. + csig = '' + else: + if not csig: + csig = SCons.Util.MD5signature(contents) + + ninfo.csig = csig + + return csig + + # + # DECISION SUBSYSTEM + # + + def builder_set(self, builder): + SCons.Node.Node.builder_set(self, builder) + self.changed_since_last_build = self.decide_target + + def changed_content(self, target, prev_ni): + cur_csig = self.get_csig() + try: + return cur_csig != prev_ni.csig + except AttributeError: + return 1 + + def changed_state(self, target, prev_ni): + return self.state != SCons.Node.up_to_date + + def changed_timestamp_then_content(self, target, prev_ni): + if not self.changed_timestamp_match(target, prev_ni): + try: + self.get_ninfo().csig = prev_ni.csig + except AttributeError: + pass + return False + return self.changed_content(target, prev_ni) + + def changed_timestamp_newer(self, target, prev_ni): + try: + return self.get_timestamp() > target.get_timestamp() + except AttributeError: + return 1 + + def changed_timestamp_match(self, target, prev_ni): + try: + return self.get_timestamp() != prev_ni.timestamp + except AttributeError: + return 1 + + def decide_source(self, target, prev_ni): + return target.get_build_env().decide_source(self, target, prev_ni) + + def decide_target(self, target, prev_ni): + return target.get_build_env().decide_target(self, target, prev_ni) + + # Initialize this Node's decider function to decide_source() because + # every file is a source file until it has a Builder attached... + changed_since_last_build = decide_source + + def is_up_to_date(self): + T = 0 + if T: Trace('is_up_to_date(%s):' % self) + if not self.exists(): + if T: Trace(' not self.exists():') + # The file doesn't exist locally... + r = self.rfile() + if r != self: + # ...but there is one in a Repository... + if not self.changed(r): + if T: Trace(' changed(%s):' % r) + # ...and it's even up-to-date... + if self._local: + # ...and they'd like a local copy. 
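# ---- Editor's illustrative sketch (not part of the upstream diff) ----
# get_csig() above hashes small files from their in-memory contents and
# hands larger ones to a chunked file hash (get_content_hash(), via
# SCons.Util.MD5filesignature) so they never have to be read into memory
# at once.  The chunked hashing idea on its own, with a hypothetical
# helper name and the same 64 KB chunk size as md5_chunksize * 1024:
import hashlib

def chunked_md5(path, chunksize=64 * 1024):
    md5 = hashlib.md5()
    with open(path, 'rb') as f:
        while True:
            chunk = f.read(chunksize)
            if not chunk:
                break
            md5.update(chunk)
    return md5.hexdigest()
# ---- end of sketch ----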
+ e = LocalCopy(self, r, None) + if isinstance(e, SCons.Errors.BuildError): + raise + self.store_info() + if T: Trace(' 1\n') + return 1 + self.changed() + if T: Trace(' None\n') + return None + else: + r = self.changed() + if T: Trace(' self.exists(): %s\n' % r) + return not r + + memoizer_counters.append(SCons.Memoize.CountValue('rfile')) + + def rfile(self): + try: + return self._memo['rfile'] + except KeyError: + pass + result = self + if not self.exists(): + norm_name = _my_normcase(self.name) + for dir in self.dir.get_all_rdirs(): + try: node = dir.entries[norm_name] + except KeyError: node = dir.file_on_disk(self.name) + if node and node.exists() and \ + (isinstance(node, File) or isinstance(node, Entry) \ + or not node.is_derived()): + result = node + # Copy over our local attributes to the repository + # Node so we identify shared object files in the + # repository and don't assume they're static. + # + # This isn't perfect; the attribute would ideally + # be attached to the object in the repository in + # case it was built statically in the repository + # and we changed it to shared locally, but that's + # rarely the case and would only occur if you + # intentionally used the same suffix for both + # shared and static objects anyway. So this + # should work well in practice. + result.attributes = self.attributes + break + self._memo['rfile'] = result + return result + + def rstr(self): + return str(self.rfile()) + + def get_cachedir_csig(self): + """ + Fetch a Node's content signature for purposes of computing + another Node's cachesig. + + This is a wrapper around the normal get_csig() method that handles + the somewhat obscure case of using CacheDir with the -n option. + Any files that don't exist would normally be "built" by fetching + them from the cache, but the normal get_csig() method will try + to open up the local file, which doesn't exist because the -n + option meant we didn't actually pull the file from cachedir. + But since the file *does* actually exist in the cachedir, we + can use its contents for the csig. + """ + try: + return self.cachedir_csig + except AttributeError: + pass + + cachedir, cachefile = self.get_build_env().get_CacheDir().cachepath(self) + if not self.exists() and cachefile and os.path.exists(cachefile): + self.cachedir_csig = SCons.Util.MD5filesignature(cachefile, \ + SCons.Node.FS.File.md5_chunksize * 1024) + else: + self.cachedir_csig = self.get_csig() + return self.cachedir_csig + + def get_cachedir_bsig(self): + try: + return self.cachesig + except AttributeError: + pass + + # Add the path to the cache signature, because multiple + # targets built by the same action will all have the same + # build signature, and we have to differentiate them somehow. + children = self.children() + executor = self.get_executor() + # sigs = [n.get_cachedir_csig() for n in children] + sigs = map(lambda n: n.get_cachedir_csig(), children) + sigs.append(SCons.Util.MD5signature(executor.get_contents())) + sigs.append(self.path) + result = self.cachesig = SCons.Util.MD5collect(sigs) + return result + + +default_fs = None + +def get_default_fs(): + global default_fs + if not default_fs: + default_fs = FS() + return default_fs + +class FileFinder: + """ + """ + if SCons.Memoize.use_memoizer: + __metaclass__ = SCons.Memoize.Memoized_Metaclass + + memoizer_counters = [] + + def __init__(self): + self._memo = {} + + def filedir_lookup(self, p, fd=None): + """ + A helper method for find_file() that looks up a directory for + a file we're trying to find. 
This only creates the Dir Node if + it exists on-disk, since if the directory doesn't exist we know + we won't find any files in it... :-) + + It would be more compact to just use this as a nested function + with a default keyword argument (see the commented-out version + below), but that doesn't work unless you have nested scopes, + so we define it here just so this work under Python 1.5.2. + """ + if fd is None: + fd = self.default_filedir + dir, name = os.path.split(fd) + drive, d = os.path.splitdrive(dir) + if not name and d[:1] in ('/', os.sep): + #return p.fs.get_root(drive).dir_on_disk(name) + return p.fs.get_root(drive) + if dir: + p = self.filedir_lookup(p, dir) + if not p: + return None + norm_name = _my_normcase(name) + try: + node = p.entries[norm_name] + except KeyError: + return p.dir_on_disk(name) + if isinstance(node, Dir): + return node + if isinstance(node, Entry): + node.must_be_same(Dir) + return node + return None + + def _find_file_key(self, filename, paths, verbose=None): + return (filename, paths) + + memoizer_counters.append(SCons.Memoize.CountDict('find_file', _find_file_key)) + + def find_file(self, filename, paths, verbose=None): + """ + find_file(str, [Dir()]) -> [nodes] + + filename - a filename to find + paths - a list of directory path *nodes* to search in. Can be + represented as a list, a tuple, or a callable that is + called with no arguments and returns the list or tuple. + + returns - the node created from the found file. + + Find a node corresponding to either a derived file or a file + that exists already. + + Only the first file found is returned, and none is returned + if no file is found. + """ + memo_key = self._find_file_key(filename, paths) + try: + memo_dict = self._memo['find_file'] + except KeyError: + memo_dict = {} + self._memo['find_file'] = memo_dict + else: + try: + return memo_dict[memo_key] + except KeyError: + pass + + if verbose and not callable(verbose): + if not SCons.Util.is_String(verbose): + verbose = "find_file" + verbose = ' %s: ' % verbose + verbose = lambda s, v=verbose: sys.stdout.write(v + s) + + filedir, filename = os.path.split(filename) + if filedir: + # More compact code that we can't use until we drop + # support for Python 1.5.2: + # + #def filedir_lookup(p, fd=filedir): + # """ + # A helper function that looks up a directory for a file + # we're trying to find. This only creates the Dir Node + # if it exists on-disk, since if the directory doesn't + # exist we know we won't find any files in it... :-) + # """ + # dir, name = os.path.split(fd) + # if dir: + # p = filedir_lookup(p, dir) + # if not p: + # return None + # norm_name = _my_normcase(name) + # try: + # node = p.entries[norm_name] + # except KeyError: + # return p.dir_on_disk(name) + # if isinstance(node, Dir): + # return node + # if isinstance(node, Entry): + # node.must_be_same(Dir) + # return node + # if isinstance(node, Dir) or isinstance(node, Entry): + # return node + # return None + #paths = filter(None, map(filedir_lookup, paths)) + + self.default_filedir = filedir + paths = filter(None, map(self.filedir_lookup, paths)) + + result = None + for dir in paths: + if verbose: + verbose("looking for '%s' in '%s' ...\n" % (filename, dir)) + node, d = dir.srcdir_find_file(filename) + if node: + if verbose: + verbose("... 
FOUND '%s' in '%s'\n" % (filename, d)) + result = node + break + + memo_dict[memo_key] = result + + return result + +find_file = FileFinder().find_file + + +def invalidate_node_memos(targets): + """ + Invalidate the memoized values of all Nodes (files or directories) + that are associated with the given entries. Has been added to + clear the cache of nodes affected by a direct execution of an + action (e.g. Delete/Copy/Chmod). Existing Node caches become + inconsistent if the action is run through Execute(). The argument + `targets` can be a single Node object or filename, or a sequence + of Nodes/filenames. + """ + from traceback import extract_stack + + # First check if the cache really needs to be flushed. Only + # actions run in the SConscript with Execute() seem to be + # affected. XXX The way to check if Execute() is in the stacktrace + # is a very dirty hack and should be replaced by a more sensible + # solution. + for f in extract_stack(): + if f[2] == 'Execute' and f[0][-14:] == 'Environment.py': + break + else: + # Dont have to invalidate, so return + return + + if not SCons.Util.is_List(targets): + targets = [targets] + + for entry in targets: + # If the target is a Node object, clear the cache. If it is a + # filename, look up potentially existing Node object first. + try: + entry.clear_memoized_values() + except AttributeError: + # Not a Node object, try to look up Node by filename. XXX + # This creates Node objects even for those filenames which + # do not correspond to an existing Node object. + node = get_default_fs().Entry(entry) + if node: + node.clear_memoized_values() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Node/Python.py b/engine/SCons/Node/Python.py new file mode 100644 index 0000000..ef66c3c --- /dev/null +++ b/engine/SCons/Node/Python.py @@ -0,0 +1,128 @@ +"""scons.Node.Python + +Python nodes. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Node/Python.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Node + +class ValueNodeInfo(SCons.Node.NodeInfoBase): + current_version_id = 1 + + field_list = ['csig'] + + def str_to_node(self, s): + return Value(s) + +class ValueBuildInfo(SCons.Node.BuildInfoBase): + current_version_id = 1 + +class Value(SCons.Node.Node): + """A class for Python variables, typically passed on the command line + or generated by a script, but not from a file or some other source. + """ + + NodeInfo = ValueNodeInfo + BuildInfo = ValueBuildInfo + + def __init__(self, value, built_value=None): + SCons.Node.Node.__init__(self) + self.value = value + if built_value is not None: + self.built_value = built_value + + def str_for_display(self): + return repr(self.value) + + def __str__(self): + return str(self.value) + + def make_ready(self): + self.get_csig() + + def build(self, **kw): + if not hasattr(self, 'built_value'): + apply (SCons.Node.Node.build, (self,), kw) + + is_up_to_date = SCons.Node.Node.children_are_up_to_date + + def is_under(self, dir): + # Make Value nodes get built regardless of + # what directory scons was run from. Value nodes + # are outside the filesystem: + return 1 + + def write(self, built_value): + """Set the value of the node.""" + self.built_value = built_value + + def read(self): + """Return the value. If necessary, the value is built.""" + self.build() + if not hasattr(self, 'built_value'): + self.built_value = self.value + return self.built_value + + def get_text_contents(self): + """By the assumption that the node.built_value is a + deterministic product of the sources, the contents of a Value + are the concatenation of all the contents of its sources. As + the value need not be built when get_contents() is called, we + cannot use the actual node.built_value.""" + ###TODO: something reasonable about universal newlines + contents = str(self.value) + for kid in self.children(None): + contents = contents + kid.get_contents() + return contents + + get_contents = get_text_contents ###TODO should return 'bytes' value + + def changed_since_last_build(self, target, prev_ni): + cur_csig = self.get_csig() + try: + return cur_csig != prev_ni.csig + except AttributeError: + return 1 + + def get_csig(self, calc=None): + """Because we're a Python value node and don't have a real + timestamp, we get to ignore the calculator and just use the + value contents.""" + try: + return self.ninfo.csig + except AttributeError: + pass + contents = self.get_contents() + self.get_ninfo().csig = contents + return contents + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Node/__init__.py b/engine/SCons/Node/__init__.py new file mode 100644 index 0000000..6acc751 --- /dev/null +++ b/engine/SCons/Node/__init__.py @@ -0,0 +1,1341 @@ +"""SCons.Node + +The Node package for the SCons software construction utility. + +This is, in many ways, the heart of SCons. + +A Node is where we encapsulate all of the dependency information about +any thing that SCons can build, or about any thing which SCons can use +to build some other thing. The canonical "thing," of course, is a file, +but a Node can also represent something remote (like a web page) or +something completely abstract (like an Alias). + +Each specific type of "thing" is specifically represented by a subclass +of the Node base class: Node.FS.File for files, Node.Alias for aliases, +etc. 
Dependency information is kept here in the base class, and +information specific to files/aliases/etc. is in the subclass. The +goal, if we've done this correctly, is that any type of "thing" should +be able to depend on any other type of "thing." + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Node/__init__.py 4577 2009/12/27 19:43:56 scons" + +import copy +from itertools import chain, izip +import string +import UserList + +from SCons.Debug import logInstanceCreation +import SCons.Executor +import SCons.Memoize +import SCons.Util + +from SCons.Debug import Trace + +def classname(obj): + return string.split(str(obj.__class__), '.')[-1] + +# Node states +# +# These are in "priority" order, so that the maximum value for any +# child/dependency of a node represents the state of that node if +# it has no builder of its own. The canonical example is a file +# system directory, which is only up to date if all of its children +# were up to date. +no_state = 0 +pending = 1 +executing = 2 +up_to_date = 3 +executed = 4 +failed = 5 + +StateString = { + 0 : "no_state", + 1 : "pending", + 2 : "executing", + 3 : "up_to_date", + 4 : "executed", + 5 : "failed", +} + +# controls whether implicit dependencies are cached: +implicit_cache = 0 + +# controls whether implicit dep changes are ignored: +implicit_deps_unchanged = 0 + +# controls whether the cached implicit deps are ignored: +implicit_deps_changed = 0 + +# A variable that can be set to an interface-specific function be called +# to annotate a Node with information about its creation. +def do_nothing(node): pass + +Annotate = do_nothing + +# Classes for signature info for Nodes. + +class NodeInfoBase: + """ + The generic base class for signature information for a Node. + + Node subclasses should subclass NodeInfoBase to provide their own + logic for dealing with their own Node-specific signature information. + """ + current_version_id = 1 + def __init__(self, node): + # Create an object attribute from the class attribute so it ends up + # in the pickled data in the .sconsign file. 
+ self._version_id = self.current_version_id + def update(self, node): + try: + field_list = self.field_list + except AttributeError: + return + for f in field_list: + try: + delattr(self, f) + except AttributeError: + pass + try: + func = getattr(node, 'get_' + f) + except AttributeError: + pass + else: + setattr(self, f, func()) + def convert(self, node, val): + pass + def merge(self, other): + self.__dict__.update(other.__dict__) + def format(self, field_list=None, names=0): + if field_list is None: + try: + field_list = self.field_list + except AttributeError: + field_list = self.__dict__.keys() + field_list.sort() + fields = [] + for field in field_list: + try: + f = getattr(self, field) + except AttributeError: + f = None + f = str(f) + if names: + f = field + ': ' + f + fields.append(f) + return fields + +class BuildInfoBase: + """ + The generic base class for build information for a Node. + + This is what gets stored in a .sconsign file for each target file. + It contains a NodeInfo instance for this node (signature information + that's specific to the type of Node) and direct attributes for the + generic build stuff we have to track: sources, explicit dependencies, + implicit dependencies, and action information. + """ + current_version_id = 1 + def __init__(self, node): + # Create an object attribute from the class attribute so it ends up + # in the pickled data in the .sconsign file. + self._version_id = self.current_version_id + self.bsourcesigs = [] + self.bdependsigs = [] + self.bimplicitsigs = [] + self.bactsig = None + def merge(self, other): + self.__dict__.update(other.__dict__) + +class Node: + """The base Node class, for entities that we know how to + build, or use to build other Nodes. + """ + + if SCons.Memoize.use_memoizer: + __metaclass__ = SCons.Memoize.Memoized_Metaclass + + memoizer_counters = [] + + class Attrs: + pass + + def __init__(self): + if __debug__: logInstanceCreation(self, 'Node.Node') + # Note that we no longer explicitly initialize a self.builder + # attribute to None here. That's because the self.builder + # attribute may be created on-the-fly later by a subclass (the + # canonical example being a builder to fetch a file from a + # source code system like CVS or Subversion). + + # Each list of children that we maintain is accompanied by a + # dictionary used to look up quickly whether a node is already + # present in the list. Empirical tests showed that it was + # fastest to maintain them as side-by-side Node attributes in + # this way, instead of wrapping up each list+dictionary pair in + # a class. (Of course, we could always still do that in the + # future if we had a good reason to...). + self.sources = [] # source files used to build node + self.sources_set = set() + self._specific_sources = False + self.depends = [] # explicit dependencies (from Depends) + self.depends_set = set() + self.ignore = [] # dependencies to ignore + self.ignore_set = set() + self.prerequisites = SCons.Util.UniqueList() + self.implicit = None # implicit (scanned) dependencies (None means not scanned yet) + self.waiting_parents = set() + self.waiting_s_e = set() + self.ref_count = 0 + self.wkids = None # Kids yet to walk, when it's an array + + self.env = None + self.state = no_state + self.precious = None + self.noclean = 0 + self.nocache = 0 + self.always_build = None + self.includes = None + self.attributes = self.Attrs() # Generic place to stick information about the Node. 
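The NodeInfoBase.update() method above pulls every name in a subclass's field_list from a matching get_<name>() accessor on the node; ValueNodeInfo in Node/Python.py earlier in this diff, with field_list = ['csig'], is the smallest concrete case. A stand-alone sketch of that protocol, assuming the engine package is importable (illustrative only, not part of the upstream diff; FakeNode and MyNodeInfo are made up):

import SCons.Node

class FakeNode:
    # anything exposing get_<field>() methods satisfies update()
    def get_csig(self):
        return 'd41d8cd98f00b204e9800998ecf8427e'

class MyNodeInfo(SCons.Node.NodeInfoBase):
    current_version_id = 1
    field_list = ['csig']

info = MyNodeInfo(FakeNode())
info.update(FakeNode())
fields = info.format(names=1)   # ['csig: d41d8cd98f00b204e9800998ecf8427e']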
+ self.side_effect = 0 # true iff this node is a side effect + self.side_effects = [] # the side effects of building this target + self.linked = 0 # is this node linked to the variant directory? + + self.clear_memoized_values() + + # Let the interface in which the build engine is embedded + # annotate this Node with its own info (like a description of + # what line in what file created the node, for example). + Annotate(self) + + def disambiguate(self, must_exist=None): + return self + + def get_suffix(self): + return '' + + memoizer_counters.append(SCons.Memoize.CountValue('get_build_env')) + + def get_build_env(self): + """Fetch the appropriate Environment to build this node. + """ + try: + return self._memo['get_build_env'] + except KeyError: + pass + result = self.get_executor().get_build_env() + self._memo['get_build_env'] = result + return result + + def get_build_scanner_path(self, scanner): + """Fetch the appropriate scanner path for this node.""" + return self.get_executor().get_build_scanner_path(scanner) + + def set_executor(self, executor): + """Set the action executor for this node.""" + self.executor = executor + + def get_executor(self, create=1): + """Fetch the action executor for this node. Create one if + there isn't already one, and requested to do so.""" + try: + executor = self.executor + except AttributeError: + if not create: + raise + try: + act = self.builder.action + except AttributeError: + executor = SCons.Executor.Null(targets=[self]) + else: + executor = SCons.Executor.Executor(act, + self.env or self.builder.env, + [self.builder.overrides], + [self], + self.sources) + self.executor = executor + return executor + + def executor_cleanup(self): + """Let the executor clean up any cached information.""" + try: + executor = self.get_executor(create=None) + except AttributeError: + pass + else: + executor.cleanup() + + def reset_executor(self): + "Remove cached executor; forces recompute when needed." + try: + delattr(self, 'executor') + except AttributeError: + pass + + def push_to_cache(self): + """Try to push a node into a cache + """ + pass + + def retrieve_from_cache(self): + """Try to retrieve the node's content from a cache + + This method is called from multiple threads in a parallel build, + so only do thread safe stuff here. Do thread unsafe stuff in + built(). + + Returns true iff the node was successfully retrieved. + """ + return 0 + + # + # Taskmaster interface subsystem + # + + def make_ready(self): + """Get a Node ready for evaluation. + + This is called before the Taskmaster decides if the Node is + up-to-date or not. Overriding this method allows for a Node + subclass to be disambiguated if necessary, or for an implicit + source builder to be attached. + """ + pass + + def prepare(self): + """Prepare for this Node to be built. + + This is called after the Taskmaster has decided that the Node + is out-of-date and must be rebuilt, but before actually calling + the method to build the Node. + + This default implementation checks that explicit or implicit + dependencies either exist or are derived, and initializes the + BuildInfo structure that will hold the information about how + this node is, uh, built. + + (The existence of source files is checked separately by the + Executor, which aggregates checks for all of the targets built + by a specific action.) + + Overriding this method allows for for a Node subclass to remove + the underlying file from the file system. 
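get_build_env() above illustrates the memoization idiom used throughout this class: results live in the per-instance _memo dictionary, keyed by method name, and clear_memoized_values() discards them. A stripped-down stand-alone sketch of the same idiom (illustrative only, not part of the upstream diff; the class and its lookup are made up):

class MemoExample:
    # minimal copy of the Node._memo caching pattern
    def __init__(self):
        self._memo = {}

    def clear_memoized_values(self):
        self._memo = {}

    def get_build_env(self):
        try:
            return self._memo['get_build_env']
        except KeyError:
            pass
        result = self._expensive_lookup()      # computed at most once
        self._memo['get_build_env'] = result
        return result

    def _expensive_lookup(self):
        return {'CC': 'gcc'}                   # stand-in for the real work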
Note that subclass + methods should call this base class method to get the child + check and the BuildInfo structure. + """ + for d in self.depends: + if d.missing(): + msg = "Explicit dependency `%s' not found, needed by target `%s'." + raise SCons.Errors.StopError, msg % (d, self) + if self.implicit is not None: + for i in self.implicit: + if i.missing(): + msg = "Implicit dependency `%s' not found, needed by target `%s'." + raise SCons.Errors.StopError, msg % (i, self) + self.binfo = self.get_binfo() + + def build(self, **kw): + """Actually build the node. + + This is called by the Taskmaster after it's decided that the + Node is out-of-date and must be rebuilt, and after the prepare() + method has gotten everything, uh, prepared. + + This method is called from multiple threads in a parallel build, + so only do thread safe stuff here. Do thread unsafe stuff + in built(). + + """ + try: + apply(self.get_executor(), (self,), kw) + except SCons.Errors.BuildError, e: + e.node = self + raise + + def built(self): + """Called just after this node is successfully built.""" + + # Clear the implicit dependency caches of any Nodes + # waiting for this Node to be built. + for parent in self.waiting_parents: + parent.implicit = None + + self.clear() + + self.ninfo.update(self) + + def visited(self): + """Called just after this node has been visited (with or + without a build).""" + try: + binfo = self.binfo + except AttributeError: + # Apparently this node doesn't need build info, so + # don't bother calculating or storing it. + pass + else: + self.ninfo.update(self) + self.store_info() + + # + # + # + + def add_to_waiting_s_e(self, node): + self.waiting_s_e.add(node) + + def add_to_waiting_parents(self, node): + """ + Returns the number of nodes added to our waiting parents list: + 1 if we add a unique waiting parent, 0 if not. (Note that the + returned values are intended to be used to increment a reference + count, so don't think you can "clean up" this function by using + True and False instead...) + """ + wp = self.waiting_parents + if node in wp: + return 0 + wp.add(node) + return 1 + + def postprocess(self): + """Clean up anything we don't need to hang onto after we've + been built.""" + self.executor_cleanup() + self.waiting_parents = set() + + def clear(self): + """Completely clear a Node of all its cached state (so that it + can be re-evaluated by interfaces that do continuous integration + builds). + """ + # The del_binfo() call here isn't necessary for normal execution, + # but is for interactive mode, where we might rebuild the same + # target and need to start from scratch. + self.del_binfo() + self.clear_memoized_values() + self.ninfo = self.new_ninfo() + self.executor_cleanup() + try: + delattr(self, '_calculated_sig') + except AttributeError: + pass + self.includes = None + + def clear_memoized_values(self): + self._memo = {} + + def builder_set(self, builder): + self.builder = builder + try: + del self.executor + except AttributeError: + pass + + def has_builder(self): + """Return whether this Node has a builder or not. + + In Boolean tests, this turns out to be a *lot* more efficient + than simply examining the builder attribute directly ("if + node.builder: ..."). When the builder attribute is examined + directly, it ends up calling __getattr__ for both the __len__ + and __nonzero__ attributes on instances of our Builder Proxy + class(es), generating a bazillion extra calls and slowing + things down immensely. 
+ """ + try: + b = self.builder + except AttributeError: + # There was no explicit builder for this Node, so initialize + # the self.builder attribute to None now. + b = self.builder = None + return b is not None + + def set_explicit(self, is_explicit): + self.is_explicit = is_explicit + + def has_explicit_builder(self): + """Return whether this Node has an explicit builder + + This allows an internal Builder created by SCons to be marked + non-explicit, so that it can be overridden by an explicit + builder that the user supplies (the canonical example being + directories).""" + try: + return self.is_explicit + except AttributeError: + self.is_explicit = None + return self.is_explicit + + def get_builder(self, default_builder=None): + """Return the set builder, or a specified default value""" + try: + return self.builder + except AttributeError: + return default_builder + + multiple_side_effect_has_builder = has_builder + + def is_derived(self): + """ + Returns true iff this node is derived (i.e. built). + + This should return true only for nodes whose path should be in + the variant directory when duplicate=0 and should contribute their build + signatures when they are used as source files to other derived files. For + example: source with source builders are not derived in this sense, + and hence should not return true. + """ + return self.has_builder() or self.side_effect + + def alter_targets(self): + """Return a list of alternate targets for this Node. + """ + return [], None + + def get_found_includes(self, env, scanner, path): + """Return the scanned include lines (implicit dependencies) + found in this node. + + The default is no implicit dependencies. We expect this method + to be overridden by any subclass that can be scanned for + implicit dependencies. + """ + return [] + + def get_implicit_deps(self, env, scanner, path): + """Return a list of implicit dependencies for this node. + + This method exists to handle recursive invocation of the scanner + on the implicit dependencies returned by the scanner, if the + scanner's recursive flag says that we should. + """ + if not scanner: + return [] + + # Give the scanner a chance to select a more specific scanner + # for this Node. + #scanner = scanner.select(self) + + nodes = [self] + seen = {} + seen[self] = 1 + deps = [] + while nodes: + n = nodes.pop(0) + d = filter(lambda x, seen=seen: not seen.has_key(x), + n.get_found_includes(env, scanner, path)) + if d: + deps.extend(d) + for n in d: + seen[n] = 1 + nodes.extend(scanner.recurse_nodes(d)) + + return deps + + def get_env_scanner(self, env, kw={}): + return env.get_scanner(self.scanner_key()) + + def get_target_scanner(self): + return self.builder.target_scanner + + def get_source_scanner(self, node): + """Fetch the source scanner for the specified node + + NOTE: "self" is the target being built, "node" is + the source file for which we want to fetch the scanner. + + Implies self.has_builder() is true; again, expect to only be + called from locations where this is already verified. + + This function may be called very often; it attempts to cache + the scanner found to improve performance. + """ + scanner = None + try: + scanner = self.builder.source_scanner + except AttributeError: + pass + if not scanner: + # The builder didn't have an explicit scanner, so go look up + # a scanner from env['SCANNERS'] based on the node's scanner + # key (usually the file extension). 
+ scanner = self.get_env_scanner(self.get_build_env()) + if scanner: + scanner = scanner.select(node) + return scanner + + def add_to_implicit(self, deps): + if not hasattr(self, 'implicit') or self.implicit is None: + self.implicit = [] + self.implicit_set = set() + self._children_reset() + self._add_child(self.implicit, self.implicit_set, deps) + + def scan(self): + """Scan this node's dependents for implicit dependencies.""" + # Don't bother scanning non-derived files, because we don't + # care what their dependencies are. + # Don't scan again, if we already have scanned. + if self.implicit is not None: + return + self.implicit = [] + self.implicit_set = set() + self._children_reset() + if not self.has_builder(): + return + + build_env = self.get_build_env() + executor = self.get_executor() + + # Here's where we implement --implicit-cache. + if implicit_cache and not implicit_deps_changed: + implicit = self.get_stored_implicit() + if implicit is not None: + # We now add the implicit dependencies returned from the + # stored .sconsign entry to have already been converted + # to Nodes for us. (We used to run them through a + # source_factory function here.) + + # Update all of the targets with them. This + # essentially short-circuits an N*M scan of the + # sources for each individual target, which is a hell + # of a lot more efficient. + for tgt in executor.get_all_targets(): + tgt.add_to_implicit(implicit) + + if implicit_deps_unchanged or self.is_up_to_date(): + return + # one of this node's sources has changed, + # so we must recalculate the implicit deps: + self.implicit = [] + self.implicit_set = set() + + # Have the executor scan the sources. + executor.scan_sources(self.builder.source_scanner) + + # If there's a target scanner, have the executor scan the target + # node itself and associated targets that might be built. + scanner = self.get_target_scanner() + if scanner: + executor.scan_targets(scanner) + + def scanner_key(self): + return None + + def select_scanner(self, scanner): + """Selects a scanner for this Node. + + This is a separate method so it can be overridden by Node + subclasses (specifically, Node.FS.Dir) that *must* use their + own Scanner and don't select one the Scanner.Selector that's + configured for the target. + """ + return scanner.select(self) + + def env_set(self, env, safe=0): + if safe and self.env: + return + self.env = env + + # + # SIGNATURE SUBSYSTEM + # + + NodeInfo = NodeInfoBase + BuildInfo = BuildInfoBase + + def new_ninfo(self): + ninfo = self.NodeInfo(self) + return ninfo + + def get_ninfo(self): + try: + return self.ninfo + except AttributeError: + self.ninfo = self.new_ninfo() + return self.ninfo + + def new_binfo(self): + binfo = self.BuildInfo(self) + return binfo + + def get_binfo(self): + """ + Fetch a node's build information. + + node - the node whose sources will be collected + cache - alternate node to use for the signature cache + returns - the build signature + + This no longer handles the recursive descent of the + node's children's signatures. We expect that they're + already built and updated by someone else, if that's + what's wanted. 
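The scan() method above is where the module-level implicit_cache / implicit_deps_changed / implicit_deps_unchanged switches, set from the --implicit-cache family of command-line options, take effect. A sketch of the kind of dependency it manages, seen from the SConstruct side (illustrative only, not part of the upstream diff; the file names are hypothetical):

env = Environment(CPPPATH=['include'])
env.Program('app', ['main.c'])
# If main.c contains  #include "config.h" , the C scanner records
# include/config.h as an implicit dependency of main.o.  Running
#   scons --implicit-cache
# stores that scan result in the .sconsign database and reuses it on
# later runs; --implicit-deps-changed forces a fresh scan instead.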
+ """ + try: + return self.binfo + except AttributeError: + pass + + binfo = self.new_binfo() + self.binfo = binfo + + executor = self.get_executor() + ignore_set = self.ignore_set + + if self.has_builder(): + binfo.bact = str(executor) + binfo.bactsig = SCons.Util.MD5signature(executor.get_contents()) + + if self._specific_sources: + sources = [] + for s in self.sources: + if s not in ignore_set: + sources.append(s) + else: + sources = executor.get_unignored_sources(self, self.ignore) + seen = set() + bsources = [] + bsourcesigs = [] + for s in sources: + if not s in seen: + seen.add(s) + bsources.append(s) + bsourcesigs.append(s.get_ninfo()) + binfo.bsources = bsources + binfo.bsourcesigs = bsourcesigs + + depends = self.depends + dependsigs = [] + for d in depends: + if d not in ignore_set: + dependsigs.append(d.get_ninfo()) + binfo.bdepends = depends + binfo.bdependsigs = dependsigs + + implicit = self.implicit or [] + implicitsigs = [] + for i in implicit: + if i not in ignore_set: + implicitsigs.append(i.get_ninfo()) + binfo.bimplicit = implicit + binfo.bimplicitsigs = implicitsigs + + return binfo + + def del_binfo(self): + """Delete the build info from this node.""" + try: + delattr(self, 'binfo') + except AttributeError: + pass + + def get_csig(self): + try: + return self.ninfo.csig + except AttributeError: + ninfo = self.get_ninfo() + ninfo.csig = SCons.Util.MD5signature(self.get_contents()) + return self.ninfo.csig + + def get_cachedir_csig(self): + return self.get_csig() + + def store_info(self): + """Make the build signature permanent (that is, store it in the + .sconsign file or equivalent).""" + pass + + def do_not_store_info(self): + pass + + def get_stored_info(self): + return None + + def get_stored_implicit(self): + """Fetch the stored implicit dependencies""" + return None + + # + # + # + + def set_precious(self, precious = 1): + """Set the Node's precious value.""" + self.precious = precious + + def set_noclean(self, noclean = 1): + """Set the Node's noclean value.""" + # Make sure noclean is an integer so the --debug=stree + # output in Util.py can use it as an index. + self.noclean = noclean and 1 or 0 + + def set_nocache(self, nocache = 1): + """Set the Node's nocache value.""" + # Make sure nocache is an integer so the --debug=stree + # output in Util.py can use it as an index. 
+ self.nocache = nocache and 1 or 0 + + def set_always_build(self, always_build = 1): + """Set the Node's always_build value.""" + self.always_build = always_build + + def exists(self): + """Does this node exists?""" + # All node exist by default: + return 1 + + def rexists(self): + """Does this node exist locally or in a repositiory?""" + # There are no repositories by default: + return self.exists() + + def missing(self): + return not self.is_derived() and \ + not self.linked and \ + not self.rexists() + + def remove(self): + """Remove this Node: no-op by default.""" + return None + + def add_dependency(self, depend): + """Adds dependencies.""" + try: + self._add_child(self.depends, self.depends_set, depend) + except TypeError, e: + e = e.args[0] + if SCons.Util.is_List(e): + s = map(str, e) + else: + s = str(e) + raise SCons.Errors.UserError("attempted to add a non-Node dependency to %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e))) + + def add_prerequisite(self, prerequisite): + """Adds prerequisites""" + self.prerequisites.extend(prerequisite) + self._children_reset() + + def add_ignore(self, depend): + """Adds dependencies to ignore.""" + try: + self._add_child(self.ignore, self.ignore_set, depend) + except TypeError, e: + e = e.args[0] + if SCons.Util.is_List(e): + s = map(str, e) + else: + s = str(e) + raise SCons.Errors.UserError("attempted to ignore a non-Node dependency of %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e))) + + def add_source(self, source): + """Adds sources.""" + if self._specific_sources: + return + try: + self._add_child(self.sources, self.sources_set, source) + except TypeError, e: + e = e.args[0] + if SCons.Util.is_List(e): + s = map(str, e) + else: + s = str(e) + raise SCons.Errors.UserError("attempted to add a non-Node as source of %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e))) + + def _add_child(self, collection, set, child): + """Adds 'child' to 'collection', first checking 'set' to see if it's + already present.""" + #if type(child) is not type([]): + # child = [child] + #for c in child: + # if not isinstance(c, Node): + # raise TypeError, c + added = None + for c in child: + if c not in set: + set.add(c) + collection.append(c) + added = 1 + if added: + self._children_reset() + + def set_specific_source(self, source): + self.add_source(source) + self._specific_sources = True + + def add_wkid(self, wkid): + """Add a node to the list of kids waiting to be evaluated""" + if self.wkids is not None: + self.wkids.append(wkid) + + def _children_reset(self): + self.clear_memoized_values() + # We need to let the Executor clear out any calculated + # build info that it's cached so we can re-calculate it. + self.executor_cleanup() + + memoizer_counters.append(SCons.Memoize.CountValue('_children_get')) + + def _children_get(self): + try: + return self._memo['children_get'] + except KeyError: + pass + + # The return list may contain duplicate Nodes, especially in + # source trees where there are a lot of repeated #includes + # of a tangle of .h files. Profiling shows, however, that + # eliminating the duplicates with a brute-force approach that + # preserves the order (that is, something like: + # + # u = [] + # for n in list: + # if n not in u: + # u.append(n)" + # + # takes more cycles than just letting the underlying methods + # hand back cached values if a Node's information is requested + # multiple times. 
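The set_precious/set_noclean/set_nocache/set_always_build setters above are the engine side of the corresponding SConstruct-level calls; a sketch of that user-facing side (illustrative only, not part of the upstream diff; the library and source names are made up):

env = Environment()
lib = env.StaticLibrary('core', ['core.c'])

Precious(lib)       # keep the old file instead of unlinking before rebuild
NoClean(lib)        # leave it alone when running `scons -c`
NoCache(lib)        # never push it to, or fetch it from, a CacheDir
AlwaysBuild(lib)    # rebuild on every run regardless of signatures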
(Other methods of removing duplicates, like + # using dictionary keys, lose the order, and the only ordered + # dictionary patterns I found all ended up using "not in" + # internally anyway...) + if self.ignore_set: + if self.implicit is None: + iter = chain(self.sources,self.depends) + else: + iter = chain(self.sources, self.depends, self.implicit) + + children = [] + for i in iter: + if i not in self.ignore_set: + children.append(i) + else: + if self.implicit is None: + children = self.sources + self.depends + else: + children = self.sources + self.depends + self.implicit + + self._memo['children_get'] = children + return children + + def all_children(self, scan=1): + """Return a list of all the node's direct children.""" + if scan: + self.scan() + + # The return list may contain duplicate Nodes, especially in + # source trees where there are a lot of repeated #includes + # of a tangle of .h files. Profiling shows, however, that + # eliminating the duplicates with a brute-force approach that + # preserves the order (that is, something like: + # + # u = [] + # for n in list: + # if n not in u: + # u.append(n)" + # + # takes more cycles than just letting the underlying methods + # hand back cached values if a Node's information is requested + # multiple times. (Other methods of removing duplicates, like + # using dictionary keys, lose the order, and the only ordered + # dictionary patterns I found all ended up using "not in" + # internally anyway...) + if self.implicit is None: + return self.sources + self.depends + else: + return self.sources + self.depends + self.implicit + + def children(self, scan=1): + """Return a list of the node's direct children, minus those + that are ignored by this node.""" + if scan: + self.scan() + return self._children_get() + + def set_state(self, state): + self.state = state + + def get_state(self): + return self.state + + def state_has_changed(self, target, prev_ni): + return (self.state != SCons.Node.up_to_date) + + def get_env(self): + env = self.env + if not env: + import SCons.Defaults + env = SCons.Defaults.DefaultEnvironment() + return env + + def changed_since_last_build(self, target, prev_ni): + """ + + Must be overridden in a specific subclass to return True if this + Node (a dependency) has changed since the last time it was used + to build the specified target. prev_ni is this Node's state (for + example, its file timestamp, length, maybe content signature) + as of the last time the target was built. + + Note that this method is called through the dependency, not the + target, because a dependency Node must be able to use its own + logic to decide if it changed. For example, File Nodes need to + obey if we're configured to use timestamps, but Python Value Nodes + never use timestamps and always use the content. If this method + were called through the target, then each Node's implementation + of this method would have to have more complicated logic to + handle all the different Node types on which it might depend. + """ + raise NotImplementedError + + def Decider(self, function): + SCons.Util.AddMethod(self, function, 'changed_since_last_build') + + def changed(self, node=None): + """ + Returns if the node is up-to-date with respect to the BuildInfo + stored last time it was built. The default behavior is to compare + it against our own previously stored BuildInfo, but the stored + BuildInfo from another Node (typically one in a Repository) + can be used instead. + + Note that we now *always* check every dependency. 
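changed_since_last_build() above is the hook that the Decider() method rebinds, which is also what the user-level env.Decider() call feeds. A sketch of a custom decision function, assuming the documented (dependency, target, prev_ni) calling convention (illustrative only, not part of the upstream diff):

env = Environment()

def decide_if_changed(dependency, target, prev_ni):
    # prev_ni is the dependency's NodeInfo stored when `target` was
    # last built; returning True marks the dependency as changed.
    try:
        return dependency.get_csig() != prev_ni.csig
    except AttributeError:
        return True            # no stored info yet: treat as changed

env.Decider(decide_if_changed)
# or one of the built-in policies:
# env.Decider('MD5-timestamp')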
We used to + short-circuit the check by returning as soon as we detected + any difference, but we now rely on checking every dependency + to make sure that any necessary Node information (for example, + the content signature of an #included .h file) is updated. + """ + t = 0 + if t: Trace('changed(%s [%s], %s)' % (self, classname(self), node)) + if node is None: + node = self + + result = False + + bi = node.get_stored_info().binfo + then = bi.bsourcesigs + bi.bdependsigs + bi.bimplicitsigs + children = self.children() + + diff = len(children) - len(then) + if diff: + # The old and new dependency lists are different lengths. + # This always indicates that the Node must be rebuilt. + # We also extend the old dependency list with enough None + # entries to equal the new dependency list, for the benefit + # of the loop below that updates node information. + then.extend([None] * diff) + if t: Trace(': old %s new %s' % (len(then), len(children))) + result = True + + for child, prev_ni in izip(children, then): + if child.changed_since_last_build(self, prev_ni): + if t: Trace(': %s changed' % child) + result = True + + contents = self.get_executor().get_contents() + if self.has_builder(): + import SCons.Util + newsig = SCons.Util.MD5signature(contents) + if bi.bactsig != newsig: + if t: Trace(': bactsig %s != newsig %s' % (bi.bactsig, newsig)) + result = True + + if not result: + if t: Trace(': up to date') + + if t: Trace('\n') + + return result + + def is_up_to_date(self): + """Default check for whether the Node is current: unknown Node + subtypes are always out of date, so they will always get built.""" + return None + + def children_are_up_to_date(self): + """Alternate check for whether the Node is current: If all of + our children were up-to-date, then this Node was up-to-date, too. + + The SCons.Node.Alias and SCons.Node.Python.Value subclasses + rebind their current() method to this method.""" + # Allow the children to calculate their signatures. + self.binfo = self.get_binfo() + if self.always_build: + return None + state = 0 + for kid in self.children(None): + s = kid.get_state() + if s and (not state or s > state): + state = s + return (state == 0 or state == SCons.Node.up_to_date) + + def is_literal(self): + """Always pass the string representation of a Node to + the command interpreter literally.""" + return 1 + + def render_include_tree(self): + """ + Return a text representation, suitable for displaying to the + user, of the include tree for the sources of this node. + """ + if self.is_derived() and self.env: + env = self.get_build_env() + for s in self.sources: + scanner = self.get_source_scanner(s) + if scanner: + path = self.get_build_scanner_path(scanner) + else: + path = None + def f(node, env=env, scanner=scanner, path=path): + return node.get_found_includes(env, scanner, path) + return SCons.Util.render_tree(s, f, 1) + else: + return None + + def get_abspath(self): + """ + Return an absolute path to the Node. This will return simply + str(Node) by default, but for Node types that have a concept of + relative path, this might return something different. + """ + return str(self) + + def for_signature(self): + """ + Return a string representation of the Node that will always + be the same for this particular Node, no matter what. This + is by contrast to the __str__() method, which might, for + instance, return a relative path for a file Node. 
The purpose + of this method is to generate a value to be used in signature + calculation for the command line used to build a target, and + we use this method instead of str() to avoid unnecessary + rebuilds. This method does not need to return something that + would actually work in a command line; it can return any kind of + nonsense, so long as it does not change. + """ + return str(self) + + def get_string(self, for_signature): + """This is a convenience function designed primarily to be + used in command generators (i.e., CommandGeneratorActions or + Environment variables that are callable), which are called + with a for_signature argument that is nonzero if the command + generator is being called to generate a signature for the + command line, which determines if we should rebuild or not. + + Such command generators should use this method in preference + to str(Node) when converting a Node to a string, passing + in the for_signature parameter, such that we will call + Node.for_signature() or str(Node) properly, depending on whether + we are calculating a signature or actually constructing a + command line.""" + if for_signature: + return self.for_signature() + return str(self) + + def get_subst_proxy(self): + """ + This method is expected to return an object that will function + exactly like this Node, except that it implements any additional + special features that we would like to be in effect for + Environment variable substitution. The principle use is that + some Nodes would like to implement a __getattr__() method, + but putting that in the Node type itself has a tendency to kill + performance. We instead put it in a proxy and return it from + this method. It is legal for this method to return self + if no new functionality is needed for Environment substitution. + """ + return self + + def explain(self): + if not self.exists(): + return "building `%s' because it doesn't exist\n" % self + + if self.always_build: + return "rebuilding `%s' because AlwaysBuild() is specified\n" % self + + old = self.get_stored_info() + if old is None: + return None + + old = old.binfo + old.prepare_dependencies() + + try: + old_bkids = old.bsources + old.bdepends + old.bimplicit + old_bkidsigs = old.bsourcesigs + old.bdependsigs + old.bimplicitsigs + except AttributeError: + return "Cannot explain why `%s' is being rebuilt: No previous build information found\n" % self + + new = self.get_binfo() + + new_bkids = new.bsources + new.bdepends + new.bimplicit + new_bkidsigs = new.bsourcesigs + new.bdependsigs + new.bimplicitsigs + + osig = dict(izip(old_bkids, old_bkidsigs)) + nsig = dict(izip(new_bkids, new_bkidsigs)) + + # The sources and dependencies we'll want to report are all stored + # as relative paths to this target's directory, but we want to + # report them relative to the top-level SConstruct directory, + # so we only print them after running them through this lambda + # to turn them into the right relative Node and then return + # its string. 
+ def stringify( s, E=self.dir.Entry ) : + if hasattr( s, 'dir' ) : + return str(E(s)) + return str(s) + + lines = [] + + removed = filter(lambda x, nk=new_bkids: not x in nk, old_bkids) + if removed: + removed = map(stringify, removed) + fmt = "`%s' is no longer a dependency\n" + lines.extend(map(lambda s, fmt=fmt: fmt % s, removed)) + + for k in new_bkids: + if not k in old_bkids: + lines.append("`%s' is a new dependency\n" % stringify(k)) + elif k.changed_since_last_build(self, osig[k]): + lines.append("`%s' changed\n" % stringify(k)) + + if len(lines) == 0 and old_bkids != new_bkids: + lines.append("the dependency order changed:\n" + + "%sold: %s\n" % (' '*15, map(stringify, old_bkids)) + + "%snew: %s\n" % (' '*15, map(stringify, new_bkids))) + + if len(lines) == 0: + def fmt_with_title(title, strlines): + lines = string.split(strlines, '\n') + sep = '\n' + ' '*(15 + len(title)) + return ' '*15 + title + string.join(lines, sep) + '\n' + if old.bactsig != new.bactsig: + if old.bact == new.bact: + lines.append("the contents of the build action changed\n" + + fmt_with_title('action: ', new.bact)) + else: + lines.append("the build action changed:\n" + + fmt_with_title('old: ', old.bact) + + fmt_with_title('new: ', new.bact)) + + if len(lines) == 0: + return "rebuilding `%s' for unknown reasons\n" % self + + preamble = "rebuilding `%s' because" % self + if len(lines) == 1: + return "%s %s" % (preamble, lines[0]) + else: + lines = ["%s:\n" % preamble] + lines + return string.join(lines, ' '*11) + +try: + [].extend(UserList.UserList([])) +except TypeError: + # Python 1.5.2 doesn't allow a list to be extended by list-like + # objects (such as UserList instances), so just punt and use + # real lists. + def NodeList(l): + return l +else: + class NodeList(UserList.UserList): + def __str__(self): + return str(map(str, self.data)) + +def get_children(node, parent): return node.children() +def ignore_cycle(node, stack): pass +def do_nothing(node, parent): pass + +class Walker: + """An iterator for walking a Node tree. + + This is depth-first, children are visited before the parent. + The Walker object can be initialized with any node, and + returns the next node on the descent with each next() call. + 'kids_func' is an optional function that will be called to + get the children of a node instead of calling 'children'. + 'cycle_func' is an optional function that will be called + when a cycle is detected. + + This class does not get caught in node cycles caused, for example, + by C header file include loops. + """ + def __init__(self, node, kids_func=get_children, + cycle_func=ignore_cycle, + eval_func=do_nothing): + self.kids_func = kids_func + self.cycle_func = cycle_func + self.eval_func = eval_func + node.wkids = copy.copy(kids_func(node, None)) + self.stack = [node] + self.history = {} # used to efficiently detect and avoid cycles + self.history[node] = None + + def next(self): + """Return the next node for this walk of the tree. + + This function is intentionally iterative, not recursive, + to sidestep any issues of stack size limitations. 
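A stand-alone sketch of driving the Walker class above (illustrative only, not part of the upstream diff; Thing is made up): per the default get_children() helper, it only needs objects with a children() method and a writable wkids attribute, and it yields children before their parent.

import SCons.Node

class Thing:
    # minimal node-like object: children() plus a wkids slot
    def __init__(self, name, kids=()):
        self.name = name
        self.kids = list(kids)
        self.wkids = None
    def children(self):
        return self.kids

root = Thing('root', [Thing('a'), Thing('b')])

walker = SCons.Node.Walker(root)
order = []
node = walker.next()
while node is not None:
    order.append(node.name)
    node = walker.next()
# order == ['a', 'b', 'root']  -- children first, then the parent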
+ """ + + while self.stack: + if self.stack[-1].wkids: + node = self.stack[-1].wkids.pop(0) + if not self.stack[-1].wkids: + self.stack[-1].wkids = None + if self.history.has_key(node): + self.cycle_func(node, self.stack) + else: + node.wkids = copy.copy(self.kids_func(node, self.stack[-1])) + self.stack.append(node) + self.history[node] = None + else: + node = self.stack.pop() + del self.history[node] + if node: + if self.stack: + parent = self.stack[-1] + else: + parent = None + self.eval_func(node, parent) + return node + return None + + def is_done(self): + return not self.stack + + +arg2nodes_lookups = [] + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Options/BoolOption.py b/engine/SCons/Options/BoolOption.py new file mode 100644 index 0000000..600fc15 --- /dev/null +++ b/engine/SCons/Options/BoolOption.py @@ -0,0 +1,50 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Options/BoolOption.py 4577 2009/12/27 19:43:56 scons" + +__doc__ = """Place-holder for the old SCons.Options module hierarchy + +This is for backwards compatibility. The new equivalent is the Variables/ +class hierarchy. These will have deprecation warnings added (some day), +and will then be removed entirely (some day). +""" + +import SCons.Variables +import SCons.Warnings + +warned = False + +def BoolOption(*args, **kw): + global warned + if not warned: + msg = "The BoolOption() function is deprecated; use the BoolVariable() function instead." 
+ SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg) + warned = True + return apply(SCons.Variables.BoolVariable, args, kw) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Options/EnumOption.py b/engine/SCons/Options/EnumOption.py new file mode 100644 index 0000000..a89ab88 --- /dev/null +++ b/engine/SCons/Options/EnumOption.py @@ -0,0 +1,50 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Options/EnumOption.py 4577 2009/12/27 19:43:56 scons" + +__doc__ = """Place-holder for the old SCons.Options module hierarchy + +This is for backwards compatibility. The new equivalent is the Variables/ +class hierarchy. These will have deprecation warnings added (some day), +and will then be removed entirely (some day). +""" + +import SCons.Variables +import SCons.Warnings + +warned = False + +def EnumOption(*args, **kw): + global warned + if not warned: + msg = "The EnumOption() function is deprecated; use the EnumVariable() function instead." + SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg) + warned = True + return apply(SCons.Variables.EnumVariable, args, kw) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Options/ListOption.py b/engine/SCons/Options/ListOption.py new file mode 100644 index 0000000..049b45b --- /dev/null +++ b/engine/SCons/Options/ListOption.py @@ -0,0 +1,50 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Options/ListOption.py 4577 2009/12/27 19:43:56 scons" + +__doc__ = """Place-holder for the old SCons.Options module hierarchy + +This is for backwards compatibility. The new equivalent is the Variables/ +class hierarchy. These will have deprecation warnings added (some day), +and will then be removed entirely (some day). +""" + +import SCons.Variables +import SCons.Warnings + +warned = False + +def ListOption(*args, **kw): + global warned + if not warned: + msg = "The ListOption() function is deprecated; use the ListVariable() function instead." + SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg) + warned = True + return apply(SCons.Variables.ListVariable, args, kw) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Options/PackageOption.py b/engine/SCons/Options/PackageOption.py new file mode 100644 index 0000000..7464110 --- /dev/null +++ b/engine/SCons/Options/PackageOption.py @@ -0,0 +1,50 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Options/PackageOption.py 4577 2009/12/27 19:43:56 scons" + +__doc__ = """Place-holder for the old SCons.Options module hierarchy + +This is for backwards compatibility. The new equivalent is the Variables/ +class hierarchy. These will have deprecation warnings added (some day), +and will then be removed entirely (some day). +""" + +import SCons.Variables +import SCons.Warnings + +warned = False + +def PackageOption(*args, **kw): + global warned + if not warned: + msg = "The PackageOption() function is deprecated; use the PackageVariable() function instead." 
+ SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg) + warned = True + return apply(SCons.Variables.PackageVariable, args, kw) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Options/PathOption.py b/engine/SCons/Options/PathOption.py new file mode 100644 index 0000000..7c46b92 --- /dev/null +++ b/engine/SCons/Options/PathOption.py @@ -0,0 +1,76 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Options/PathOption.py 4577 2009/12/27 19:43:56 scons" + +__doc__ = """Place-holder for the old SCons.Options module hierarchy + +This is for backwards compatibility. The new equivalent is the Variables/ +class hierarchy. These will have deprecation warnings added (some day), +and will then be removed entirely (some day). +""" + +import SCons.Variables +import SCons.Warnings + +warned = False + +class _PathOptionClass: + def warn(self): + global warned + if not warned: + msg = "The PathOption() function is deprecated; use the PathVariable() function instead." 
+ SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg) + warned = True + + def __call__(self, *args, **kw): + self.warn() + return apply(SCons.Variables.PathVariable, args, kw) + + def PathAccept(self, *args, **kw): + self.warn() + return apply(SCons.Variables.PathVariable.PathAccept, args, kw) + + def PathIsDir(self, *args, **kw): + self.warn() + return apply(SCons.Variables.PathVariable.PathIsDir, args, kw) + + def PathIsDirCreate(self, *args, **kw): + self.warn() + return apply(SCons.Variables.PathVariable.PathIsDirCreate, args, kw) + + def PathIsFile(self, *args, **kw): + self.warn() + return apply(SCons.Variables.PathVariable.PathIsFile, args, kw) + + def PathExists(self, *args, **kw): + self.warn() + return apply(SCons.Variables.PathVariable.PathExists, args, kw) + +PathOption = _PathOptionClass() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Options/__init__.py b/engine/SCons/Options/__init__.py new file mode 100644 index 0000000..afa28e8 --- /dev/null +++ b/engine/SCons/Options/__init__.py @@ -0,0 +1,74 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Options/__init__.py 4577 2009/12/27 19:43:56 scons" + +__doc__ = """Place-holder for the old SCons.Options module hierarchy + +This is for backwards compatibility. The new equivalent is the Variables/ +class hierarchy. These will have deprecation warnings added (some day), +and will then be removed entirely (some day). +""" + +import SCons.Variables +import SCons.Warnings + +from BoolOption import BoolOption # okay +from EnumOption import EnumOption # okay +from ListOption import ListOption # naja +from PackageOption import PackageOption # naja +from PathOption import PathOption # okay + +warned = False + +class Options(SCons.Variables.Variables): + def __init__(self, *args, **kw): + global warned + if not warned: + msg = "The Options class is deprecated; use the Variables class instead." 
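Taken together, the Options class and the *Option() wrappers in this subtree just forward the old spellings to the Variables API. In SConstruct terms the migration they describe looks roughly like this (a sketch, not part of the upstream diff; the option names and config file are made up):

# Old spelling -- still accepted, but triggers DeprecatedOptionsWarning:
opts = Options('build.conf')
opts.AddOptions(
    BoolOption('debug', 'build with debugging symbols', 0),
    EnumOption('toolchain', 'compiler family', 'gcc',
               allowed_values=('gcc', 'msvc')))
env_old = Environment(options=opts)

# New spelling that the wrappers forward to:
vars = Variables('build.conf')
vars.AddVariables(
    BoolVariable('debug', 'build with debugging symbols', 0),
    EnumVariable('toolchain', 'compiler family', 'gcc',
                 allowed_values=('gcc', 'msvc')))
env_new = Environment(variables=vars)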
+ SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg) + warned = True + apply(SCons.Variables.Variables.__init__, + (self,) + args, + kw) + + def AddOptions(self, *args, **kw): + return apply(SCons.Variables.Variables.AddVariables, + (self,) + args, + kw) + + def UnknownOptions(self, *args, **kw): + return apply(SCons.Variables.Variables.UnknownVariables, + (self,) + args, + kw) + + def FormatOptionHelpText(self, *args, **kw): + return apply(SCons.Variables.Variables.FormatVariableHelpText, + (self,) + args, + kw) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/PathList.py b/engine/SCons/PathList.py new file mode 100644 index 0000000..c88cda4 --- /dev/null +++ b/engine/SCons/PathList.py @@ -0,0 +1,232 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/PathList.py 4577 2009/12/27 19:43:56 scons" + +__doc__ = """SCons.PathList + +A module for handling lists of directory paths (the sort of things +that get set as CPPPATH, LIBPATH, etc.) with as much caching of data and +efficiency as we can while still keeping the evaluation delayed so that we +Do the Right Thing (almost) regardless of how the variable is specified. + +""" + +import os +import string + +import SCons.Memoize +import SCons.Node +import SCons.Util + +# +# Variables to specify the different types of entries in a PathList object: +# + +TYPE_STRING_NO_SUBST = 0 # string with no '$' +TYPE_STRING_SUBST = 1 # string containing '$' +TYPE_OBJECT = 2 # other object + +def node_conv(obj): + """ + This is the "string conversion" routine that we have our substitutions + use to return Nodes, not strings. This relies on the fact that an + EntryProxy object has a get() method that returns the underlying + Node that it wraps, which is a bit of architectural dependence + that we might need to break or modify in the future in response to + additional requirements. + """ + try: + get = obj.get + except AttributeError: + if isinstance(obj, SCons.Node.Node) or SCons.Util.is_Sequence( obj ): + result = obj + else: + result = str(obj) + else: + result = get() + return result + +class _PathList: + """ + An actual PathList object. + """ + def __init__(self, pathlist): + """ + Initializes a PathList object, canonicalizing the input and + pre-processing it for quicker substitution later. 
+ + The stored representation of the PathList is a list of tuples + containing (type, value), where the "type" is one of the TYPE_* + variables defined above. We distinguish between: + + strings that contain no '$' and therefore need no + delayed-evaluation string substitution (we expect that there + will be many of these and that we therefore get a pretty + big win from avoiding string substitution) + + strings that contain '$' and therefore need substitution + (the hard case is things like '${TARGET.dir}/include', + which require re-evaluation for every target + source) + + other objects (which may be something like an EntryProxy + that needs a method called to return a Node) + + Pre-identifying the type of each element in the PathList up-front + and storing the type in the list of tuples is intended to reduce + the amount of calculation when we actually do the substitution + over and over for each target. + """ + if SCons.Util.is_String(pathlist): + pathlist = string.split(pathlist, os.pathsep) + elif not SCons.Util.is_Sequence(pathlist): + pathlist = [pathlist] + + pl = [] + for p in pathlist: + try: + index = string.find(p, '$') + except (AttributeError, TypeError): + type = TYPE_OBJECT + else: + if index == -1: + type = TYPE_STRING_NO_SUBST + else: + type = TYPE_STRING_SUBST + pl.append((type, p)) + + self.pathlist = tuple(pl) + + def __len__(self): return len(self.pathlist) + + def __getitem__(self, i): return self.pathlist[i] + + def subst_path(self, env, target, source): + """ + Performs construction variable substitution on a pre-digested + PathList for a specific target and source. + """ + result = [] + for type, value in self.pathlist: + if type == TYPE_STRING_SUBST: + value = env.subst(value, target=target, source=source, + conv=node_conv) + if SCons.Util.is_Sequence(value): + result.extend(value) + continue + + elif type == TYPE_OBJECT: + value = node_conv(value) + if value: + result.append(value) + return tuple(result) + + +class PathListCache: + """ + A class to handle caching of PathList lookups. + + This class gets instantiated once and then deleted from the namespace, + so it's used as a Singleton (although we don't enforce that in the + usual Pythonic ways). We could have just made the cache a dictionary + in the module namespace, but putting it in this class allows us to + use the same Memoizer pattern that we use elsewhere to count cache + hits and misses, which is very valuable. + + Lookup keys in the cache are computed by the _PathList_key() method. + Cache lookup should be quick, so we don't spend cycles canonicalizing + all forms of the same lookup key. For example, 'x:y' and ['x', + 'y'] logically represent the same list, but we don't bother to + split string representations and treat those two equivalently. + (Note, however, that we do, treat lists and tuples the same.) + + The main type of duplication we're trying to catch will come from + looking up the same path list from two different clones of the + same construction environment. That is, given + + env2 = env1.Clone() + + both env1 and env2 will have the same CPPPATH value, and we can + cheaply avoid re-parsing both values of CPPPATH by using the + common value from this cache. + """ + if SCons.Memoize.use_memoizer: + __metaclass__ = SCons.Memoize.Memoized_Metaclass + + memoizer_counters = [] + + def __init__(self): + self._memo = {} + + def _PathList_key(self, pathlist): + """ + Returns the key for memoization of PathLists. 
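In essence the key is just the lookup argument with any nested lists flattened into a tuple, so a list, an equal tuple, and the identical CPPPATH value carried over by env.Clone() all land on the same cache entry. A rough sketch of the idea, leaning on the same SCons.Util helpers this module already uses:

    import SCons.Util

    def pathlist_key(pathlist):
        # lists and tuples collapse to one hashable key; plain strings pass through
        if SCons.Util.is_Sequence(pathlist):
            return tuple(SCons.Util.flatten(pathlist))
        return pathlist

    # pathlist_key(['inc', ['$BUILDDIR', 'inc2']]) == pathlist_key(('inc', '$BUILDDIR', 'inc2'))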
+ + Note that we want this to be pretty quick, so we don't completely + canonicalize all forms of the same list. For example, + 'dir1:$ROOT/dir2' and ['$ROOT/dir1', 'dir'] may logically + represent the same list if you're executing from $ROOT, but + we're not going to bother splitting strings into path elements, + or massaging strings into Nodes, to identify that equivalence. + We just want to eliminate obvious redundancy from the normal + case of re-using exactly the same cloned value for a path. + """ + if SCons.Util.is_Sequence(pathlist): + pathlist = tuple(SCons.Util.flatten(pathlist)) + return pathlist + + memoizer_counters.append(SCons.Memoize.CountDict('PathList', _PathList_key)) + + def PathList(self, pathlist): + """ + Returns the cached _PathList object for the specified pathlist, + creating and caching a new object as necessary. + """ + pathlist = self._PathList_key(pathlist) + try: + memo_dict = self._memo['PathList'] + except KeyError: + memo_dict = {} + self._memo['PathList'] = memo_dict + else: + try: + return memo_dict[pathlist] + except KeyError: + pass + + result = _PathList(pathlist) + + memo_dict[pathlist] = result + + return result + +PathList = PathListCache().PathList + + +del PathListCache + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Platform/__init__.py b/engine/SCons/Platform/__init__.py new file mode 100644 index 0000000..6252637 --- /dev/null +++ b/engine/SCons/Platform/__init__.py @@ -0,0 +1,236 @@ +"""SCons.Platform + +SCons platform selection. + +This looks for modules that define a callable object that can modify a +construction environment as appropriate for a given platform. + +Note that we take a more simplistic view of "platform" than Python does. +We're looking for a single string that determines a set of +tool-independent variables with which to initialize a construction +environment. Consequently, we'll examine both sys.platform and os.name +(and anything else that might come in to play) in order to return some +specification which is unique enough for our purposes. + +Note that because this subsysem just *selects* a callable that can +modify a construction environment, it's possible for people to define +their own "platform specification" in an arbitrary callable function. +No one needs to use or tie in to this subsystem in order to roll +their own platform definition. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Platform/__init__.py 4577 2009/12/27 19:43:56 scons" + +import SCons.compat + +import imp +import os +import string +import sys +import tempfile + +import SCons.Errors +import SCons.Subst +import SCons.Tool + +def platform_default(): + """Return the platform string for our execution environment. + + The returned value should map to one of the SCons/Platform/*.py + files. Since we're architecture independent, though, we don't + care about the machine architecture. + """ + osname = os.name + if osname == 'java': + osname = os._osType + if osname == 'posix': + if sys.platform == 'cygwin': + return 'cygwin' + elif string.find(sys.platform, 'irix') != -1: + return 'irix' + elif string.find(sys.platform, 'sunos') != -1: + return 'sunos' + elif string.find(sys.platform, 'hp-ux') != -1: + return 'hpux' + elif string.find(sys.platform, 'aix') != -1: + return 'aix' + elif string.find(sys.platform, 'darwin') != -1: + return 'darwin' + else: + return 'posix' + elif os.name == 'os2': + return 'os2' + else: + return sys.platform + +def platform_module(name = platform_default()): + """Return the imported module for the platform. + + This looks for a module name that matches the specified argument. + If the name is unspecified, we fetch the appropriate default for + our execution environment. + """ + full_name = 'SCons.Platform.' + name + if not sys.modules.has_key(full_name): + if os.name == 'java': + eval(full_name) + else: + try: + file, path, desc = imp.find_module(name, + sys.modules['SCons.Platform'].__path__) + try: + mod = imp.load_module(full_name, file, path, desc) + finally: + if file: + file.close() + except ImportError: + try: + import zipimport + importer = zipimport.zipimporter( sys.modules['SCons.Platform'].__path__[0] ) + mod = importer.load_module(full_name) + except ImportError: + raise SCons.Errors.UserError, "No platform named '%s'" % name + setattr(SCons.Platform, name, mod) + return sys.modules[full_name] + +def DefaultToolList(platform, env): + """Select a default tool list for the specified platform. + """ + return SCons.Tool.tool_list(platform, env) + +class PlatformSpec: + def __init__(self, name): + self.name = name + + def __str__(self): + return self.name + +class TempFileMunge: + """A callable class. You can set an Environment variable to this, + then call it with a string argument, then it will perform temporary + file substitution on it. This is used to circumvent the long command + line limitation. + + Example usage: + env["TEMPFILE"] = TempFileMunge + env["LINKCOM"] = "${TEMPFILE('$LINK $TARGET $SOURCES')}" + + By default, the name of the temporary file used begins with a + prefix of '@'. This may be configred for other tool chains by + setting '$TEMPFILEPREFIX'. + + env["TEMPFILEPREFIX"] = '-@' # diab compiler + env["TEMPFILEPREFIX"] = '-via' # arm tool chain + """ + def __init__(self, cmd): + self.cmd = cmd + + def __call__(self, target, source, env, for_signature): + if for_signature: + # If we're being called for signature calculation, it's + # because we're being called by the string expansion in + # Subst.py, which has the logic to strip any $( $) that + # may be in the command line we squirreled away. 
So we + # just return the raw command line and let the upper + # string substitution layers do their thing. + return self.cmd + + # Now we're actually being called because someone is actually + # going to try to execute the command, so we have to do our + # own expansion. + cmd = env.subst_list(self.cmd, SCons.Subst.SUBST_CMD, target, source)[0] + try: + maxline = int(env.subst('$MAXLINELENGTH')) + except ValueError: + maxline = 2048 + + if (reduce(lambda x, y: x + len(y), cmd, 0) + len(cmd)) <= maxline: + return self.cmd + + # We do a normpath because mktemp() has what appears to be + # a bug in Windows that will use a forward slash as a path + # delimiter. Windows's link mistakes that for a command line + # switch and barfs. + # + # We use the .lnk suffix for the benefit of the Phar Lap + # linkloc linker, which likes to append an .lnk suffix if + # none is given. + (fd, tmp) = tempfile.mkstemp('.lnk', text=True) + native_tmp = SCons.Util.get_native_path(os.path.normpath(tmp)) + + if env['SHELL'] and env['SHELL'] == 'sh': + # The sh shell will try to escape the backslashes in the + # path, so unescape them. + native_tmp = string.replace(native_tmp, '\\', r'\\\\') + # In Cygwin, we want to use rm to delete the temporary + # file, because del does not exist in the sh shell. + rm = env.Detect('rm') or 'del' + else: + # Don't use 'rm' if the shell is not sh, because rm won't + # work with the Windows shells (cmd.exe or command.com) or + # Windows path names. + rm = 'del' + + prefix = env.subst('$TEMPFILEPREFIX') + if not prefix: + prefix = '@' + + args = map(SCons.Subst.quote_spaces, cmd[1:]) + os.write(fd, string.join(args, " ") + "\n") + os.close(fd) + # XXX Using the SCons.Action.print_actions value directly + # like this is bogus, but expedient. This class should + # really be rewritten as an Action that defines the + # __call__() and strfunction() methods and lets the + # normal action-execution logic handle whether or not to + # print/execute the action. The problem, though, is all + # of that is decided before we execute this method as + # part of expanding the $TEMPFILE construction variable. + # Consequently, refactoring this will have to wait until + # we get more flexible with allowing Actions to exist + # independently and get strung together arbitrarily like + # Ant tasks. In the meantime, it's going to be more + # user-friendly to not let obsession with architectural + # purity get in the way of just being helpful, so we'll + # reach into SCons.Action directly. + if SCons.Action.print_actions: + print("Using tempfile "+native_tmp+" for command line:\n"+ + str(cmd[0]) + " " + string.join(args," ")) + return [ cmd[0], prefix + native_tmp + '\n' + rm, native_tmp ] + +def Platform(name = platform_default()): + """Select a canned Platform specification. + """ + module = platform_module(name) + spec = PlatformSpec(name) + spec.__call__ = module.generate + return spec + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Platform/aix.py b/engine/SCons/Platform/aix.py new file mode 100644 index 0000000..8dec76c --- /dev/null +++ b/engine/SCons/Platform/aix.py @@ -0,0 +1,70 @@ +"""engine.SCons.Platform.aix + +Platform-specific initialization for IBM AIX systems. + +There normally shouldn't be any need to import this module directly. It +will usually be imported through the generic SCons.Platform.Platform() +selection method. 
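The entry points that do that selection live in the package __init__ shown above; a brief sketch of how they fit together (the Environment call is the usual, implicit route):

    import SCons.Platform

    name = SCons.Platform.platform_default()   # e.g. 'posix', 'aix', 'win32'
    platform = SCons.Platform.Platform(name)   # PlatformSpec whose __call__ is the module's generate()

    # In an SConstruct this normally happens behind the scenes, roughly as if one had written
    #   env = Environment(platform='aix')
    # which ends up calling platform(env) to seed MAXLINELENGTH, prefixes, suffixes and so on.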
+""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Platform/aix.py 4577 2009/12/27 19:43:56 scons" + +import os +import string + +import posix + +def get_xlc(env, xlc=None, xlc_r=None, packages=[]): + # Use the AIX package installer tool lslpp to figure out where a + # given xl* compiler is installed and what version it is. + xlcPath = None + xlcVersion = None + + if xlc is None: + xlc = env.get('CC', 'xlc') + if xlc_r is None: + xlc_r = xlc + '_r' + for package in packages: + cmd = "lslpp -fc " + package + " 2>/dev/null | egrep '" + xlc + "([^-_a-zA-Z0-9].*)?$'" + line = os.popen(cmd).readline() + if line: + v, p = string.split(line, ':')[1:3] + xlcVersion = string.split(v)[1] + xlcPath = string.split(p)[0] + xlcPath = xlcPath[:xlcPath.rindex('/')] + break + return (xlcPath, xlc, xlc_r, xlcVersion) + +def generate(env): + posix.generate(env) + #Based on AIX 5.2: ARG_MAX=24576 - 3000 for environment expansion + env['MAXLINELENGTH'] = 21576 + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Platform/cygwin.py b/engine/SCons/Platform/cygwin.py new file mode 100644 index 0000000..e61e195 --- /dev/null +++ b/engine/SCons/Platform/cygwin.py @@ -0,0 +1,55 @@ +"""SCons.Platform.cygwin + +Platform-specific initialization for Cygwin systems. + +There normally shouldn't be any need to import this module directly. It +will usually be imported through the generic SCons.Platform.Platform() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Platform/cygwin.py 4577 2009/12/27 19:43:56 scons" + +import posix +from SCons.Platform import TempFileMunge + +def generate(env): + posix.generate(env) + + env['PROGPREFIX'] = '' + env['PROGSUFFIX'] = '.exe' + env['SHLIBPREFIX'] = '' + env['SHLIBSUFFIX'] = '.dll' + env['LIBPREFIXES'] = [ '$LIBPREFIX', '$SHLIBPREFIX' ] + env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX' ] + env['TEMPFILE'] = TempFileMunge + env['TEMPFILEPREFIX'] = '@' + env['MAXLINELENGTH'] = 2048 + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Platform/darwin.py b/engine/SCons/Platform/darwin.py new file mode 100644 index 0000000..7c5b75f --- /dev/null +++ b/engine/SCons/Platform/darwin.py @@ -0,0 +1,46 @@ +"""engine.SCons.Platform.darwin + +Platform-specific initialization for Mac OS X systems. + +There normally shouldn't be any need to import this module directly. It +will usually be imported through the generic SCons.Platform.Platform() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Platform/darwin.py 4577 2009/12/27 19:43:56 scons" + +import posix + +def generate(env): + posix.generate(env) + env['SHLIBSUFFIX'] = '.dylib' + env['ENV']['PATH'] = env['ENV']['PATH'] + ':/sw/bin' + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Platform/hpux.py b/engine/SCons/Platform/hpux.py new file mode 100644 index 0000000..651d0ae --- /dev/null +++ b/engine/SCons/Platform/hpux.py @@ -0,0 +1,46 @@ +"""engine.SCons.Platform.hpux + +Platform-specific initialization for HP-UX systems. + +There normally shouldn't be any need to import this module directly. It +will usually be imported through the generic SCons.Platform.Platform() +selection method. 
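As the package docstring points out, nothing has to go through this selection machinery; any callable that mutates a construction environment can serve as a platform. A site-local variant layered on the stock POSIX settings might look like this (my_platform and the 4096 limit are illustrative only):

    import SCons.Platform.posix

    def my_platform(env):
        # start from the standard POSIX initialization, then override one knob
        SCons.Platform.posix.generate(env)
        env['MAXLINELENGTH'] = 4096

    # my_platform can then be used anywhere a platform callable is accepted.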
+""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Platform/hpux.py 4577 2009/12/27 19:43:56 scons" + +import posix + +def generate(env): + posix.generate(env) + #Based on HP-UX11i: ARG_MAX=2048000 - 3000 for environment expansion + env['MAXLINELENGTH'] = 2045000 + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Platform/irix.py b/engine/SCons/Platform/irix.py new file mode 100644 index 0000000..8d80c89 --- /dev/null +++ b/engine/SCons/Platform/irix.py @@ -0,0 +1,44 @@ +"""SCons.Platform.irix + +Platform-specific initialization for SGI IRIX systems. + +There normally shouldn't be any need to import this module directly. It +will usually be imported through the generic SCons.Platform.Platform() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Platform/irix.py 4577 2009/12/27 19:43:56 scons" + +import posix + +def generate(env): + posix.generate(env) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Platform/os2.py b/engine/SCons/Platform/os2.py new file mode 100644 index 0000000..36e2445 --- /dev/null +++ b/engine/SCons/Platform/os2.py @@ -0,0 +1,58 @@ +"""SCons.Platform.os2 + +Platform-specific initialization for OS/2 systems. + +There normally shouldn't be any need to import this module directly. It +will usually be imported through the generic SCons.Platform.Platform() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Platform/os2.py 4577 2009/12/27 19:43:56 scons" +import win32 + +def generate(env): + if not env.has_key('ENV'): + env['ENV'] = {} + env['OBJPREFIX'] = '' + env['OBJSUFFIX'] = '.obj' + env['SHOBJPREFIX'] = '$OBJPREFIX' + env['SHOBJSUFFIX'] = '$OBJSUFFIX' + env['PROGPREFIX'] = '' + env['PROGSUFFIX'] = '.exe' + env['LIBPREFIX'] = '' + env['LIBSUFFIX'] = '.lib' + env['SHLIBPREFIX'] = '' + env['SHLIBSUFFIX'] = '.dll' + env['LIBPREFIXES'] = '$LIBPREFIX' + env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX' ] + env['HOST_OS'] = 'os2' + env['HOST_ARCH'] = win32.get_architecture().arch + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Platform/posix.py b/engine/SCons/Platform/posix.py new file mode 100644 index 0000000..f9795c0 --- /dev/null +++ b/engine/SCons/Platform/posix.py @@ -0,0 +1,264 @@ +"""SCons.Platform.posix + +Platform-specific initialization for POSIX (Linux, UNIX, etc.) systems. + +There normally shouldn't be any need to import this module directly. It +will usually be imported through the generic SCons.Platform.Platform() +selection method. 
+""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Platform/posix.py 4577 2009/12/27 19:43:56 scons" + +import errno +import os +import os.path +import string +import subprocess +import sys +import select + +import SCons.Util +from SCons.Platform import TempFileMunge + +exitvalmap = { + 2 : 127, + 13 : 126, +} + +def escape(arg): + "escape shell special characters" + slash = '\\' + special = '"$()' + + arg = string.replace(arg, slash, slash+slash) + for c in special: + arg = string.replace(arg, c, slash+c) + + return '"' + arg + '"' + +def exec_system(l, env): + stat = os.system(string.join(l)) + if stat & 0xff: + return stat | 0x80 + return stat >> 8 + +def exec_spawnvpe(l, env): + stat = os.spawnvpe(os.P_WAIT, l[0], l, env) + # os.spawnvpe() returns the actual exit code, not the encoding + # returned by os.waitpid() or os.system(). + return stat + +def exec_fork(l, env): + pid = os.fork() + if not pid: + # Child process. + exitval = 127 + try: + os.execvpe(l[0], l, env) + except OSError, e: + exitval = exitvalmap.get(e[0], e[0]) + sys.stderr.write("scons: %s: %s\n" % (l[0], e[1])) + os._exit(exitval) + else: + # Parent process. 
+ pid, stat = os.waitpid(pid, 0) + if stat & 0xff: + return stat | 0x80 + return stat >> 8 + +def _get_env_command(sh, escape, cmd, args, env): + s = string.join(args) + if env: + l = ['env', '-'] + \ + map(lambda t, e=escape: e(t[0])+'='+e(t[1]), env.items()) + \ + [sh, '-c', escape(s)] + s = string.join(l) + return s + +def env_spawn(sh, escape, cmd, args, env): + return exec_system([_get_env_command( sh, escape, cmd, args, env)], env) + +def spawnvpe_spawn(sh, escape, cmd, args, env): + return exec_spawnvpe([sh, '-c', string.join(args)], env) + +def fork_spawn(sh, escape, cmd, args, env): + return exec_fork([sh, '-c', string.join(args)], env) + +def process_cmd_output(cmd_stdout, cmd_stderr, stdout, stderr): + stdout_eof = stderr_eof = 0 + while not (stdout_eof and stderr_eof): + try: + (i,o,e) = select.select([cmd_stdout, cmd_stderr], [], []) + if cmd_stdout in i: + str = cmd_stdout.read() + if len(str) == 0: + stdout_eof = 1 + elif stdout is not None: + stdout.write(str) + if cmd_stderr in i: + str = cmd_stderr.read() + if len(str) == 0: + #sys.__stderr__.write( "stderr_eof=1\n" ) + stderr_eof = 1 + else: + #sys.__stderr__.write( "str(stderr) = %s\n" % str ) + stderr.write(str) + except select.error, (_errno, _strerror): + if _errno != errno.EINTR: + raise + +def exec_popen3(l, env, stdout, stderr): + proc = subprocess.Popen(string.join(l), + stdout=stdout, + stderr=stderr, + shell=True) + stat = proc.wait() + if stat & 0xff: + return stat | 0x80 + return stat >> 8 + +def exec_piped_fork(l, env, stdout, stderr): + # spawn using fork / exec and providing a pipe for the command's + # stdout / stderr stream + if stdout != stderr: + (rFdOut, wFdOut) = os.pipe() + (rFdErr, wFdErr) = os.pipe() + else: + (rFdOut, wFdOut) = os.pipe() + rFdErr = rFdOut + wFdErr = wFdOut + # do the fork + pid = os.fork() + if not pid: + # Child process + os.close( rFdOut ) + if rFdOut != rFdErr: + os.close( rFdErr ) + os.dup2( wFdOut, 1 ) # is there some symbolic way to do that ? + os.dup2( wFdErr, 2 ) + os.close( wFdOut ) + if stdout != stderr: + os.close( wFdErr ) + exitval = 127 + try: + os.execvpe(l[0], l, env) + except OSError, e: + exitval = exitvalmap.get(e[0], e[0]) + stderr.write("scons: %s: %s\n" % (l[0], e[1])) + os._exit(exitval) + else: + # Parent process + pid, stat = os.waitpid(pid, 0) + os.close( wFdOut ) + if stdout != stderr: + os.close( wFdErr ) + childOut = os.fdopen( rFdOut ) + if stdout != stderr: + childErr = os.fdopen( rFdErr ) + else: + childErr = childOut + process_cmd_output(childOut, childErr, stdout, stderr) + os.close( rFdOut ) + if stdout != stderr: + os.close( rFdErr ) + if stat & 0xff: + return stat | 0x80 + return stat >> 8 + +def piped_env_spawn(sh, escape, cmd, args, env, stdout, stderr): + # spawn using Popen3 combined with the env command + # the command name and the command's stdout is written to stdout + # the command's stderr is written to stderr + return exec_popen3([_get_env_command(sh, escape, cmd, args, env)], + env, stdout, stderr) + +def piped_fork_spawn(sh, escape, cmd, args, env, stdout, stderr): + # spawn using fork / exec and providing a pipe for the command's + # stdout / stderr stream + return exec_piped_fork([sh, '-c', string.join(args)], + env, stdout, stderr) + + + +def generate(env): + # If os.spawnvpe() exists, we use it to spawn commands. Otherwise + # if the env utility exists, we use os.system() to spawn commands, + # finally we fall back on os.fork()/os.exec(). + # + # os.spawnvpe() is prefered because it is the most efficient. 
But + # for Python versions without it, os.system() is prefered because it + # is claimed that it works better with threads (i.e. -j) and is more + # efficient than forking Python. + # + # NB: Other people on the scons-users mailing list have claimed that + # os.fork()/os.exec() works better than os.system(). There may just + # not be a default that works best for all users. + + if os.__dict__.has_key('spawnvpe'): + spawn = spawnvpe_spawn + elif env.Detect('env'): + spawn = env_spawn + else: + spawn = fork_spawn + + if env.Detect('env'): + pspawn = piped_env_spawn + else: + pspawn = piped_fork_spawn + + if not env.has_key('ENV'): + env['ENV'] = {} + env['ENV']['PATH'] = '/usr/local/bin:/opt/bin:/bin:/usr/bin' + env['OBJPREFIX'] = '' + env['OBJSUFFIX'] = '.o' + env['SHOBJPREFIX'] = '$OBJPREFIX' + env['SHOBJSUFFIX'] = '$OBJSUFFIX' + env['PROGPREFIX'] = '' + env['PROGSUFFIX'] = '' + env['LIBPREFIX'] = 'lib' + env['LIBSUFFIX'] = '.a' + env['SHLIBPREFIX'] = '$LIBPREFIX' + env['SHLIBSUFFIX'] = '.so' + env['LIBPREFIXES'] = [ '$LIBPREFIX' ] + env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX' ] + env['PSPAWN'] = pspawn + env['SPAWN'] = spawn + env['SHELL'] = 'sh' + env['ESCAPE'] = escape + env['TEMPFILE'] = TempFileMunge + env['TEMPFILEPREFIX'] = '@' + #Based on LINUX: ARG_MAX=ARG_MAX=131072 - 3000 for environment expansion + #Note: specific platforms might rise or lower this value + env['MAXLINELENGTH'] = 128072 + + # This platform supports RPATH specifications. + env['__RPATH'] = '$_RPATH' + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Platform/sunos.py b/engine/SCons/Platform/sunos.py new file mode 100644 index 0000000..5302270 --- /dev/null +++ b/engine/SCons/Platform/sunos.py @@ -0,0 +1,50 @@ +"""engine.SCons.Platform.sunos + +Platform-specific initialization for Sun systems. + +There normally shouldn't be any need to import this module directly. It +will usually be imported through the generic SCons.Platform.Platform() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
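Each of these POSIX-flavoured platforms pins MAXLINELENGTH to the operating system's ARG_MAX minus roughly 3000 bytes of headroom for the expanded environment, as the comments in their generate() functions spell out. SCons hard-codes the numbers per platform; a runtime probe in the same spirit might look like:

    import os

    ENV_HEADROOM = 3000   # allowance for environment expansion, matching the comments here

    def max_command_line():
        try:
            return os.sysconf('SC_ARG_MAX') - ENV_HEADROOM
        except (AttributeError, ValueError, OSError):
            return 2048   # conservative fallback, the same default TempFileMunge assumes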
+# + +__revision__ = "src/engine/SCons/Platform/sunos.py 4577 2009/12/27 19:43:56 scons" + +import posix + +def generate(env): + posix.generate(env) + # Based on sunSparc 8:32bit + # ARG_MAX=1048320 - 3000 for environment expansion + env['MAXLINELENGTH'] = 1045320 + env['PKGINFO'] = 'pkginfo' + env['PKGCHK'] = '/usr/sbin/pkgchk' + env['ENV']['PATH'] = env['ENV']['PATH'] + ':/opt/SUNWspro/bin:/usr/ccs/bin' + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Platform/win32.py b/engine/SCons/Platform/win32.py new file mode 100644 index 0000000..9ffa03e --- /dev/null +++ b/engine/SCons/Platform/win32.py @@ -0,0 +1,386 @@ +"""SCons.Platform.win32 + +Platform-specific initialization for Win32 systems. + +There normally shouldn't be any need to import this module directly. It +will usually be imported through the generic SCons.Platform.Platform() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Platform/win32.py 4577 2009/12/27 19:43:56 scons" + +import os +import os.path +import string +import sys +import tempfile + +from SCons.Platform.posix import exitvalmap +from SCons.Platform import TempFileMunge +import SCons.Util + +try: + import msvcrt + import win32api + import win32con + + msvcrt.get_osfhandle + win32api.SetHandleInformation + win32con.HANDLE_FLAG_INHERIT +except ImportError: + parallel_msg = \ + "you do not seem to have the pywin32 extensions installed;\n" + \ + "\tparallel (-j) builds may not work reliably with open Python files." +except AttributeError: + parallel_msg = \ + "your pywin32 extensions do not support file handle operations;\n" + \ + "\tparallel (-j) builds may not work reliably with open Python files." 
+else: + parallel_msg = None + + import __builtin__ + + _builtin_file = __builtin__.file + _builtin_open = __builtin__.open + + def _scons_file(*args, **kw): + fp = apply(_builtin_file, args, kw) + win32api.SetHandleInformation(msvcrt.get_osfhandle(fp.fileno()), + win32con.HANDLE_FLAG_INHERIT, + 0) + return fp + + def _scons_open(*args, **kw): + fp = apply(_builtin_open, args, kw) + win32api.SetHandleInformation(msvcrt.get_osfhandle(fp.fileno()), + win32con.HANDLE_FLAG_INHERIT, + 0) + return fp + + __builtin__.file = _scons_file + __builtin__.open = _scons_open + + + +# The upshot of all this is that, if you are using Python 1.5.2, +# you had better have cmd or command.com in your PATH when you run +# scons. + +def piped_spawn(sh, escape, cmd, args, env, stdout, stderr): + # There is no direct way to do that in python. What we do + # here should work for most cases: + # In case stdout (stderr) is not redirected to a file, + # we redirect it into a temporary file tmpFileStdout + # (tmpFileStderr) and copy the contents of this file + # to stdout (stderr) given in the argument + if not sh: + sys.stderr.write("scons: Could not find command interpreter, is it in your PATH?\n") + return 127 + else: + # one temporary file for stdout and stderr + tmpFileStdout = os.path.normpath(tempfile.mktemp()) + tmpFileStderr = os.path.normpath(tempfile.mktemp()) + + # check if output is redirected + stdoutRedirected = 0 + stderrRedirected = 0 + for arg in args: + # are there more possibilities to redirect stdout ? + if (string.find( arg, ">", 0, 1 ) != -1 or + string.find( arg, "1>", 0, 2 ) != -1): + stdoutRedirected = 1 + # are there more possibilities to redirect stderr ? + if string.find( arg, "2>", 0, 2 ) != -1: + stderrRedirected = 1 + + # redirect output of non-redirected streams to our tempfiles + if stdoutRedirected == 0: + args.append(">" + str(tmpFileStdout)) + if stderrRedirected == 0: + args.append("2>" + str(tmpFileStderr)) + + # actually do the spawn + try: + args = [sh, '/C', escape(string.join(args)) ] + ret = os.spawnve(os.P_WAIT, sh, args, env) + except OSError, e: + # catch any error + try: + ret = exitvalmap[e[0]] + except KeyError: + sys.stderr.write("scons: unknown OSError exception code %d - %s: %s\n" % (e[0], cmd, e[1])) + if stderr is not None: + stderr.write("scons: %s: %s\n" % (cmd, e[1])) + # copy child output from tempfiles to our streams + # and do clean up stuff + if stdout is not None and stdoutRedirected == 0: + try: + stdout.write(open( tmpFileStdout, "r" ).read()) + os.remove( tmpFileStdout ) + except (IOError, OSError): + pass + + if stderr is not None and stderrRedirected == 0: + try: + stderr.write(open( tmpFileStderr, "r" ).read()) + os.remove( tmpFileStderr ) + except (IOError, OSError): + pass + return ret + +def exec_spawn(l, env): + try: + result = os.spawnve(os.P_WAIT, l[0], l, env) + except OSError, e: + try: + result = exitvalmap[e[0]] + sys.stderr.write("scons: %s: %s\n" % (l[0], e[1])) + except KeyError: + result = 127 + if len(l) > 2: + if len(l[2]) < 1000: + command = string.join(l[0:3]) + else: + command = l[0] + else: + command = l[0] + sys.stderr.write("scons: unknown OSError exception code %d - '%s': %s\n" % (e[0], command, e[1])) + return result + +def spawn(sh, escape, cmd, args, env): + if not sh: + sys.stderr.write("scons: Could not find command interpreter, is it in your PATH?\n") + return 127 + return exec_spawn([sh, '/C', escape(string.join(args))], env) + +# Windows does not allow special characters in file names anyway, so no +# need for a complex 
escape function, we will just quote the arg, except +# that "cmd /c" requires that if an argument ends with a backslash it +# needs to be escaped so as not to interfere with closing double quote +# that we add. +def escape(x): + if x[-1] == '\\': + x = x + '\\' + return '"' + x + '"' + +# Get the windows system directory name +_system_root = None + +def get_system_root(): + global _system_root + if _system_root is not None: + return _system_root + + # A resonable default if we can't read the registry + val = os.environ.get('SystemRoot', "C:/WINDOWS") + + if SCons.Util.can_read_reg: + try: + # Look for Windows NT system root + k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, + 'Software\\Microsoft\\Windows NT\\CurrentVersion') + val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot') + except SCons.Util.RegError: + try: + # Okay, try the Windows 9x system root + k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, + 'Software\\Microsoft\\Windows\\CurrentVersion') + val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot') + except KeyboardInterrupt: + raise + except: + pass + _system_root = val + return val + +# Get the location of the program files directory +def get_program_files_dir(): + # Now see if we can look in the registry... + val = '' + if SCons.Util.can_read_reg: + try: + # Look for Windows Program Files directory + k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, + 'Software\\Microsoft\\Windows\\CurrentVersion') + val, tok = SCons.Util.RegQueryValueEx(k, 'ProgramFilesDir') + except SCons.Util.RegError: + val = '' + pass + + if val == '': + # A reasonable default if we can't read the registry + # (Actually, it's pretty reasonable even if we can :-) + val = os.path.join(os.path.dirname(get_system_root()),"Program Files") + + return val + + + +# Determine which windows CPU were running on. +class ArchDefinition: + """ + A class for defining architecture-specific settings and logic. + """ + def __init__(self, arch, synonyms=[]): + self.arch = arch + self.synonyms = synonyms + +SupportedArchitectureList = [ + ArchDefinition( + 'x86', + ['i386', 'i486', 'i586', 'i686'], + ), + + ArchDefinition( + 'x86_64', + ['AMD64', 'amd64', 'em64t', 'EM64T', 'x86_64'], + ), + + ArchDefinition( + 'ia64', + ['IA64'], + ), +] + +SupportedArchitectureMap = {} +for a in SupportedArchitectureList: + SupportedArchitectureMap[a.arch] = a + for s in a.synonyms: + SupportedArchitectureMap[s] = a + +def get_architecture(arch=None): + """Returns the definition for the specified architecture string. + + If no string is specified, the system default is returned (as defined + by the PROCESSOR_ARCHITEW6432 or PROCESSOR_ARCHITECTURE environment + variables). + """ + if arch is None: + arch = os.environ.get('PROCESSOR_ARCHITEW6432') + if not arch: + arch = os.environ.get('PROCESSOR_ARCHITECTURE') + return SupportedArchitectureMap.get(arch, ArchDefinition('', [''])) + +def generate(env): + # Attempt to find cmd.exe (for WinNT/2k/XP) or + # command.com for Win9x + cmd_interp = '' + # First see if we can look in the registry... 
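get_system_root() above and the interpreter hunt that follows both read HKEY_LOCAL_MACHINE through SCons.Util's registry wrappers. With nothing but the standard library, the same SystemRoot query looks roughly like this (winreg ships with Python on Windows; the key and value names are the ones used in the code below):

    import os

    try:
        import winreg
        with winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE,
                            r'Software\Microsoft\Windows NT\CurrentVersion') as key:
            system_root, _ = winreg.QueryValueEx(key, 'SystemRoot')
    except (ImportError, OSError):
        system_root = os.environ.get('SystemRoot', r'C:\WINDOWS')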
+ if SCons.Util.can_read_reg: + try: + # Look for Windows NT system root + k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, + 'Software\\Microsoft\\Windows NT\\CurrentVersion') + val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot') + cmd_interp = os.path.join(val, 'System32\\cmd.exe') + except SCons.Util.RegError: + try: + # Okay, try the Windows 9x system root + k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, + 'Software\\Microsoft\\Windows\\CurrentVersion') + val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot') + cmd_interp = os.path.join(val, 'command.com') + except KeyboardInterrupt: + raise + except: + pass + + # For the special case of not having access to the registry, we + # use a temporary path and pathext to attempt to find the command + # interpreter. If we fail, we try to find the interpreter through + # the env's PATH. The problem with that is that it might not + # contain an ENV and a PATH. + if not cmd_interp: + systemroot = get_system_root() + tmp_path = systemroot + os.pathsep + \ + os.path.join(systemroot,'System32') + tmp_pathext = '.com;.exe;.bat;.cmd' + if os.environ.has_key('PATHEXT'): + tmp_pathext = os.environ['PATHEXT'] + cmd_interp = SCons.Util.WhereIs('cmd', tmp_path, tmp_pathext) + if not cmd_interp: + cmd_interp = SCons.Util.WhereIs('command', tmp_path, tmp_pathext) + + if not cmd_interp: + cmd_interp = env.Detect('cmd') + if not cmd_interp: + cmd_interp = env.Detect('command') + + + if not env.has_key('ENV'): + env['ENV'] = {} + + # Import things from the external environment to the construction + # environment's ENV. This is a potential slippery slope, because we + # *don't* want to make builds dependent on the user's environment by + # default. We're doing this for SystemRoot, though, because it's + # needed for anything that uses sockets, and seldom changes, and + # for SystemDrive because it's related. + # + # Weigh the impact carefully before adding other variables to this list. + import_env = [ 'SystemDrive', 'SystemRoot', 'TEMP', 'TMP' ] + for var in import_env: + v = os.environ.get(var) + if v: + env['ENV'][var] = v + + if not env['ENV'].has_key('COMSPEC'): + v = os.environ.get("COMSPEC") + if v: + env['ENV']['COMSPEC'] = v + + env.AppendENVPath('PATH', get_system_root() + '\System32') + + env['ENV']['PATHEXT'] = '.COM;.EXE;.BAT;.CMD' + env['OBJPREFIX'] = '' + env['OBJSUFFIX'] = '.obj' + env['SHOBJPREFIX'] = '$OBJPREFIX' + env['SHOBJSUFFIX'] = '$OBJSUFFIX' + env['PROGPREFIX'] = '' + env['PROGSUFFIX'] = '.exe' + env['LIBPREFIX'] = '' + env['LIBSUFFIX'] = '.lib' + env['SHLIBPREFIX'] = '' + env['SHLIBSUFFIX'] = '.dll' + env['LIBPREFIXES'] = [ '$LIBPREFIX' ] + env['LIBSUFFIXES'] = [ '$LIBSUFFIX' ] + env['PSPAWN'] = piped_spawn + env['SPAWN'] = spawn + env['SHELL'] = cmd_interp + env['TEMPFILE'] = TempFileMunge + env['TEMPFILEPREFIX'] = '@' + env['MAXLINELENGTH'] = 2048 + env['ESCAPE'] = escape + + env['HOST_OS'] = 'win32' + env['HOST_ARCH'] = get_architecture().arch + + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/SConf.py b/engine/SCons/SConf.py new file mode 100644 index 0000000..b67ca64 --- /dev/null +++ b/engine/SCons/SConf.py @@ -0,0 +1,1038 @@ +"""SCons.SConf + +Autoconf-like configuration support. 
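From an SConstruct this machinery is reached through the Configure() wrapper, and the checks named in the default test table further down (CheckCC, CheckCHeader, CheckLib and friends) are its public face. A typical usage sketch:

    env = Environment()
    conf = Configure(env)
    if not conf.CheckCHeader('zlib.h'):
        print('zlib.h is required')
        Exit(1)
    if conf.CheckLib('z'):
        env.Append(LIBS=['z'])
    env = conf.Finish()   # always reclaim the environment, as the SConfBase docstring below insists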
+""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/SConf.py 4577 2009/12/27 19:43:56 scons" + +import os +import re +import string +import StringIO +import sys +import traceback +import types + +import SCons.Action +import SCons.Builder +import SCons.Errors +import SCons.Job +import SCons.Node.FS +import SCons.Taskmaster +import SCons.Util +import SCons.Warnings +import SCons.Conftest + +from SCons.Debug import Trace + +# Turn off the Conftest error logging +SCons.Conftest.LogInputFiles = 0 +SCons.Conftest.LogErrorMessages = 0 + +# Set +build_type = None +build_types = ['clean', 'help'] + +def SetBuildType(type): + global build_type + build_type = type + +# to be set, if we are in dry-run mode +dryrun = 0 + +AUTO=0 # use SCons dependency scanning for up-to-date checks +FORCE=1 # force all tests to be rebuilt +CACHE=2 # force all tests to be taken from cache (raise an error, if necessary) +cache_mode = AUTO + +def SetCacheMode(mode): + """Set the Configure cache mode. 
mode must be one of "auto", "force", + or "cache".""" + global cache_mode + if mode == "auto": + cache_mode = AUTO + elif mode == "force": + cache_mode = FORCE + elif mode == "cache": + cache_mode = CACHE + else: + raise ValueError, "SCons.SConf.SetCacheMode: Unknown mode " + mode + +progress_display = SCons.Util.display # will be overwritten by SCons.Script +def SetProgressDisplay(display): + """Set the progress display to use (called from SCons.Script)""" + global progress_display + progress_display = display + +SConfFS = None + +_ac_build_counter = 0 # incremented, whenever TryBuild is called +_ac_config_logs = {} # all config.log files created in this build +_ac_config_hs = {} # all config.h files created in this build +sconf_global = None # current sconf object + +def _createConfigH(target, source, env): + t = open(str(target[0]), "w") + defname = re.sub('[^A-Za-z0-9_]', '_', string.upper(str(target[0]))) + t.write("""#ifndef %(DEFNAME)s_SEEN +#define %(DEFNAME)s_SEEN + +""" % {'DEFNAME' : defname}) + t.write(source[0].get_contents()) + t.write(""" +#endif /* %(DEFNAME)s_SEEN */ +""" % {'DEFNAME' : defname}) + t.close() + +def _stringConfigH(target, source, env): + return "scons: Configure: creating " + str(target[0]) + +def CreateConfigHBuilder(env): + """Called just before the building targets phase begins.""" + if len(_ac_config_hs) == 0: + return + action = SCons.Action.Action(_createConfigH, + _stringConfigH) + sconfigHBld = SCons.Builder.Builder(action=action) + env.Append( BUILDERS={'SConfigHBuilder':sconfigHBld} ) + for k in _ac_config_hs.keys(): + env.SConfigHBuilder(k, env.Value(_ac_config_hs[k])) + +class SConfWarning(SCons.Warnings.Warning): + pass +SCons.Warnings.enableWarningClass(SConfWarning) + +# some error definitions +class SConfError(SCons.Errors.UserError): + def __init__(self,msg): + SCons.Errors.UserError.__init__(self,msg) + +class ConfigureDryRunError(SConfError): + """Raised when a file or directory needs to be updated during a Configure + process, but the user requested a dry-run""" + def __init__(self,target): + if not isinstance(target, SCons.Node.FS.File): + msg = 'Cannot create configure directory "%s" within a dry-run.' % str(target) + else: + msg = 'Cannot update configure test "%s" within a dry-run.' % str(target) + SConfError.__init__(self,msg) + +class ConfigureCacheError(SConfError): + """Raised when a use explicitely requested the cache feature, but the test + is run the first time.""" + def __init__(self,target): + SConfError.__init__(self, '"%s" is not yet built and cache is forced.' % str(target)) + +# define actions for building text files +def _createSource( target, source, env ): + fd = open(str(target[0]), "w") + fd.write(source[0].get_contents()) + fd.close() +def _stringSource( target, source, env ): + return (str(target[0]) + ' <-\n |' + + string.replace( source[0].get_contents(), + '\n', "\n |" ) ) + +# python 2.2 introduces types.BooleanType +BooleanTypes = [types.IntType] +if hasattr(types, 'BooleanType'): BooleanTypes.append(types.BooleanType) + +class SConfBuildInfo(SCons.Node.FS.FileBuildInfo): + """ + Special build info for targets of configure tests. Additional members + are result (did the builder succeed last time?) and string, which + contains messages of the original build phase. 
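Storing both the result and the captured output is what lets a cached, failing test replay its original compiler messages on later runs. Whether cached results are honored at all is governed by SetCacheMode() above, normally driven by scons' --config option; for example:

    import SCons.SConf

    SCons.SConf.SetCacheMode('force')   # rerun every test (what --config=force asks for)
    SCons.SConf.SetCacheMode('cache')   # fail if a result is not already cached
    SCons.SConf.SetCacheMode('auto')    # default: rely on normal dependency tracking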
+ """ + result = None # -> 0/None -> no error, != 0 error + string = None # the stdout / stderr output when building the target + + def set_build_result(self, result, string): + self.result = result + self.string = string + + +class Streamer: + """ + 'Sniffer' for a file-like writable object. Similar to the unix tool tee. + """ + def __init__(self, orig): + self.orig = orig + self.s = StringIO.StringIO() + + def write(self, str): + if self.orig: + self.orig.write(str) + self.s.write(str) + + def writelines(self, lines): + for l in lines: + self.write(l + '\n') + + def getvalue(self): + """ + Return everything written to orig since the Streamer was created. + """ + return self.s.getvalue() + + def flush(self): + if self.orig: + self.orig.flush() + self.s.flush() + + +class SConfBuildTask(SCons.Taskmaster.AlwaysTask): + """ + This is almost the same as SCons.Script.BuildTask. Handles SConfErrors + correctly and knows about the current cache_mode. + """ + def display(self, message): + if sconf_global.logstream: + sconf_global.logstream.write("scons: Configure: " + message + "\n") + + def display_cached_string(self, bi): + """ + Logs the original builder messages, given the SConfBuildInfo instance + bi. + """ + if not isinstance(bi, SConfBuildInfo): + SCons.Warnings.warn(SConfWarning, + "The stored build information has an unexpected class: %s" % bi.__class__) + else: + self.display("The original builder output was:\n" + + string.replace(" |" + str(bi.string), + "\n", "\n |")) + + def failed(self): + # check, if the reason was a ConfigureDryRunError or a + # ConfigureCacheError and if yes, reraise the exception + exc_type = self.exc_info()[0] + if issubclass(exc_type, SConfError): + raise + elif issubclass(exc_type, SCons.Errors.BuildError): + # we ignore Build Errors (occurs, when a test doesn't pass) + # Clear the exception to prevent the contained traceback + # to build a reference cycle. + self.exc_clear() + else: + self.display('Caught exception while building "%s":\n' % + self.targets[0]) + try: + excepthook = sys.excepthook + except AttributeError: + # Earlier versions of Python don't have sys.excepthook... 
+ def excepthook(type, value, tb): + traceback.print_tb(tb) + print type, value + apply(excepthook, self.exc_info()) + return SCons.Taskmaster.Task.failed(self) + + def collect_node_states(self): + # returns (is_up_to_date, cached_error, cachable) + # where is_up_to_date is 1, if the node(s) are up_to_date + # cached_error is 1, if the node(s) are up_to_date, but the + # build will fail + # cachable is 0, if some nodes are not in our cache + T = 0 + changed = False + cached_error = False + cachable = True + for t in self.targets: + if T: Trace('%s' % (t)) + bi = t.get_stored_info().binfo + if isinstance(bi, SConfBuildInfo): + if T: Trace(': SConfBuildInfo') + if cache_mode == CACHE: + t.set_state(SCons.Node.up_to_date) + if T: Trace(': set_state(up_to-date)') + else: + if T: Trace(': get_state() %s' % t.get_state()) + if T: Trace(': changed() %s' % t.changed()) + if (t.get_state() != SCons.Node.up_to_date and t.changed()): + changed = True + if T: Trace(': changed %s' % changed) + cached_error = cached_error or bi.result + else: + if T: Trace(': else') + # the node hasn't been built in a SConf context or doesn't + # exist + cachable = False + changed = ( t.get_state() != SCons.Node.up_to_date ) + if T: Trace(': changed %s' % changed) + if T: Trace('\n') + return (not changed, cached_error, cachable) + + def execute(self): + if not self.targets[0].has_builder(): + return + + sconf = sconf_global + + is_up_to_date, cached_error, cachable = self.collect_node_states() + + if cache_mode == CACHE and not cachable: + raise ConfigureCacheError(self.targets[0]) + elif cache_mode == FORCE: + is_up_to_date = 0 + + if cached_error and is_up_to_date: + self.display("Building \"%s\" failed in a previous run and all " + "its sources are up to date." % str(self.targets[0])) + binfo = self.targets[0].get_stored_info().binfo + self.display_cached_string(binfo) + raise SCons.Errors.BuildError # will be 'caught' in self.failed + elif is_up_to_date: + self.display("\"%s\" is up to date." % str(self.targets[0])) + binfo = self.targets[0].get_stored_info().binfo + self.display_cached_string(binfo) + elif dryrun: + raise ConfigureDryRunError(self.targets[0]) + else: + # note stdout and stderr are the same here + s = sys.stdout = sys.stderr = Streamer(sys.stdout) + try: + env = self.targets[0].get_build_env() + if cache_mode == FORCE: + # Set up the Decider() to force rebuilds by saying + # that every source has changed. Note that we still + # call the environment's underlying source decider so + # that the correct .sconsign info will get calculated + # and keep the build state consistent. 
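The nested force_build() defined next is the usual SCons idiom for forcing a rebuild while still invoking the real source decider for its bookkeeping side effects. Stripped of the SConf-specific details, the pattern is simply (given a construction environment env):

    def always_rebuild(dependency, target, prev_ni):
        # claim every source has changed so the target rebuilds unconditionally
        return True

    env.Decider(always_rebuild)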
+ def force_build(dependency, target, prev_ni, + env_decider=env.decide_source): + env_decider(dependency, target, prev_ni) + return True + if env.decide_source.func_code is not force_build.func_code: + env.Decider(force_build) + env['PSTDOUT'] = env['PSTDERR'] = s + try: + sconf.cached = 0 + self.targets[0].build() + finally: + sys.stdout = sys.stderr = env['PSTDOUT'] = \ + env['PSTDERR'] = sconf.logstream + except KeyboardInterrupt: + raise + except SystemExit: + exc_value = sys.exc_info()[1] + raise SCons.Errors.ExplicitExit(self.targets[0],exc_value.code) + except Exception, e: + for t in self.targets: + binfo = t.get_binfo() + binfo.__class__ = SConfBuildInfo + binfo.set_build_result(1, s.getvalue()) + sconsign_entry = SCons.SConsign.SConsignEntry() + sconsign_entry.binfo = binfo + #sconsign_entry.ninfo = self.get_ninfo() + # We'd like to do this as follows: + # t.store_info(binfo) + # However, we need to store it as an SConfBuildInfo + # object, and store_info() will turn it into a + # regular FileNodeInfo if the target is itself a + # regular File. + sconsign = t.dir.sconsign() + sconsign.set_entry(t.name, sconsign_entry) + sconsign.merge() + raise e + else: + for t in self.targets: + binfo = t.get_binfo() + binfo.__class__ = SConfBuildInfo + binfo.set_build_result(0, s.getvalue()) + sconsign_entry = SCons.SConsign.SConsignEntry() + sconsign_entry.binfo = binfo + #sconsign_entry.ninfo = self.get_ninfo() + # We'd like to do this as follows: + # t.store_info(binfo) + # However, we need to store it as an SConfBuildInfo + # object, and store_info() will turn it into a + # regular FileNodeInfo if the target is itself a + # regular File. + sconsign = t.dir.sconsign() + sconsign.set_entry(t.name, sconsign_entry) + sconsign.merge() + +class SConfBase: + """This is simply a class to represent a configure context. After + creating a SConf object, you can call any tests. After finished with your + tests, be sure to call the Finish() method, which returns the modified + environment. + Some words about caching: In most cases, it is not necessary to cache + Test results explicitely. Instead, we use the scons dependency checking + mechanism. For example, if one wants to compile a test program + (SConf.TryLink), the compiler is only called, if the program dependencies + have changed. However, if the program could not be compiled in a former + SConf run, we need to explicitely cache this error. + """ + + def __init__(self, env, custom_tests = {}, conf_dir='$CONFIGUREDIR', + log_file='$CONFIGURELOG', config_h = None, _depth = 0): + """Constructor. Pass additional tests in the custom_tests-dictinary, + e.g. custom_tests={'CheckPrivate':MyPrivateTest}, where MyPrivateTest + defines a custom test. 
+ Note also the conf_dir and log_file arguments (you may want to + build tests in the VariantDir, not in the SourceDir) + """ + global SConfFS + if not SConfFS: + SConfFS = SCons.Node.FS.default_fs or \ + SCons.Node.FS.FS(env.fs.pathTop) + if sconf_global is not None: + raise (SCons.Errors.UserError, + "Only one SConf object may be active at one time") + self.env = env + if log_file is not None: + log_file = SConfFS.File(env.subst(log_file)) + self.logfile = log_file + self.logstream = None + self.lastTarget = None + self.depth = _depth + self.cached = 0 # will be set, if all test results are cached + + # add default tests + default_tests = { + 'CheckCC' : CheckCC, + 'CheckCXX' : CheckCXX, + 'CheckSHCC' : CheckSHCC, + 'CheckSHCXX' : CheckSHCXX, + 'CheckFunc' : CheckFunc, + 'CheckType' : CheckType, + 'CheckTypeSize' : CheckTypeSize, + 'CheckDeclaration' : CheckDeclaration, + 'CheckHeader' : CheckHeader, + 'CheckCHeader' : CheckCHeader, + 'CheckCXXHeader' : CheckCXXHeader, + 'CheckLib' : CheckLib, + 'CheckLibWithHeader' : CheckLibWithHeader, + } + self.AddTests(default_tests) + self.AddTests(custom_tests) + self.confdir = SConfFS.Dir(env.subst(conf_dir)) + if config_h is not None: + config_h = SConfFS.File(config_h) + self.config_h = config_h + self._startup() + + def Finish(self): + """Call this method after finished with your tests: + env = sconf.Finish() + """ + self._shutdown() + return self.env + + def Define(self, name, value = None, comment = None): + """ + Define a pre processor symbol name, with the optional given value in the + current config header. + + If value is None (default), then #define name is written. If value is not + none, then #define name value is written. + + comment is a string which will be put as a C comment in the + header, to explain the meaning of the value (appropriate C comments /* and + */ will be put automatically.""" + lines = [] + if comment: + comment_str = "/* %s */" % comment + lines.append(comment_str) + + if value is not None: + define_str = "#define %s %s" % (name, value) + else: + define_str = "#define %s" % name + lines.append(define_str) + lines.append('') + + self.config_h_text = self.config_h_text + string.join(lines, '\n') + + def BuildNodes(self, nodes): + """ + Tries to build the given nodes immediately. Returns 1 on success, + 0 on error. + """ + if self.logstream is not None: + # override stdout / stderr to write in log file + oldStdout = sys.stdout + sys.stdout = self.logstream + oldStderr = sys.stderr + sys.stderr = self.logstream + + # the engine assumes the current path is the SConstruct directory ... + old_fs_dir = SConfFS.getcwd() + old_os_dir = os.getcwd() + SConfFS.chdir(SConfFS.Top, change_os_dir=1) + + # Because we take responsibility here for writing out our + # own .sconsign info (see SConfBuildTask.execute(), above), + # we override the store_info() method with a null place-holder + # so we really control how it gets written. + for n in nodes: + n.store_info = n.do_not_store_info + + ret = 1 + + try: + # ToDo: use user options for calc + save_max_drift = SConfFS.get_max_drift() + SConfFS.set_max_drift(0) + tm = SCons.Taskmaster.Taskmaster(nodes, SConfBuildTask) + # we don't want to build tests in parallel + jobs = SCons.Job.Jobs(1, tm ) + jobs.run() + for n in nodes: + state = n.get_state() + if (state != SCons.Node.executed and + state != SCons.Node.up_to_date): + # the node could not be built. 
we return 0 in this case + ret = 0 + finally: + SConfFS.set_max_drift(save_max_drift) + os.chdir(old_os_dir) + SConfFS.chdir(old_fs_dir, change_os_dir=0) + if self.logstream is not None: + # restore stdout / stderr + sys.stdout = oldStdout + sys.stderr = oldStderr + return ret + + def pspawn_wrapper(self, sh, escape, cmd, args, env): + """Wrapper function for handling piped spawns. + + This looks to the calling interface (in Action.py) like a "normal" + spawn, but associates the call with the PSPAWN variable from + the construction environment and with the streams to which we + want the output logged. This gets slid into the construction + environment as the SPAWN variable so Action.py doesn't have to + know or care whether it's spawning a piped command or not. + """ + return self.pspawn(sh, escape, cmd, args, env, self.logstream, self.logstream) + + + def TryBuild(self, builder, text = None, extension = ""): + """Low level TryBuild implementation. Normally you don't need to + call that - you can use TryCompile / TryLink / TryRun instead + """ + global _ac_build_counter + + # Make sure we have a PSPAWN value, and save the current + # SPAWN value. + try: + self.pspawn = self.env['PSPAWN'] + except KeyError: + raise SCons.Errors.UserError('Missing PSPAWN construction variable.') + try: + save_spawn = self.env['SPAWN'] + except KeyError: + raise SCons.Errors.UserError('Missing SPAWN construction variable.') + + nodesToBeBuilt = [] + + f = "conftest_" + str(_ac_build_counter) + pref = self.env.subst( builder.builder.prefix ) + suff = self.env.subst( builder.builder.suffix ) + target = self.confdir.File(pref + f + suff) + + try: + # Slide our wrapper into the construction environment as + # the SPAWN function. + self.env['SPAWN'] = self.pspawn_wrapper + sourcetext = self.env.Value(text) + + if text is not None: + textFile = self.confdir.File(f + extension) + textFileNode = self.env.SConfSourceBuilder(target=textFile, + source=sourcetext) + nodesToBeBuilt.extend(textFileNode) + source = textFileNode + else: + source = None + + nodes = builder(target = target, source = source) + if not SCons.Util.is_List(nodes): + nodes = [nodes] + nodesToBeBuilt.extend(nodes) + result = self.BuildNodes(nodesToBeBuilt) + + finally: + self.env['SPAWN'] = save_spawn + + _ac_build_counter = _ac_build_counter + 1 + if result: + self.lastTarget = nodes[0] + else: + self.lastTarget = None + + return result + + def TryAction(self, action, text = None, extension = ""): + """Tries to execute the given action with optional source file + contents <text> and optional source file extension <extension>, + Returns the status (0 : failed, 1 : ok) and the contents of the + output file. + """ + builder = SCons.Builder.Builder(action=action) + self.env.Append( BUILDERS = {'SConfActionBuilder' : builder} ) + ok = self.TryBuild(self.env.SConfActionBuilder, text, extension) + del self.env['BUILDERS']['SConfActionBuilder'] + if ok: + outputStr = self.lastTarget.get_contents() + return (1, outputStr) + return (0, "") + + def TryCompile( self, text, extension): + """Compiles the program given in text to an env.Object, using extension + as file extension (e.g. '.c'). Returns 1, if compilation was + successful, 0 otherwise. The target is saved in self.lastTarget (for + further processing). + """ + return self.TryBuild(self.env.Object, text, extension) + + def TryLink( self, text, extension ): + """Compiles the program given in text to an executable env.Program, + using extension as file extension (e.g. '.c'). 
Returns 1, if + compilation was successful, 0 otherwise. The target is saved in + self.lastTarget (for further processing). + """ + return self.TryBuild(self.env.Program, text, extension ) + + def TryRun(self, text, extension ): + """Compiles and runs the program given in text, using extension + as file extension (e.g. '.c'). Returns (1, outputStr) on success, + (0, '') otherwise. The target (a file containing the program's stdout) + is saved in self.lastTarget (for further processing). + """ + ok = self.TryLink(text, extension) + if( ok ): + prog = self.lastTarget + pname = prog.path + output = self.confdir.File(os.path.basename(pname)+'.out') + node = self.env.Command(output, prog, [ [ pname, ">", "${TARGET}"] ]) + ok = self.BuildNodes(node) + if ok: + outputStr = output.get_contents() + return( 1, outputStr) + return (0, "") + + class TestWrapper: + """A wrapper around Tests (to ensure sanity)""" + def __init__(self, test, sconf): + self.test = test + self.sconf = sconf + def __call__(self, *args, **kw): + if not self.sconf.active: + raise (SCons.Errors.UserError, + "Test called after sconf.Finish()") + context = CheckContext(self.sconf) + ret = apply(self.test, (context,) + args, kw) + if self.sconf.config_h is not None: + self.sconf.config_h_text = self.sconf.config_h_text + context.config_h + context.Result("error: no result") + return ret + + def AddTest(self, test_name, test_instance): + """Adds test_class to this SConf instance. It can be called with + self.test_name(...)""" + setattr(self, test_name, SConfBase.TestWrapper(test_instance, self)) + + def AddTests(self, tests): + """Adds all the tests given in the tests dictionary to this SConf + instance + """ + for name in tests.keys(): + self.AddTest(name, tests[name]) + + def _createDir( self, node ): + dirName = str(node) + if dryrun: + if not os.path.isdir( dirName ): + raise ConfigureDryRunError(dirName) + else: + if not os.path.isdir( dirName ): + os.makedirs( dirName ) + node._exists = 1 + + def _startup(self): + """Private method. 
Set up logstream, and set the environment + variables necessary for a piped build + """ + global _ac_config_logs + global sconf_global + global SConfFS + + self.lastEnvFs = self.env.fs + self.env.fs = SConfFS + self._createDir(self.confdir) + self.confdir.up().add_ignore( [self.confdir] ) + + if self.logfile is not None and not dryrun: + # truncate logfile, if SConf.Configure is called for the first time + # in a build + if _ac_config_logs.has_key(self.logfile): + log_mode = "a" + else: + _ac_config_logs[self.logfile] = None + log_mode = "w" + fp = open(str(self.logfile), log_mode) + self.logstream = SCons.Util.Unbuffered(fp) + # logfile may stay in a build directory, so we tell + # the build system not to override it with a eventually + # existing file with the same name in the source directory + self.logfile.dir.add_ignore( [self.logfile] ) + + tb = traceback.extract_stack()[-3-self.depth] + old_fs_dir = SConfFS.getcwd() + SConfFS.chdir(SConfFS.Top, change_os_dir=0) + self.logstream.write('file %s,line %d:\n\tConfigure(confdir = %s)\n' % + (tb[0], tb[1], str(self.confdir)) ) + SConfFS.chdir(old_fs_dir) + else: + self.logstream = None + # we use a special builder to create source files from TEXT + action = SCons.Action.Action(_createSource, + _stringSource) + sconfSrcBld = SCons.Builder.Builder(action=action) + self.env.Append( BUILDERS={'SConfSourceBuilder':sconfSrcBld} ) + self.config_h_text = _ac_config_hs.get(self.config_h, "") + self.active = 1 + # only one SConf instance should be active at a time ... + sconf_global = self + + def _shutdown(self): + """Private method. Reset to non-piped spawn""" + global sconf_global, _ac_config_hs + + if not self.active: + raise SCons.Errors.UserError, "Finish may be called only once!" + if self.logstream is not None and not dryrun: + self.logstream.write("\n") + self.logstream.close() + self.logstream = None + # remove the SConfSourceBuilder from the environment + blds = self.env['BUILDERS'] + del blds['SConfSourceBuilder'] + self.env.Replace( BUILDERS=blds ) + self.active = 0 + sconf_global = None + if not self.config_h is None: + _ac_config_hs[self.config_h] = self.config_h_text + self.env.fs = self.lastEnvFs + +class CheckContext: + """Provides a context for configure tests. Defines how a test writes to the + screen and log file. + + A typical test is just a callable with an instance of CheckContext as + first argument: + + def CheckCustom(context, ...) + context.Message('Checking my weird test ... ') + ret = myWeirdTestFunction(...) + context.Result(ret) + + Often, myWeirdTestFunction will be one of + context.TryCompile/context.TryLink/context.TryRun. The results of + those are cached, for they are only rebuild, if the dependencies have + changed. + """ + + def __init__(self, sconf): + """Constructor. Pass the corresponding SConf instance.""" + self.sconf = sconf + self.did_show_result = 0 + + # for Conftest.py: + self.vardict = {} + self.havedict = {} + self.headerfilename = None + self.config_h = "" # config_h text will be stored here + # we don't regenerate the config.h file after each test. That means, + # that tests won't be able to include the config.h file, and so + # they can't do an #ifdef HAVE_XXX_H. This shouldn't be a major + # issue, though. If it turns out, that we need to include config.h + # in tests, we must ensure, that the dependencies are worked out + # correctly. Note that we can't use Conftest.py's support for config.h, + # cause we will need to specify a builder for the config.h file ... 
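+ # A test function typically drives this context as follows (sketch
+ # only; 'foo' is a placeholder):
+ #
+ #     context.Message('Checking for foo ... ')
+ #     ok = context.TryLink('int main(void) { return 0; }\n', '.c')
+ #     context.Result(ok)
+ #
+ # Message() marks the check as potentially cached and resets
+ # did_show_result; Result() then reports "yes"/"no" (for an int/bool
+ # result) or the string itself, exactly once per Message() call.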
+ + def Message(self, text): + """Inform about what we are doing right now, e.g. + 'Checking for SOMETHING ... ' + """ + self.Display(text) + self.sconf.cached = 1 + self.did_show_result = 0 + + def Result(self, res): + """Inform about the result of the test. res may be an integer or a + string. In case of an integer, the written text will be 'yes' or 'no'. + The result is only displayed when self.did_show_result is not set. + """ + if type(res) in BooleanTypes: + if res: + text = "yes" + else: + text = "no" + elif type(res) == types.StringType: + text = res + else: + raise TypeError, "Expected string, int or bool, got " + str(type(res)) + + if self.did_show_result == 0: + # Didn't show result yet, do it now. + self.Display(text + "\n") + self.did_show_result = 1 + + def TryBuild(self, *args, **kw): + return apply(self.sconf.TryBuild, args, kw) + + def TryAction(self, *args, **kw): + return apply(self.sconf.TryAction, args, kw) + + def TryCompile(self, *args, **kw): + return apply(self.sconf.TryCompile, args, kw) + + def TryLink(self, *args, **kw): + return apply(self.sconf.TryLink, args, kw) + + def TryRun(self, *args, **kw): + return apply(self.sconf.TryRun, args, kw) + + def __getattr__( self, attr ): + if( attr == 'env' ): + return self.sconf.env + elif( attr == 'lastTarget' ): + return self.sconf.lastTarget + else: + raise AttributeError, "CheckContext instance has no attribute '%s'" % attr + + #### Stuff used by Conftest.py (look there for explanations). + + def BuildProg(self, text, ext): + self.sconf.cached = 1 + # TODO: should use self.vardict for $CC, $CPPFLAGS, etc. + return not self.TryBuild(self.env.Program, text, ext) + + def CompileProg(self, text, ext): + self.sconf.cached = 1 + # TODO: should use self.vardict for $CC, $CPPFLAGS, etc. + return not self.TryBuild(self.env.Object, text, ext) + + def CompileSharedObject(self, text, ext): + self.sconf.cached = 1 + # TODO: should use self.vardict for $SHCC, $CPPFLAGS, etc. + return not self.TryBuild(self.env.SharedObject, text, ext) + + def RunProg(self, text, ext): + self.sconf.cached = 1 + # TODO: should use self.vardict for $CC, $CPPFLAGS, etc. + st, out = self.TryRun(text, ext) + return not st, out + + def AppendLIBS(self, lib_name_list): + oldLIBS = self.env.get( 'LIBS', [] ) + self.env.Append(LIBS = lib_name_list) + return oldLIBS + + def PrependLIBS(self, lib_name_list): + oldLIBS = self.env.get( 'LIBS', [] ) + self.env.Prepend(LIBS = lib_name_list) + return oldLIBS + + def SetLIBS(self, val): + oldLIBS = self.env.get( 'LIBS', [] ) + self.env.Replace(LIBS = val) + return oldLIBS + + def Display(self, msg): + if self.sconf.cached: + # We assume that Display is called twice for each test here + # once for the Checking for ... message and once for the result. + # The self.sconf.cached flag can only be set between those calls + msg = "(cached) " + msg + self.sconf.cached = 0 + progress_display(msg, append_newline=0) + self.Log("scons: Configure: " + msg + "\n") + + def Log(self, msg): + if self.sconf.logstream is not None: + self.sconf.logstream.write(msg) + + #### End of stuff used by Conftest.py. 
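A minimal sketch of how this machinery is normally driven from an SConstruct file, where Configure() resolves to the SConf() factory defined just below. This is an illustrative aside, not part of the patch: the compiler, header and library names are placeholders, and a custom test written against CheckContext (see the class docstring above) would be passed in through the custom_tests dictionary.

    env = Environment()
    conf = Configure(env)            # or: Configure(env, custom_tests={...})
    if not conf.CheckCC():
        Exit(1)                      # no usable C compiler
    if conf.CheckHeader('unistd.h'):
        env.Append(CPPDEFINES=['HAVE_UNISTD_H'])
    conf.CheckLibWithHeader('m', 'math.h', 'C', autoadd=1)
    env = conf.Finish()              # always call Finish() to get the environment back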
+ + +def SConf(*args, **kw): + if kw.get(build_type, True): + kw['_depth'] = kw.get('_depth', 0) + 1 + for bt in build_types: + try: + del kw[bt] + except KeyError: + pass + return apply(SConfBase, args, kw) + else: + return SCons.Util.Null() + + +def CheckFunc(context, function_name, header = None, language = None): + res = SCons.Conftest.CheckFunc(context, function_name, header = header, language = language) + context.did_show_result = 1 + return not res + +def CheckType(context, type_name, includes = "", language = None): + res = SCons.Conftest.CheckType(context, type_name, + header = includes, language = language) + context.did_show_result = 1 + return not res + +def CheckTypeSize(context, type_name, includes = "", language = None, expect = None): + res = SCons.Conftest.CheckTypeSize(context, type_name, + header = includes, language = language, + expect = expect) + context.did_show_result = 1 + return res + +def CheckDeclaration(context, declaration, includes = "", language = None): + res = SCons.Conftest.CheckDeclaration(context, declaration, + includes = includes, + language = language) + context.did_show_result = 1 + return not res + +def createIncludesFromHeaders(headers, leaveLast, include_quotes = '""'): + # used by CheckHeader and CheckLibWithHeader to produce C - #include + # statements from the specified header (list) + if not SCons.Util.is_List(headers): + headers = [headers] + l = [] + if leaveLast: + lastHeader = headers[-1] + headers = headers[:-1] + else: + lastHeader = None + for s in headers: + l.append("#include %s%s%s\n" + % (include_quotes[0], s, include_quotes[1])) + return string.join(l, ''), lastHeader + +def CheckHeader(context, header, include_quotes = '<>', language = None): + """ + A test for a C or C++ header file. + """ + prog_prefix, hdr_to_check = \ + createIncludesFromHeaders(header, 1, include_quotes) + res = SCons.Conftest.CheckHeader(context, hdr_to_check, prog_prefix, + language = language, + include_quotes = include_quotes) + context.did_show_result = 1 + return not res + +def CheckCC(context): + res = SCons.Conftest.CheckCC(context) + context.did_show_result = 1 + return not res + +def CheckCXX(context): + res = SCons.Conftest.CheckCXX(context) + context.did_show_result = 1 + return not res + +def CheckSHCC(context): + res = SCons.Conftest.CheckSHCC(context) + context.did_show_result = 1 + return not res + +def CheckSHCXX(context): + res = SCons.Conftest.CheckSHCXX(context) + context.did_show_result = 1 + return not res + +# Bram: Make this function obsolete? CheckHeader() is more generic. + +def CheckCHeader(context, header, include_quotes = '""'): + """ + A test for a C header file. + """ + return CheckHeader(context, header, include_quotes, language = "C") + + +# Bram: Make this function obsolete? CheckHeader() is more generic. + +def CheckCXXHeader(context, header, include_quotes = '""'): + """ + A test for a C++ header file. + """ + return CheckHeader(context, header, include_quotes, language = "C++") + + +def CheckLib(context, library = None, symbol = "main", + header = None, language = None, autoadd = 1): + """ + A test for a library. See also CheckLibWithHeader. + Note that library may also be None to test whether the given symbol + compiles without flags. 
+ """ + + if library == []: + library = [None] + + if not SCons.Util.is_List(library): + library = [library] + + # ToDo: accept path for the library + res = SCons.Conftest.CheckLib(context, library, symbol, header = header, + language = language, autoadd = autoadd) + context.did_show_result = 1 + return not res + +# XXX +# Bram: Can only include one header and can't use #ifdef HAVE_HEADER_H. + +def CheckLibWithHeader(context, libs, header, language, + call = None, autoadd = 1): + # ToDo: accept path for library. Support system header files. + """ + Another (more sophisticated) test for a library. + Checks, if library and header is available for language (may be 'C' + or 'CXX'). Call maybe be a valid expression _with_ a trailing ';'. + As in CheckLib, we support library=None, to test if the call compiles + without extra link flags. + """ + prog_prefix, dummy = \ + createIncludesFromHeaders(header, 0) + if libs == []: + libs = [None] + + if not SCons.Util.is_List(libs): + libs = [libs] + + res = SCons.Conftest.CheckLib(context, libs, None, prog_prefix, + call = call, language = language, autoadd = autoadd) + context.did_show_result = 1 + return not res + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/SConsign.py b/engine/SCons/SConsign.py new file mode 100644 index 0000000..a70becd --- /dev/null +++ b/engine/SCons/SConsign.py @@ -0,0 +1,381 @@ +"""SCons.SConsign + +Writing and reading information to the .sconsign file or files. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/SConsign.py 4577 2009/12/27 19:43:56 scons" + +import cPickle +import os +import os.path + +import SCons.dblite +import SCons.Warnings + +def corrupt_dblite_warning(filename): + SCons.Warnings.warn(SCons.Warnings.CorruptSConsignWarning, + "Ignoring corrupt .sconsign file: %s"%filename) + +SCons.dblite.ignore_corrupt_dbfiles = 1 +SCons.dblite.corruption_warning = corrupt_dblite_warning + +#XXX Get rid of the global array so this becomes re-entrant. +sig_files = [] + +# Info for the database SConsign implementation (now the default): +# "DataBase" is a dictionary that maps top-level SConstruct directories +# to open database handles. +# "DB_Module" is the Python database module to create the handles. 
+# "DB_Name" is the base name of the database file (minus any +# extension the underlying DB module will add). +DataBase = {} +DB_Module = SCons.dblite +DB_Name = ".sconsign" +DB_sync_list = [] + +def Get_DataBase(dir): + global DataBase, DB_Module, DB_Name + top = dir.fs.Top + if not os.path.isabs(DB_Name) and top.repositories: + mode = "c" + for d in [top] + top.repositories: + if dir.is_under(d): + try: + return DataBase[d], mode + except KeyError: + path = d.entry_abspath(DB_Name) + try: db = DataBase[d] = DB_Module.open(path, mode) + except (IOError, OSError): pass + else: + if mode != "r": + DB_sync_list.append(db) + return db, mode + mode = "r" + try: + return DataBase[top], "c" + except KeyError: + db = DataBase[top] = DB_Module.open(DB_Name, "c") + DB_sync_list.append(db) + return db, "c" + except TypeError: + print "DataBase =", DataBase + raise + +def Reset(): + """Reset global state. Used by unit tests that end up using + SConsign multiple times to get a clean slate for each test.""" + global sig_files, DB_sync_list + sig_files = [] + DB_sync_list = [] + +normcase = os.path.normcase + +def write(): + global sig_files + for sig_file in sig_files: + sig_file.write(sync=0) + for db in DB_sync_list: + try: + syncmethod = db.sync + except AttributeError: + pass # Not all anydbm modules have sync() methods. + else: + syncmethod() + +class SConsignEntry: + """ + Wrapper class for the generic entry in a .sconsign file. + The Node subclass populates it with attributes as it pleases. + + XXX As coded below, we do expect a '.binfo' attribute to be added, + but we'll probably generalize this in the next refactorings. + """ + current_version_id = 1 + def __init__(self): + # Create an object attribute from the class attribute so it ends up + # in the pickled data in the .sconsign file. + _version_id = self.current_version_id + def convert_to_sconsign(self): + self.binfo.convert_to_sconsign() + def convert_from_sconsign(self, dir, name): + self.binfo.convert_from_sconsign(dir, name) + +class Base: + """ + This is the controlling class for the signatures for the collection of + entries associated with a specific directory. The actual directory + association will be maintained by a subclass that is specific to + the underlying storage method. This class provides a common set of + methods for fetching and storing the individual bits of information + that make up signature entry. + """ + def __init__(self): + self.entries = {} + self.dirty = False + self.to_be_merged = {} + + def get_entry(self, filename): + """ + Fetch the specified entry attribute. + """ + return self.entries[filename] + + def set_entry(self, filename, obj): + """ + Set the entry. + """ + self.entries[filename] = obj + self.dirty = True + + def do_not_set_entry(self, filename, obj): + pass + + def store_info(self, filename, node): + entry = node.get_stored_info() + entry.binfo.merge(node.get_binfo()) + self.to_be_merged[filename] = node + self.dirty = True + + def do_not_store_info(self, filename, node): + pass + + def merge(self): + for key, node in self.to_be_merged.items(): + entry = node.get_stored_info() + try: + ninfo = entry.ninfo + except AttributeError: + # This happens with SConf Nodes, because the configuration + # subsystem takes direct control over how the build decision + # is made and its information stored. 
+ pass + else: + ninfo.merge(node.get_ninfo()) + self.entries[key] = entry + self.to_be_merged = {} + +class DB(Base): + """ + A Base subclass that reads and writes signature information + from a global .sconsign.db* file--the actual file suffix is + determined by the database module. + """ + def __init__(self, dir): + Base.__init__(self) + + self.dir = dir + + db, mode = Get_DataBase(dir) + + # Read using the path relative to the top of the Repository + # (self.dir.tpath) from which we're fetching the signature + # information. + path = normcase(dir.tpath) + try: + rawentries = db[path] + except KeyError: + pass + else: + try: + self.entries = cPickle.loads(rawentries) + if type(self.entries) is not type({}): + self.entries = {} + raise TypeError + except KeyboardInterrupt: + raise + except Exception, e: + SCons.Warnings.warn(SCons.Warnings.CorruptSConsignWarning, + "Ignoring corrupt sconsign entry : %s (%s)\n"%(self.dir.tpath, e)) + for key, entry in self.entries.items(): + entry.convert_from_sconsign(dir, key) + + if mode == "r": + # This directory is actually under a repository, which means + # likely they're reaching in directly for a dependency on + # a file there. Don't actually set any entry info, so we + # won't try to write to that .sconsign.dblite file. + self.set_entry = self.do_not_set_entry + self.store_info = self.do_not_store_info + + global sig_files + sig_files.append(self) + + def write(self, sync=1): + if not self.dirty: + return + + self.merge() + + db, mode = Get_DataBase(self.dir) + + # Write using the path relative to the top of the SConstruct + # directory (self.dir.path), not relative to the top of + # the Repository; we only write to our own .sconsign file, + # not to .sconsign files in Repositories. + path = normcase(self.dir.path) + for key, entry in self.entries.items(): + entry.convert_to_sconsign() + db[path] = cPickle.dumps(self.entries, 1) + + if sync: + try: + syncmethod = db.sync + except AttributeError: + # Not all anydbm modules have sync() methods. + pass + else: + syncmethod() + +class Dir(Base): + def __init__(self, fp=None, dir=None): + """ + fp - file pointer to read entries from + """ + Base.__init__(self) + + if not fp: + return + + self.entries = cPickle.load(fp) + if type(self.entries) is not type({}): + self.entries = {} + raise TypeError + + if dir: + for key, entry in self.entries.items(): + entry.convert_from_sconsign(dir, key) + +class DirFile(Dir): + """ + Encapsulates reading and writing a per-directory .sconsign file. + """ + def __init__(self, dir): + """ + dir - the directory for the file + """ + + self.dir = dir + self.sconsign = os.path.join(dir.path, '.sconsign') + + try: + fp = open(self.sconsign, 'rb') + except IOError: + fp = None + + try: + Dir.__init__(self, fp, dir) + except KeyboardInterrupt: + raise + except: + SCons.Warnings.warn(SCons.Warnings.CorruptSConsignWarning, + "Ignoring corrupt .sconsign file: %s"%self.sconsign) + + global sig_files + sig_files.append(self) + + def write(self, sync=1): + """ + Write the .sconsign file to disk. + + Try to write to a temporary file first, and rename it if we + succeed. If we can't write to the temporary file, it's + probably because the directory isn't writable (and if so, + how did we build anything in this directory, anyway?), so + try to write directly to the .sconsign file as a backup. + If we can't rename, try to copy the temporary contents back + to the .sconsign file. Either way, always try to remove + the temporary file at the end. 
+ """ + if not self.dirty: + return + + self.merge() + + temp = os.path.join(self.dir.path, '.scons%d' % os.getpid()) + try: + file = open(temp, 'wb') + fname = temp + except IOError: + try: + file = open(self.sconsign, 'wb') + fname = self.sconsign + except IOError: + return + for key, entry in self.entries.items(): + entry.convert_to_sconsign() + cPickle.dump(self.entries, file, 1) + file.close() + if fname != self.sconsign: + try: + mode = os.stat(self.sconsign)[0] + os.chmod(self.sconsign, 0666) + os.unlink(self.sconsign) + except (IOError, OSError): + # Try to carry on in the face of either OSError + # (things like permission issues) or IOError (disk + # or network issues). If there's a really dangerous + # issue, it should get re-raised by the calls below. + pass + try: + os.rename(fname, self.sconsign) + except OSError: + # An OSError failure to rename may indicate something + # like the directory has no write permission, but + # the .sconsign file itself might still be writable, + # so try writing on top of it directly. An IOError + # here, or in any of the following calls, would get + # raised, indicating something like a potentially + # serious disk or network issue. + open(self.sconsign, 'wb').write(open(fname, 'rb').read()) + os.chmod(self.sconsign, mode) + try: + os.unlink(temp) + except (IOError, OSError): + pass + +ForDirectory = DB + +def File(name, dbm_module=None): + """ + Arrange for all signatures to be stored in a global .sconsign.db* + file. + """ + global ForDirectory, DB_Name, DB_Module + if name is None: + ForDirectory = DirFile + DB_Module = None + else: + ForDirectory = DB + DB_Name = name + if not dbm_module is None: + DB_Module = dbm_module + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Scanner/C.py b/engine/SCons/Scanner/C.py new file mode 100644 index 0000000..bc44e66 --- /dev/null +++ b/engine/SCons/Scanner/C.py @@ -0,0 +1,132 @@ +"""SCons.Scanner.C + +This module implements the depenency scanner for C/C++ code. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Scanner/C.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Node.FS +import SCons.Scanner +import SCons.Util + +import SCons.cpp + +class SConsCPPScanner(SCons.cpp.PreProcessor): + """ + SCons-specific subclass of the cpp.py module's processing. 
+ + We subclass this so that: 1) we can deal with files represented + by Nodes, not strings; 2) we can keep track of the files that are + missing. + """ + def __init__(self, *args, **kw): + apply(SCons.cpp.PreProcessor.__init__, (self,)+args, kw) + self.missing = [] + def initialize_result(self, fname): + self.result = SCons.Util.UniqueList([fname]) + def finalize_result(self, fname): + return self.result[1:] + def find_include_file(self, t): + keyword, quote, fname = t + result = SCons.Node.FS.find_file(fname, self.searchpath[quote]) + if not result: + self.missing.append((fname, self.current_file)) + return result + def read_file(self, file): + try: + fp = open(str(file.rfile())) + except EnvironmentError, e: + self.missing.append((file, self.current_file)) + return '' + else: + return fp.read() + +def dictify_CPPDEFINES(env): + cppdefines = env.get('CPPDEFINES', {}) + if cppdefines is None: + return {} + if SCons.Util.is_Sequence(cppdefines): + result = {} + for c in cppdefines: + if SCons.Util.is_Sequence(c): + result[c[0]] = c[1] + else: + result[c] = None + return result + if not SCons.Util.is_Dict(cppdefines): + return {cppdefines : None} + return cppdefines + +class SConsCPPScannerWrapper: + """ + The SCons wrapper around a cpp.py scanner. + + This is the actual glue between the calling conventions of generic + SCons scanners, and the (subclass of) cpp.py class that knows how + to look for #include lines with reasonably real C-preprocessor-like + evaluation of #if/#ifdef/#else/#elif lines. + """ + def __init__(self, name, variable): + self.name = name + self.path = SCons.Scanner.FindPathDirs(variable) + def __call__(self, node, env, path = ()): + cpp = SConsCPPScanner(current = node.get_dir(), + cpppath = path, + dict = dictify_CPPDEFINES(env)) + result = cpp(node) + for included, includer in cpp.missing: + fmt = "No dependency generated for file: %s (included from: %s) -- file not found" + SCons.Warnings.warn(SCons.Warnings.DependencyWarning, + fmt % (included, includer)) + return result + + def recurse_nodes(self, nodes): + return nodes + def select(self, node): + return self + +def CScanner(): + """Return a prototype Scanner instance for scanning source files + that use the C pre-processor""" + + # Here's how we would (or might) use the CPP scanner code above that + # knows how to evaluate #if/#ifdef/#else/#elif lines when searching + # for #includes. This is commented out for now until we add the + # right configurability to let users pick between the scanners. + #return SConsCPPScannerWrapper("CScanner", "CPPPATH") + + cs = SCons.Scanner.ClassicCPP("CScanner", + "$CPPSUFFIXES", + "CPPPATH", + '^[ \t]*#[ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")') + return cs + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Scanner/D.py b/engine/SCons/Scanner/D.py new file mode 100644 index 0000000..df9b99a --- /dev/null +++ b/engine/SCons/Scanner/D.py @@ -0,0 +1,74 @@ +"""SCons.Scanner.D + +Scanner for the Digital Mars "D" programming language. 
+ +Coded by Andy Friesen +17 Nov 2003 + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Scanner/D.py 4577 2009/12/27 19:43:56 scons" + +import re +import string + +import SCons.Scanner + +def DScanner(): + """Return a prototype Scanner instance for scanning D source files""" + ds = D() + return ds + +class D(SCons.Scanner.Classic): + def __init__ (self): + SCons.Scanner.Classic.__init__ (self, + name = "DScanner", + suffixes = '$DSUFFIXES', + path_variable = 'DPATH', + regex = 'import\s+(?:[a-zA-Z0-9_.]+)\s*(?:,\s*(?:[a-zA-Z0-9_.]+)\s*)*;') + + self.cre2 = re.compile ('(?:import\s)?\s*([a-zA-Z0-9_.]+)\s*(?:,|;)', re.M) + + def find_include(self, include, source_dir, path): + # translate dots (package separators) to slashes + inc = string.replace(include, '.', '/') + + i = SCons.Node.FS.find_file(inc + '.d', (source_dir,) + path) + if i is None: + i = SCons.Node.FS.find_file (inc + '.di', (source_dir,) + path) + return i, include + + def find_include_names(self, node): + includes = [] + for i in self.cre.findall(node.get_text_contents()): + includes = includes + self.cre2.findall(i) + return includes + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Scanner/Dir.py b/engine/SCons/Scanner/Dir.py new file mode 100644 index 0000000..2d0a8b3 --- /dev/null +++ b/engine/SCons/Scanner/Dir.py @@ -0,0 +1,111 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Scanner/Dir.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Node.FS +import SCons.Scanner + +def only_dirs(nodes): + is_Dir = lambda n: isinstance(n.disambiguate(), SCons.Node.FS.Dir) + return filter(is_Dir, nodes) + +def DirScanner(**kw): + """Return a prototype Scanner instance for scanning + directories for on-disk files""" + kw['node_factory'] = SCons.Node.FS.Entry + kw['recursive'] = only_dirs + return apply(SCons.Scanner.Base, (scan_on_disk, "DirScanner"), kw) + +def DirEntryScanner(**kw): + """Return a prototype Scanner instance for "scanning" + directory Nodes for their in-memory entries""" + kw['node_factory'] = SCons.Node.FS.Entry + kw['recursive'] = None + return apply(SCons.Scanner.Base, (scan_in_memory, "DirEntryScanner"), kw) + +skip_entry = {} + +skip_entry_list = [ + '.', + '..', + '.sconsign', + # Used by the native dblite.py module. + '.sconsign.dblite', + # Used by dbm and dumbdbm. + '.sconsign.dir', + # Used by dbm. + '.sconsign.pag', + # Used by dumbdbm. + '.sconsign.dat', + '.sconsign.bak', + # Used by some dbm emulations using Berkeley DB. + '.sconsign.db', +] + +for skip in skip_entry_list: + skip_entry[skip] = 1 + skip_entry[SCons.Node.FS._my_normcase(skip)] = 1 + +do_not_scan = lambda k: not skip_entry.has_key(k) + +def scan_on_disk(node, env, path=()): + """ + Scans a directory for on-disk files and directories therein. + + Looking up the entries will add these to the in-memory Node tree + representation of the file system, so all we have to do is just + that and then call the in-memory scanning function. + """ + try: + flist = node.fs.listdir(node.abspath) + except (IOError, OSError): + return [] + e = node.Entry + for f in filter(do_not_scan, flist): + # Add ./ to the beginning of the file name so if it begins with a + # '#' we don't look it up relative to the top-level directory. + e('./' + f) + return scan_in_memory(node, env, path) + +def scan_in_memory(node, env, path=()): + """ + "Scans" a Node.FS.Dir for its in-memory entries. + """ + try: + entries = node.entries + except AttributeError: + # It's not a Node.FS.Dir (or doesn't look enough like one for + # our purposes), which can happen if a target list containing + # mixed Node types (Dirs and Files, for example) has a Dir as + # the first entry. + return [] + entry_list = filter(do_not_scan, entries.keys()) + entry_list.sort() + return map(lambda n, e=entries: e[n], entry_list) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Scanner/Fortran.py b/engine/SCons/Scanner/Fortran.py new file mode 100644 index 0000000..c0ac91f --- /dev/null +++ b/engine/SCons/Scanner/Fortran.py @@ -0,0 +1,320 @@ +"""SCons.Scanner.Fortran + +This module implements the dependency scanner for Fortran code. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Scanner/Fortran.py 4577 2009/12/27 19:43:56 scons" + +import re +import string + +import SCons.Node +import SCons.Node.FS +import SCons.Scanner +import SCons.Util +import SCons.Warnings + +class F90Scanner(SCons.Scanner.Classic): + """ + A Classic Scanner subclass for Fortran source files which takes + into account both USE and INCLUDE statements. This scanner will + work for both F77 and F90 (and beyond) compilers. + + Currently, this scanner assumes that the include files do not contain + USE statements. To enable the ability to deal with USE statements + in include files, add logic right after the module names are found + to loop over each include file, search for and locate each USE + statement, and append each module name to the list of dependencies. + Caching the search results in a common dictionary somewhere so that + the same include file is not searched multiple times would be a + smart thing to do. 
+ """ + + def __init__(self, name, suffixes, path_variable, + use_regex, incl_regex, def_regex, *args, **kw): + + self.cre_use = re.compile(use_regex, re.M) + self.cre_incl = re.compile(incl_regex, re.M) + self.cre_def = re.compile(def_regex, re.M) + + def _scan(node, env, path, self=self): + node = node.rfile() + + if not node.exists(): + return [] + + return self.scan(node, env, path) + + kw['function'] = _scan + kw['path_function'] = SCons.Scanner.FindPathDirs(path_variable) + kw['recursive'] = 1 + kw['skeys'] = suffixes + kw['name'] = name + + apply(SCons.Scanner.Current.__init__, (self,) + args, kw) + + def scan(self, node, env, path=()): + + # cache the includes list in node so we only scan it once: + if node.includes != None: + mods_and_includes = node.includes + else: + # retrieve all included filenames + includes = self.cre_incl.findall(node.get_text_contents()) + # retrieve all USE'd module names + modules = self.cre_use.findall(node.get_text_contents()) + # retrieve all defined module names + defmodules = self.cre_def.findall(node.get_text_contents()) + + # Remove all USE'd module names that are defined in the same file + d = {} + for m in defmodules: + d[m] = 1 + modules = filter(lambda m, d=d: not d.has_key(m), modules) + #modules = self.undefinedModules(modules, defmodules) + + # Convert module name to a .mod filename + suffix = env.subst('$FORTRANMODSUFFIX') + modules = map(lambda x, s=suffix: string.lower(x) + s, modules) + # Remove unique items from the list + mods_and_includes = SCons.Util.unique(includes+modules) + node.includes = mods_and_includes + + # This is a hand-coded DSU (decorate-sort-undecorate, or + # Schwartzian transform) pattern. The sort key is the raw name + # of the file as specifed on the USE or INCLUDE line, which lets + # us keep the sort order constant regardless of whether the file + # is actually found in a Repository or locally. + nodes = [] + source_dir = node.get_dir() + if callable(path): + path = path() + for dep in mods_and_includes: + n, i = self.find_include(dep, source_dir, path) + + if n is None: + SCons.Warnings.warn(SCons.Warnings.DependencyWarning, + "No dependency generated for file: %s (referenced by: %s) -- file not found" % (i, node)) + else: + sortkey = self.sort_key(dep) + nodes.append((sortkey, n)) + + nodes.sort() + nodes = map(lambda pair: pair[1], nodes) + return nodes + +def FortranScan(path_variable="FORTRANPATH"): + """Return a prototype Scanner instance for scanning source files + for Fortran USE & INCLUDE statements""" + +# The USE statement regex matches the following: +# +# USE module_name +# USE :: module_name +# USE, INTRINSIC :: module_name +# USE, NON_INTRINSIC :: module_name +# +# Limitations +# +# -- While the regex can handle multiple USE statements on one line, +# it cannot properly handle them if they are commented out. +# In either of the following cases: +# +# ! USE mod_a ; USE mod_b [entire line is commented out] +# USE mod_a ! ; USE mod_b [in-line comment of second USE statement] +# +# the second module name (mod_b) will be picked up as a dependency +# even though it should be ignored. The only way I can see +# to rectify this would be to modify the scanner to eliminate +# the call to re.findall, read in the contents of the file, +# treating the comment character as an end-of-line character +# in addition to the normal linefeed, loop over each line, +# weeding out the comments, and looking for the USE statements. 
+# One advantage to this is that the regex passed to the scanner +# would no longer need to match a semicolon. +# +# -- I question whether or not we need to detect dependencies to +# INTRINSIC modules because these are built-in to the compiler. +# If we consider them a dependency, will SCons look for them, not +# find them, and kill the build? Or will we there be standard +# compiler-specific directories we will need to point to so the +# compiler and SCons can locate the proper object and mod files? + +# Here is a breakdown of the regex: +# +# (?i) : regex is case insensitive +# ^ : start of line +# (?: : group a collection of regex symbols without saving the match as a "group" +# ^|; : matches either the start of the line or a semicolon - semicolon +# ) : end the unsaved grouping +# \s* : any amount of white space +# USE : match the string USE, case insensitive +# (?: : group a collection of regex symbols without saving the match as a "group" +# \s+| : match one or more whitespace OR .... (the next entire grouped set of regex symbols) +# (?: : group a collection of regex symbols without saving the match as a "group" +# (?: : establish another unsaved grouping of regex symbols +# \s* : any amount of white space +# , : match a comma +# \s* : any amount of white space +# (?:NON_)? : optionally match the prefix NON_, case insensitive +# INTRINSIC : match the string INTRINSIC, case insensitive +# )? : optionally match the ", INTRINSIC/NON_INTRINSIC" grouped expression +# \s* : any amount of white space +# :: : match a double colon that must appear after the INTRINSIC/NON_INTRINSIC attribute +# ) : end the unsaved grouping +# ) : end the unsaved grouping +# \s* : match any amount of white space +# (\w+) : match the module name that is being USE'd +# +# + use_regex = "(?i)(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)" + + +# The INCLUDE statement regex matches the following: +# +# INCLUDE 'some_Text' +# INCLUDE "some_Text" +# INCLUDE "some_Text" ; INCLUDE "some_Text" +# INCLUDE kind_"some_Text" +# INCLUDE kind_'some_Text" +# +# where some_Text can include any alphanumeric and/or special character +# as defined by the Fortran 2003 standard. +# +# Limitations: +# +# -- The Fortran standard dictates that a " or ' in the INCLUDE'd +# string must be represented as a "" or '', if the quotes that wrap +# the entire string are either a ' or ", respectively. While the +# regular expression below can detect the ' or " characters just fine, +# the scanning logic, presently is unable to detect them and reduce +# them to a single instance. This probably isn't an issue since, +# in practice, ' or " are not generally used in filenames. +# +# -- This regex will not properly deal with multiple INCLUDE statements +# when the entire line has been commented out, ala +# +# ! INCLUDE 'some_file' ; INCLUDE 'some_file' +# +# In such cases, it will properly ignore the first INCLUDE file, +# but will actually still pick up the second. Interestingly enough, +# the regex will properly deal with these cases: +# +# INCLUDE 'some_file' +# INCLUDE 'some_file' !; INCLUDE 'some_file' +# +# To get around the above limitation, the FORTRAN programmer could +# simply comment each INCLUDE statement separately, like this +# +# ! 
INCLUDE 'some_file' !; INCLUDE 'some_file' +# +# The way I see it, the only way to get around this limitation would +# be to modify the scanning logic to replace the calls to re.findall +# with a custom loop that processes each line separately, throwing +# away fully commented out lines before attempting to match against +# the INCLUDE syntax. +# +# Here is a breakdown of the regex: +# +# (?i) : regex is case insensitive +# (?: : begin a non-saving group that matches the following: +# ^ : either the start of the line +# | : or +# ['">]\s*; : a semicolon that follows a single quote, +# double quote or greater than symbol (with any +# amount of whitespace in between). This will +# allow the regex to match multiple INCLUDE +# statements per line (although it also requires +# the positive lookahead assertion that is +# used below). It will even properly deal with +# (i.e. ignore) cases in which the additional +# INCLUDES are part of an in-line comment, ala +# " INCLUDE 'someFile' ! ; INCLUDE 'someFile2' " +# ) : end of non-saving group +# \s* : any amount of white space +# INCLUDE : match the string INCLUDE, case insensitive +# \s+ : match one or more white space characters +# (?\w+_)? : match the optional "kind-param _" prefix allowed by the standard +# [<"'] : match the include delimiter - an apostrophe, double quote, or less than symbol +# (.+?) : match one or more characters that make up +# the included path and file name and save it +# in a group. The Fortran standard allows for +# any non-control character to be used. The dot +# operator will pick up any character, including +# control codes, but I can't conceive of anyone +# putting control codes in their file names. +# The question mark indicates it is non-greedy so +# that regex will match only up to the next quote, +# double quote, or greater than symbol +# (?=["'>]) : positive lookahead assertion to match the include +# delimiter - an apostrophe, double quote, or +# greater than symbol. 
This level of complexity +# is required so that the include delimiter is +# not consumed by the match, thus allowing the +# sub-regex discussed above to uniquely match a +# set of semicolon-separated INCLUDE statements +# (as allowed by the F2003 standard) + + include_regex = """(?i)(?:^|['">]\s*;)\s*INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])""" + +# The MODULE statement regex finds module definitions by matching +# the following: +# +# MODULE module_name +# +# but *not* the following: +# +# MODULE PROCEDURE procedure_name +# +# Here is a breakdown of the regex: +# +# (?i) : regex is case insensitive +# ^\s* : any amount of white space +# MODULE : match the string MODULE, case insensitive +# \s+ : match one or more white space characters +# (?!PROCEDURE) : but *don't* match if the next word matches +# PROCEDURE (negative lookahead assertion), +# case insensitive +# (\w+) : match one or more alphanumeric characters +# that make up the defined module name and +# save it in a group + + def_regex = """(?i)^\s*MODULE\s+(?!PROCEDURE)(\w+)""" + + scanner = F90Scanner("FortranScan", + "$FORTRANSUFFIXES", + path_variable, + use_regex, + include_regex, + def_regex) + return scanner + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Scanner/IDL.py b/engine/SCons/Scanner/IDL.py new file mode 100644 index 0000000..683e9bb --- /dev/null +++ b/engine/SCons/Scanner/IDL.py @@ -0,0 +1,48 @@ +"""SCons.Scanner.IDL + +This module implements the depenency scanner for IDL (Interface +Definition Language) files. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Scanner/IDL.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Node.FS +import SCons.Scanner + +def IDLScan(): + """Return a prototype Scanner instance for scanning IDL source files""" + cs = SCons.Scanner.ClassicCPP("IDLScan", + "$IDLSUFFIXES", + "CPPPATH", + '^[ \t]*(?:#[ \t]*include|[ \t]*import)[ \t]+(<|")([^>"]+)(>|")') + return cs + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Scanner/LaTeX.py b/engine/SCons/Scanner/LaTeX.py new file mode 100644 index 0000000..761a584 --- /dev/null +++ b/engine/SCons/Scanner/LaTeX.py @@ -0,0 +1,345 @@ +"""SCons.Scanner.LaTeX + +This module implements the dependency scanner for LaTeX code. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Scanner/LaTeX.py 4577 2009/12/27 19:43:56 scons" + +import os.path +import string +import re + +import SCons.Scanner +import SCons.Util + +# list of graphics file extensions for TeX and LaTeX +TexGraphics = ['.eps', '.ps'] +LatexGraphics = ['.pdf', '.png', '.jpg', '.gif', '.tif'] + +# Used as a return value of modify_env_var if the variable is not set. +class _Null: + pass +_null = _Null + +# The user specifies the paths in env[variable], similar to other builders. +# They may be relative and must be converted to absolute, as expected +# by LaTeX and Co. The environment may already have some paths in +# env['ENV'][var]. These paths are honored, but the env[var] paths have +# higher precedence. All changes are un-done on exit. +def modify_env_var(env, var, abspath): + try: + save = env['ENV'][var] + except KeyError: + save = _null + env.PrependENVPath(var, abspath) + try: + if SCons.Util.is_List(env[var]): + #TODO(1.5) + #env.PrependENVPath(var, [os.path.abspath(str(p)) for p in env[var]]) + env.PrependENVPath(var, map(lambda p: os.path.abspath(str(p)), env[var])) + else: + # Split at os.pathsep to convert into absolute path + #TODO(1.5) env.PrependENVPath(var, [os.path.abspath(p) for p in str(env[var]).split(os.pathsep)]) + env.PrependENVPath(var, map(lambda p: os.path.abspath(p), string.split(str(env[var]), os.pathsep))) + except KeyError: + pass + + # Convert into a string explicitly to append ":" (without which it won't search system + # paths as well). The problem is that env.AppendENVPath(var, ":") + # does not work, refuses to append ":" (os.pathsep). 
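+ # Editor's note -- an illustrative sketch, not part of the upstream
+ # patch: after the PrependENVPath() calls above and the join/append
+ # below, env['ENV'][var] ends up as one os.pathsep-separated string
+ # with the absolutized env[var] directories in front of any value that
+ # was already there, plus a trailing os.pathsep, e.g. roughly
+ #     '/abs/build/tex' + os.pathsep + '/usr/share/texmf' + os.pathsep
+ # and that trailing separator is what lets the TeX tools still fall
+ # back to their built-in search path.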
+ + if SCons.Util.is_List(env['ENV'][var]): + # TODO(1.5) + #env['ENV'][var] = os.pathsep.join(env['ENV'][var]) + env['ENV'][var] = string.join(env['ENV'][var], os.pathsep) + # Append the trailing os.pathsep character here to catch the case with no env[var] + env['ENV'][var] = env['ENV'][var] + os.pathsep + + return save + +class FindENVPathDirs: + """A class to bind a specific *PATH variable name to a function that + will return all of the *path directories.""" + def __init__(self, variable): + self.variable = variable + def __call__(self, env, dir=None, target=None, source=None, argument=None): + import SCons.PathList + try: + path = env['ENV'][self.variable] + except KeyError: + return () + + dir = dir or env.fs._cwd + path = SCons.PathList.PathList(path).subst_path(env, target, source) + return tuple(dir.Rfindalldirs(path)) + + + +def LaTeXScanner(): + """Return a prototype Scanner instance for scanning LaTeX source files + when built with latex. + """ + ds = LaTeX(name = "LaTeXScanner", + suffixes = '$LATEXSUFFIXES', + # in the search order, see below in LaTeX class docstring + graphics_extensions = TexGraphics, + recursive = 0) + return ds + +def PDFLaTeXScanner(): + """Return a prototype Scanner instance for scanning LaTeX source files + when built with pdflatex. + """ + ds = LaTeX(name = "PDFLaTeXScanner", + suffixes = '$LATEXSUFFIXES', + # in the search order, see below in LaTeX class docstring + graphics_extensions = LatexGraphics, + recursive = 0) + return ds + +class LaTeX(SCons.Scanner.Base): + """Class for scanning LaTeX files for included files. + + Unlike most scanners, which use regular expressions that just + return the included file name, this returns a tuple consisting + of the keyword for the inclusion ("include", "includegraphics", + "input", or "bibliography"), and then the file name itself. + Based on a quick look at LaTeX documentation, it seems that we + should append .tex suffix for the "include" keywords, append .tex if + there is no extension for the "input" keyword, and need to add .bib + for the "bibliography" keyword that does not accept extensions by itself. + + Finally, if there is no extension for an "includegraphics" keyword + latex will append .ps or .eps to find the file, while pdftex may use .pdf, + .jpg, .tif, .mps, or .png. + + The actual subset and search order may be altered by + DeclareGraphicsExtensions command. This complication is ignored. + The default order corresponds to experimentation with teTeX + $ latex --version + pdfeTeX 3.141592-1.21a-2.2 (Web2C 7.5.4) + kpathsea version 3.5.4 + The order is: + ['.eps', '.ps'] for latex + ['.png', '.pdf', '.jpg', '.tif']. 
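+ For example (editor's hedged illustration, not upstream text): with no
+ extension given, \includegraphics{fig} is looked up as 'fig' plus each
+ of the graphics extensions above in turn, and \bibliography{refs} is
+ looked up as 'refs.bib'.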
+ + Another difference is that the search path is determined by the type + of the file being searched: + env['TEXINPUTS'] for "input" and "include" keywords + env['TEXINPUTS'] for "includegraphics" keyword + env['TEXINPUTS'] for "lstinputlisting" keyword + env['BIBINPUTS'] for "bibliography" keyword + env['BSTINPUTS'] for "bibliographystyle" keyword + + FIXME: also look for the class or style in document[class|style]{} + FIXME: also look for the argument of bibliographystyle{} + """ + keyword_paths = {'include': 'TEXINPUTS', + 'input': 'TEXINPUTS', + 'includegraphics': 'TEXINPUTS', + 'bibliography': 'BIBINPUTS', + 'bibliographystyle': 'BSTINPUTS', + 'usepackage': 'TEXINPUTS', + 'lstinputlisting': 'TEXINPUTS'} + env_variables = SCons.Util.unique(keyword_paths.values()) + + def __init__(self, name, suffixes, graphics_extensions, *args, **kw): + + # We have to include \n with the % we exclude from the first part + # part of the regex because the expression is compiled with re.M. + # Without the \n, the ^ could match the beginning of a *previous* + # line followed by one or more newline characters (i.e. blank + # lines), interfering with a match on the next line. + regex = r'^[^%\n]*\\(include|includegraphics(?:\[[^\]]+\])?|lstinputlisting(?:\[[^\]]+\])?|input|bibliography|usepackage){([^}]*)}' + self.cre = re.compile(regex, re.M) + self.graphics_extensions = graphics_extensions + + def _scan(node, env, path=(), self=self): + node = node.rfile() + if not node.exists(): + return [] + return self.scan(node, path) + + class FindMultiPathDirs: + """The stock FindPathDirs function has the wrong granularity: + it is called once per target, while we need the path that depends + on what kind of included files is being searched. This wrapper + hides multiple instances of FindPathDirs, one per the LaTeX path + variable in the environment. When invoked, the function calculates + and returns all the required paths as a dictionary (converted into + a tuple to become hashable). Then the scan function converts it + back and uses a dictionary of tuples rather than a single tuple + of paths. + """ + def __init__(self, dictionary): + self.dictionary = {} + for k,n in dictionary.items(): + self.dictionary[k] = ( SCons.Scanner.FindPathDirs(n), + FindENVPathDirs(n) ) + + def __call__(self, env, dir=None, target=None, source=None, + argument=None): + di = {} + for k,(c,cENV) in self.dictionary.items(): + di[k] = ( c(env, dir=None, target=None, source=None, + argument=None) , + cENV(env, dir=None, target=None, source=None, + argument=None) ) + # To prevent "dict is not hashable error" + return tuple(di.items()) + + class LaTeXScanCheck: + """Skip all but LaTeX source files, i.e., do not scan *.eps, + *.pdf, *.jpg, etc. + """ + def __init__(self, suffixes): + self.suffixes = suffixes + def __call__(self, node, env): + current = not node.has_builder() or node.is_up_to_date() + scannable = node.get_suffix() in env.subst_list(self.suffixes)[0] + # Returning false means that the file is not scanned. 
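+ # Editor's note (illustrative, not from the upstream source): a
+ # graphics node such as 'figure.pdf' has no suffix in $LATEXSUFFIXES,
+ # so 'scannable' is false and it is skipped; a derived 'chapter.tex'
+ # that still needs to be built is likewise skipped until it is current,
+ # so a half-built file cannot contribute bogus dependencies.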
+ return scannable and current + + kw['function'] = _scan + kw['path_function'] = FindMultiPathDirs(LaTeX.keyword_paths) + kw['recursive'] = 1 + kw['skeys'] = suffixes + kw['scan_check'] = LaTeXScanCheck(suffixes) + kw['name'] = name + + apply(SCons.Scanner.Base.__init__, (self,) + args, kw) + + def _latex_names(self, include): + filename = include[1] + if include[0] == 'input': + base, ext = os.path.splitext( filename ) + if ext == "": + return [filename + '.tex'] + if (include[0] == 'include'): + return [filename + '.tex'] + if include[0] == 'bibliography': + base, ext = os.path.splitext( filename ) + if ext == "": + return [filename + '.bib'] + if include[0] == 'usepackage': + base, ext = os.path.splitext( filename ) + if ext == "": + return [filename + '.sty'] + if include[0] == 'includegraphics': + base, ext = os.path.splitext( filename ) + if ext == "": + #TODO(1.5) return [filename + e for e in self.graphics_extensions] + #return map(lambda e, f=filename: f+e, self.graphics_extensions + TexGraphics) + # use the line above to find dependency for PDF builder when only .eps figure is present + # Since it will be found if the user tell scons how to make the pdf figure leave it out for now. + return map(lambda e, f=filename: f+e, self.graphics_extensions) + return [filename] + + def sort_key(self, include): + return SCons.Node.FS._my_normcase(str(include)) + + def find_include(self, include, source_dir, path): + try: + sub_path = path[include[0]] + except (IndexError, KeyError): + sub_path = () + try_names = self._latex_names(include) + for n in try_names: + # see if we find it using the path in env[var] + i = SCons.Node.FS.find_file(n, (source_dir,) + sub_path[0]) + if i: + return i, include + # see if we find it using the path in env['ENV'][var] + i = SCons.Node.FS.find_file(n, (source_dir,) + sub_path[1]) + if i: + return i, include + return i, include + + def scan(self, node, path=()): + # Modify the default scan function to allow for the regular + # expression to return a comma separated list of file names + # as can be the case with the bibliography keyword. + + # Cache the includes list in node so we only scan it once: + path_dict = dict(list(path)) + noopt_cre = re.compile('\[.*$') + if node.includes != None: + includes = node.includes + else: + includes = self.cre.findall(node.get_text_contents()) + # 1. Split comma-separated lines, e.g. + # ('bibliography', 'phys,comp') + # should become two entries + # ('bibliography', 'phys') + # ('bibliography', 'comp') + # 2. Remove the options, e.g., such as + # ('includegraphics[clip,width=0.7\\linewidth]', 'picture.eps') + # should become + # ('includegraphics', 'picture.eps') + split_includes = [] + for include in includes: + inc_type = noopt_cre.sub('', include[0]) + inc_list = string.split(include[1],',') + for j in range(len(inc_list)): + split_includes.append( (inc_type, inc_list[j]) ) + # + includes = split_includes + node.includes = includes + + # This is a hand-coded DSU (decorate-sort-undecorate, or + # Schwartzian transform) pattern. The sort key is the raw name + # of the file as specifed on the \include, \input, etc. line. 
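+ # Editor's sketch of the DSU idiom used below (the names n1/n2 are
+ # hypothetical, not part of the upstream patch):
+ #     decorated = map(lambda n, s=self: (s.sort_key(n), n), [n1, n2])
+ #     decorated.sort()
+ #     ordered = map(lambda pair: pair[1], decorated)
+ # i.e. decorate each node with a computed key, sort, then undecorate.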
+ # TODO: what about the comment in the original Classic scanner: + # """which lets + # us keep the sort order constant regardless of whether the file + # is actually found in a Repository or locally.""" + nodes = [] + source_dir = node.get_dir() + for include in includes: + # + # Handle multiple filenames in include[1] + # + n, i = self.find_include(include, source_dir, path_dict) + if n is None: + # Do not bother with 'usepackage' warnings, as they most + # likely refer to system-level files + if include[0] != 'usepackage': + SCons.Warnings.warn(SCons.Warnings.DependencyWarning, + "No dependency generated for file: %s (included from: %s) -- file not found" % (i, node)) + else: + sortkey = self.sort_key(n) + nodes.append((sortkey, n)) + # + nodes.sort() + nodes = map(lambda pair: pair[1], nodes) + return nodes + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Scanner/Prog.py b/engine/SCons/Scanner/Prog.py new file mode 100644 index 0000000..d0ba16b --- /dev/null +++ b/engine/SCons/Scanner/Prog.py @@ -0,0 +1,103 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Scanner/Prog.py 4577 2009/12/27 19:43:56 scons" + +import string + +import SCons.Node +import SCons.Node.FS +import SCons.Scanner +import SCons.Util + +# global, set by --debug=findlibs +print_find_libs = None + +def ProgramScanner(**kw): + """Return a prototype Scanner instance for scanning executable + files for static-lib dependencies""" + kw['path_function'] = SCons.Scanner.FindPathDirs('LIBPATH') + ps = apply(SCons.Scanner.Base, [scan, "ProgramScanner"], kw) + return ps + +def scan(node, env, libpath = ()): + """ + This scanner scans program files for static-library + dependencies. It will search the LIBPATH environment variable + for libraries specified in the LIBS variable, returning any + files it finds as dependencies. 
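+ For example (editor's hedged note, not upstream text): with
+ LIBS=['m', 'foo'], LIBPREFIXES=['lib'] and LIBSUFFIXES=['.a'], the
+ scanner asks find_file() for 'libm.a' and 'libfoo.a' in each LIBPATH
+ directory and returns the Nodes of whichever files it finds.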
+ """ + try: + libs = env['LIBS'] + except KeyError: + # There are no LIBS in this environment, so just return a null list: + return [] + if SCons.Util.is_String(libs): + libs = string.split(libs) + else: + libs = SCons.Util.flatten(libs) + + try: + prefix = env['LIBPREFIXES'] + if not SCons.Util.is_List(prefix): + prefix = [ prefix ] + except KeyError: + prefix = [ '' ] + + try: + suffix = env['LIBSUFFIXES'] + if not SCons.Util.is_List(suffix): + suffix = [ suffix ] + except KeyError: + suffix = [ '' ] + + pairs = [] + for suf in map(env.subst, suffix): + for pref in map(env.subst, prefix): + pairs.append((pref, suf)) + + result = [] + + if callable(libpath): + libpath = libpath() + + find_file = SCons.Node.FS.find_file + adjustixes = SCons.Util.adjustixes + for lib in libs: + if SCons.Util.is_String(lib): + lib = env.subst(lib) + for pref, suf in pairs: + l = adjustixes(lib, pref, suf) + l = find_file(l, libpath, verbose=print_find_libs) + if l: + result.append(l) + else: + result.append(lib) + + return result + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Scanner/RC.py b/engine/SCons/Scanner/RC.py new file mode 100644 index 0000000..243de3c --- /dev/null +++ b/engine/SCons/Scanner/RC.py @@ -0,0 +1,55 @@ +"""SCons.Scanner.RC + +This module implements the depenency scanner for RC (Interface +Definition Language) files. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Scanner/RC.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Node.FS +import SCons.Scanner +import re + +def RCScan(): + """Return a prototype Scanner instance for scanning RC source files""" + + res_re= r'^(?:\s*#\s*(?:include)|' \ + '.*?\s+(?:ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)' \ + '\s*.*?)' \ + '\s*(<|"| )([^>"\s]+)(?:[>" ])*$' + resScanner = SCons.Scanner.ClassicCPP( "ResourceScanner", + "$RCSUFFIXES", + "CPPPATH", + res_re ) + + return resScanner + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Scanner/__init__.py b/engine/SCons/Scanner/__init__.py new file mode 100644 index 0000000..7e72504 --- /dev/null +++ b/engine/SCons/Scanner/__init__.py @@ -0,0 +1,415 @@ +"""SCons.Scanner + +The Scanner package for the SCons software construction utility. 
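+
+ A typical Tool module builds its scanner from the classes defined here.
+ A hedged sketch (the 'FooScan' name, '.foo' suffix, 'FOOPATH' variable
+ and regex are made up for illustration):
+
+     import SCons.Scanner
+     foo_scan = SCons.Scanner.Classic("FooScan", ['.foo'], 'FOOPATH',
+                                      r'^include\s+(\S+)$')
+     env.Append(SCANNERS = foo_scan)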
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Scanner/__init__.py 4577 2009/12/27 19:43:56 scons" + +import re +import string + +import SCons.Node.FS +import SCons.Util + + +class _Null: + pass + +# This is used instead of None as a default argument value so None can be +# used as an actual argument value. +_null = _Null + +def Scanner(function, *args, **kw): + """ + Public interface factory function for creating different types + of Scanners based on the different types of "functions" that may + be supplied. + + TODO: Deprecate this some day. We've moved the functionality + inside the Base class and really don't need this factory function + any more. It was, however, used by some of our Tool modules, so + the call probably ended up in various people's custom modules + patterned on SCons code. + """ + if SCons.Util.is_Dict(function): + return apply(Selector, (function,) + args, kw) + else: + return apply(Base, (function,) + args, kw) + + + +class FindPathDirs: + """A class to bind a specific *PATH variable name to a function that + will return all of the *path directories.""" + def __init__(self, variable): + self.variable = variable + def __call__(self, env, dir=None, target=None, source=None, argument=None): + import SCons.PathList + try: + path = env[self.variable] + except KeyError: + return () + + dir = dir or env.fs._cwd + path = SCons.PathList.PathList(path).subst_path(env, target, source) + return tuple(dir.Rfindalldirs(path)) + + + +class Base: + """ + The base class for dependency scanners. This implements + straightforward, single-pass scanning of a single file. + """ + + def __init__(self, + function, + name = "NONE", + argument = _null, + skeys = _null, + path_function = None, + node_class = SCons.Node.FS.Entry, + node_factory = None, + scan_check = None, + recursive = None): + """ + Construct a new scanner object given a scanner function. + + 'function' - a scanner function taking two or three + arguments and returning a list of strings. + + 'name' - a name for identifying this scanner object. + + 'argument' - an optional argument that, if specified, will be + passed to both the scanner function and the path_function. + + 'skeys' - an optional list argument that can be used to determine + which scanner should be used for a given Node. In the case of File + nodes, for example, the 'skeys' would be file suffixes. 
+ + 'path_function' - a function that takes four or five arguments + (a construction environment, Node for the directory containing + the SConscript file that defined the primary target, list of + target nodes, list of source nodes, and optional argument for + this instance) and returns a tuple of the directories that can + be searched for implicit dependency files. May also return a + callable() which is called with no args and returns the tuple + (supporting Bindable class). + + 'node_class' - the class of Nodes which this scan will return. + If node_class is None, then this scanner will not enforce any + Node conversion and will return the raw results from the + underlying scanner function. + + 'node_factory' - the factory function to be called to translate + the raw results returned by the scanner function into the + expected node_class objects. + + 'scan_check' - a function to be called to first check whether + this node really needs to be scanned. + + 'recursive' - specifies that this scanner should be invoked + recursively on all of the implicit dependencies it returns + (the canonical example being #include lines in C source files). + May be a callable, which will be called to filter the list + of nodes found to select a subset for recursive scanning + (the canonical example being only recursively scanning + subdirectories within a directory). + + The scanner function's first argument will be a Node that should + be scanned for dependencies, the second argument will be an + Environment object, the third argument will be the tuple of paths + returned by the path_function, and the fourth argument will be + the value passed into 'argument', and the returned list should + contain the Nodes for all the direct dependencies of the file. + + Examples: + + s = Scanner(my_scanner_function) + + s = Scanner(function = my_scanner_function) + + s = Scanner(function = my_scanner_function, argument = 'foo') + + """ + + # Note: this class could easily work with scanner functions that take + # something other than a filename as an argument (e.g. a database + # node) and a dependencies list that aren't file names. All that + # would need to be changed is the documentation. + + self.function = function + self.path_function = path_function + self.name = name + self.argument = argument + + if skeys is _null: + if SCons.Util.is_Dict(function): + skeys = function.keys() + else: + skeys = [] + self.skeys = skeys + + self.node_class = node_class + self.node_factory = node_factory + self.scan_check = scan_check + if callable(recursive): + self.recurse_nodes = recursive + elif recursive: + self.recurse_nodes = self._recurse_all_nodes + else: + self.recurse_nodes = self._recurse_no_nodes + + def path(self, env, dir=None, target=None, source=None): + if not self.path_function: + return () + if not self.argument is _null: + return self.path_function(env, dir, target, source, self.argument) + else: + return self.path_function(env, dir, target, source) + + def __call__(self, node, env, path = ()): + """ + This method scans a single object. 'node' is the node + that will be passed to the scanner function, and 'env' is the + environment that will be passed to the scanner function. A list of + direct dependency nodes for the specified node will be returned. 
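+ Editor's note (illustrative, not upstream text): strings returned by
+ the scanner function are turned into Node objects below, using the
+ environment's node factory and the directory of the scanned node, so a
+ function that returns ['foo.h', 'bar.h'] yields the corresponding file
+ Nodes rather than bare strings.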
+ """ + if self.scan_check and not self.scan_check(node, env): + return [] + + self = self.select(node) + + if not self.argument is _null: + list = self.function(node, env, path, self.argument) + else: + list = self.function(node, env, path) + + kw = {} + if hasattr(node, 'dir'): + kw['directory'] = node.dir + node_factory = env.get_factory(self.node_factory) + nodes = [] + for l in list: + if self.node_class and not isinstance(l, self.node_class): + l = apply(node_factory, (l,), kw) + nodes.append(l) + return nodes + + def __cmp__(self, other): + try: + return cmp(self.__dict__, other.__dict__) + except AttributeError: + # other probably doesn't have a __dict__ + return cmp(self.__dict__, other) + + def __hash__(self): + return id(self) + + def __str__(self): + return self.name + + def add_skey(self, skey): + """Add a skey to the list of skeys""" + self.skeys.append(skey) + + def get_skeys(self, env=None): + if env and SCons.Util.is_String(self.skeys): + return env.subst_list(self.skeys)[0] + return self.skeys + + def select(self, node): + if SCons.Util.is_Dict(self.function): + key = node.scanner_key() + try: + return self.function[key] + except KeyError: + return None + else: + return self + + def _recurse_all_nodes(self, nodes): + return nodes + + def _recurse_no_nodes(self, nodes): + return [] + + recurse_nodes = _recurse_no_nodes + + def add_scanner(self, skey, scanner): + self.function[skey] = scanner + self.add_skey(skey) + + +class Selector(Base): + """ + A class for selecting a more specific scanner based on the + scanner_key() (suffix) for a specific Node. + + TODO: This functionality has been moved into the inner workings of + the Base class, and this class will be deprecated at some point. + (It was never exposed directly as part of the public interface, + although it is used by the Scanner() factory function that was + used by various Tool modules and therefore was likely a template + for custom modules that may be out there.) + """ + def __init__(self, dict, *args, **kw): + apply(Base.__init__, (self, None,)+args, kw) + self.dict = dict + self.skeys = dict.keys() + + def __call__(self, node, env, path = ()): + return self.select(node)(node, env, path) + + def select(self, node): + try: + return self.dict[node.scanner_key()] + except KeyError: + return None + + def add_scanner(self, skey, scanner): + self.dict[skey] = scanner + self.add_skey(skey) + + +class Current(Base): + """ + A class for scanning files that are source files (have no builder) + or are derived files and are current (which implies that they exist, + either locally or in a repository). + """ + + def __init__(self, *args, **kw): + def current_check(node, env): + return not node.has_builder() or node.is_up_to_date() + kw['scan_check'] = current_check + apply(Base.__init__, (self,) + args, kw) + +class Classic(Current): + """ + A Scanner subclass to contain the common logic for classic CPP-style + include scanning, but which can be customized to use different + regular expressions to find the includes. + + Note that in order for this to work "out of the box" (without + overriding the find_include() and sort_key() methods), the regular + expression passed to the constructor must return the name of the + include file in group 0. 
+ """ + + def __init__(self, name, suffixes, path_variable, regex, *args, **kw): + + self.cre = re.compile(regex, re.M) + + def _scan(node, env, path=(), self=self): + node = node.rfile() + if not node.exists(): + return [] + return self.scan(node, path) + + kw['function'] = _scan + kw['path_function'] = FindPathDirs(path_variable) + kw['recursive'] = 1 + kw['skeys'] = suffixes + kw['name'] = name + + apply(Current.__init__, (self,) + args, kw) + + def find_include(self, include, source_dir, path): + n = SCons.Node.FS.find_file(include, (source_dir,) + tuple(path)) + return n, include + + def sort_key(self, include): + return SCons.Node.FS._my_normcase(include) + + def find_include_names(self, node): + return self.cre.findall(node.get_text_contents()) + + def scan(self, node, path=()): + + # cache the includes list in node so we only scan it once: + if node.includes is not None: + includes = node.includes + else: + includes = self.find_include_names (node) + # Intern the names of the include files. Saves some memory + # if the same header is included many times. + node.includes = map(SCons.Util.silent_intern, includes) + + # This is a hand-coded DSU (decorate-sort-undecorate, or + # Schwartzian transform) pattern. The sort key is the raw name + # of the file as specifed on the #include line (including the + # " or <, since that may affect what file is found), which lets + # us keep the sort order constant regardless of whether the file + # is actually found in a Repository or locally. + nodes = [] + source_dir = node.get_dir() + if callable(path): + path = path() + for include in includes: + n, i = self.find_include(include, source_dir, path) + + if n is None: + SCons.Warnings.warn(SCons.Warnings.DependencyWarning, + "No dependency generated for file: %s (included from: %s) -- file not found" % (i, node)) + else: + sortkey = self.sort_key(include) + nodes.append((sortkey, n)) + + nodes.sort() + nodes = map(lambda pair: pair[1], nodes) + return nodes + +class ClassicCPP(Classic): + """ + A Classic Scanner subclass which takes into account the type of + bracketing used to include the file, and uses classic CPP rules + for searching for the files based on the bracketing. + + Note that in order for this to work, the regular expression passed + to the constructor must return the leading bracket in group 0, and + the contained filename in group 1. 
+ """ + def find_include(self, include, source_dir, path): + if include[0] == '"': + paths = (source_dir,) + tuple(path) + else: + paths = tuple(path) + (source_dir,) + + n = SCons.Node.FS.find_file(include[1], paths) + + i = SCons.Util.silent_intern(include[1]) + return n, i + + def sort_key(self, include): + return SCons.Node.FS._my_normcase(string.join(include)) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Script/Interactive.py b/engine/SCons/Script/Interactive.py new file mode 100644 index 0000000..8c27c87 --- /dev/null +++ b/engine/SCons/Script/Interactive.py @@ -0,0 +1,386 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Script/Interactive.py 4577 2009/12/27 19:43:56 scons" + +__doc__ = """ +SCons interactive mode +""" + +# TODO: +# +# This has the potential to grow into something with a really big life +# of its own, which might or might not be a good thing. Nevertheless, +# here are some enhancements that will probably be requested some day +# and are worth keeping in mind (assuming this takes off): +# +# - A command to re-read / re-load the SConscript files. This may +# involve allowing people to specify command-line options (e.g. -f, +# -I, --no-site-dir) that affect how the SConscript files are read. +# +# - Additional command-line options on the "build" command. +# +# Of the supported options that seemed to make sense (after a quick +# pass through the list), the ones that seemed likely enough to be +# used are listed in the man page and have explicit test scripts. +# +# These had code changed in Script/Main.py to support them, but didn't +# seem likely to be used regularly, so had no test scripts added: +# +# build --diskcheck=* +# build --implicit-cache=* +# build --implicit-deps-changed=* +# build --implicit-deps-unchanged=* +# +# These look like they should "just work" with no changes to the +# existing code, but like those above, look unlikely to be used and +# therefore had no test scripts added: +# +# build --random +# +# These I'm not sure about. They might be useful for individual +# "build" commands, and may even work, but they seem unlikely enough +# that we'll wait until they're requested before spending any time on +# writing test scripts for them, or investigating whether they work. +# +# build -q [??? 
is there a useful analog to the exit status?] +# build --duplicate= +# build --profile= +# build --max-drift= +# build --warn=* +# build --Y +# +# - Most of the SCons command-line options that the "build" command +# supports should be settable as default options that apply to all +# subsequent "build" commands. Maybe a "set {option}" command that +# maps to "SetOption('{option}')". +# +# - Need something in the 'help' command that prints the -h output. +# +# - A command to run the configure subsystem separately (must see how +# this interacts with the new automake model). +# +# - Command-line completion of target names; maybe even of SCons options? +# Completion is something that's supported by the Python cmd module, +# so this should be doable without too much trouble. +# + +import cmd +import copy +import os +import re +import shlex +import string +import sys + +try: + import readline +except ImportError: + pass + +class SConsInteractiveCmd(cmd.Cmd): + """\ + build [TARGETS] Build the specified TARGETS and their dependencies. + 'b' is a synonym. + clean [TARGETS] Clean (remove) the specified TARGETS and their + dependencies. 'c' is a synonym. + exit Exit SCons interactive mode. + help [COMMAND] Prints help for the specified COMMAND. 'h' and + '?' are synonyms. + shell [COMMANDLINE] Execute COMMANDLINE in a subshell. 'sh' and '!' + are synonyms. + version Prints SCons version information. + """ + + synonyms = { + 'b' : 'build', + 'c' : 'clean', + 'h' : 'help', + 'scons' : 'build', + 'sh' : 'shell', + } + + def __init__(self, **kw): + cmd.Cmd.__init__(self) + for key, val in kw.items(): + setattr(self, key, val) + + if sys.platform == 'win32': + self.shell_variable = 'COMSPEC' + else: + self.shell_variable = 'SHELL' + + def default(self, argv): + print "*** Unknown command: %s" % argv[0] + + def onecmd(self, line): + line = string.strip(line) + if not line: + print self.lastcmd + return self.emptyline() + self.lastcmd = line + if line[0] == '!': + line = 'shell ' + line[1:] + elif line[0] == '?': + line = 'help ' + line[1:] + if os.sep == '\\': + line = string.replace(line, '\\', '\\\\') + argv = shlex.split(line) + argv[0] = self.synonyms.get(argv[0], argv[0]) + if not argv[0]: + return self.default(line) + else: + try: + func = getattr(self, 'do_' + argv[0]) + except AttributeError: + return self.default(argv) + return func(argv) + + def do_build(self, argv): + """\ + build [TARGETS] Build the specified TARGETS and their + dependencies. 'b' is a synonym. + """ + import SCons.Node + import SCons.SConsign + import SCons.Script.Main + + options = copy.deepcopy(self.options) + + options, targets = self.parser.parse_args(argv[1:], values=options) + + SCons.Script.COMMAND_LINE_TARGETS = targets + + if targets: + SCons.Script.BUILD_TARGETS = targets + else: + # If the user didn't specify any targets on the command line, + # use the list of default targets. + SCons.Script.BUILD_TARGETS = SCons.Script._build_plus_default + + nodes = SCons.Script.Main._build_targets(self.fs, + options, + targets, + self.target_top) + + if not nodes: + return + + # Call each of the Node's alter_targets() methods, which may + # provide additional targets that ended up as part of the build + # (the canonical example being a VariantDir() when we're building + # from a source directory) and which we therefore need their + # state cleared, too. + x = [] + for n in nodes: + x.extend(n.alter_targets()[0]) + nodes.extend(x) + + # Clean up so that we can perform the next build correctly. 
+ # + # We do this by walking over all the children of the targets, + # and clearing their state. + # + # We currently have to re-scan each node to find their + # children, because built nodes have already been partially + # cleared and don't remember their children. (In scons + # 0.96.1 and earlier, this wasn't the case, and we didn't + # have to re-scan the nodes.) + # + # Because we have to re-scan each node, we can't clear the + # nodes as we walk over them, because we may end up rescanning + # a cleared node as we scan a later node. Therefore, only + # store the list of nodes that need to be cleared as we walk + # the tree, and clear them in a separate pass. + # + # XXX: Someone more familiar with the inner workings of scons + # may be able to point out a more efficient way to do this. + + SCons.Script.Main.progress_display("scons: Clearing cached node information ...") + + seen_nodes = {} + + def get_unseen_children(node, parent, seen_nodes=seen_nodes): + def is_unseen(node, seen_nodes=seen_nodes): + return not seen_nodes.has_key(node) + return filter(is_unseen, node.children(scan=1)) + + def add_to_seen_nodes(node, parent, seen_nodes=seen_nodes): + seen_nodes[node] = 1 + + # If this file is in a VariantDir and has a + # corresponding source file in the source tree, remember the + # node in the source tree, too. This is needed in + # particular to clear cached implicit dependencies on the + # source file, since the scanner will scan it if the + # VariantDir was created with duplicate=0. + try: + rfile_method = node.rfile + except AttributeError: + return + else: + rfile = rfile_method() + if rfile != node: + seen_nodes[rfile] = 1 + + for node in nodes: + walker = SCons.Node.Walker(node, + kids_func=get_unseen_children, + eval_func=add_to_seen_nodes) + n = walker.next() + while n: + n = walker.next() + + for node in seen_nodes.keys(): + # Call node.clear() to clear most of the state + node.clear() + # node.clear() doesn't reset node.state, so call + # node.set_state() to reset it manually + node.set_state(SCons.Node.no_state) + node.implicit = None + + # Debug: Uncomment to verify that all Taskmaster reference + # counts have been reset to zero. + #if node.ref_count != 0: + # from SCons.Debug import Trace + # Trace('node %s, ref_count %s !!!\n' % (node, node.ref_count)) + + SCons.SConsign.Reset() + SCons.Script.Main.progress_display("scons: done clearing node information.") + + def do_clean(self, argv): + """\ + clean [TARGETS] Clean (remove) the specified TARGETS + and their dependencies. 'c' is a synonym. + """ + return self.do_build(['build', '--clean'] + argv[1:]) + + def do_EOF(self, argv): + print + self.do_exit(argv) + + def _do_one_help(self, arg): + try: + # If help_<arg>() exists, then call it. 
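+ # Editor's note (illustrative, not part of the upstream patch):
+ # 'help build' first looks for a help_build() method here and, failing
+ # that, falls back to the docstring of do_build(), which is what is
+ # actually printed for the built-in commands.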
+ func = getattr(self, 'help_' + arg) + except AttributeError: + try: + func = getattr(self, 'do_' + arg) + except AttributeError: + doc = None + else: + doc = self._doc_to_help(func) + if doc: + sys.stdout.write(doc + '\n') + sys.stdout.flush() + else: + doc = self.strip_initial_spaces(func()) + if doc: + sys.stdout.write(doc + '\n') + sys.stdout.flush() + + def _doc_to_help(self, obj): + doc = obj.__doc__ + if doc is None: + return '' + return self._strip_initial_spaces(doc) + + def _strip_initial_spaces(self, s): + #lines = s.split('\n') + lines = string.split(s, '\n') + spaces = re.match(' *', lines[0]).group(0) + #def strip_spaces(l): + # if l.startswith(spaces): + # l = l[len(spaces):] + # return l + #return '\n'.join([ strip_spaces(l) for l in lines ]) + def strip_spaces(l, spaces=spaces): + if l[:len(spaces)] == spaces: + l = l[len(spaces):] + return l + lines = map(strip_spaces, lines) + return string.join(lines, '\n') + + def do_exit(self, argv): + """\ + exit Exit SCons interactive mode. + """ + sys.exit(0) + + def do_help(self, argv): + """\ + help [COMMAND] Prints help for the specified COMMAND. 'h' + and '?' are synonyms. + """ + if argv[1:]: + for arg in argv[1:]: + if self._do_one_help(arg): + break + else: + # If bare 'help' is called, print this class's doc + # string (if it has one). + doc = self._doc_to_help(self.__class__) + if doc: + sys.stdout.write(doc + '\n') + sys.stdout.flush() + + def do_shell(self, argv): + """\ + shell [COMMANDLINE] Execute COMMANDLINE in a subshell. 'sh' and + '!' are synonyms. + """ + import subprocess + argv = argv[1:] + if not argv: + argv = os.environ[self.shell_variable] + try: + # Per "[Python-Dev] subprocess insufficiently platform-independent?" + # http://mail.python.org/pipermail/python-dev/2008-August/081979.html "+ + # Doing the right thing with an argument list currently + # requires different shell= values on Windows and Linux. + p = subprocess.Popen(argv, shell=(sys.platform=='win32')) + except EnvironmentError, e: + sys.stderr.write('scons: %s: %s\n' % (argv[0], e.strerror)) + else: + p.wait() + + def do_version(self, argv): + """\ + version Prints SCons version information. + """ + sys.stdout.write(self.parser.version + '\n') + +def interact(fs, parser, options, targets, target_top): + c = SConsInteractiveCmd(prompt = 'scons>>> ', + fs = fs, + parser = parser, + options = options, + targets = targets, + target_top = target_top) + c.cmdloop() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Script/Main.py b/engine/SCons/Script/Main.py new file mode 100644 index 0000000..0d63b10 --- /dev/null +++ b/engine/SCons/Script/Main.py @@ -0,0 +1,1360 @@ +"""SCons.Script + +This file implements the main() function used by the scons script. + +Architecturally, this *is* the scons script, and will likely only be +called from the external "scons" wrapper. Consequently, anything here +should not be, or be considered, part of the build engine. If it's +something that we expect other software to want to use, it should go in +some other module. If it's specific to the "scons" script invocation, +it goes here. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Script/Main.py 4577 2009/12/27 19:43:56 scons" + +import os +import os.path +import string +import sys +import time +import traceback + +# Strip the script directory from sys.path() so on case-insensitive +# (Windows) systems Python doesn't think that the "scons" script is the +# "SCons" package. Replace it with our own version directory so, if +# if they're there, we pick up the right version of the build engine +# modules. +#sys.path = [os.path.join(sys.prefix, +# 'lib', +# 'scons-%d' % SCons.__version__)] + sys.path[1:] + +import SCons.CacheDir +import SCons.Debug +import SCons.Defaults +import SCons.Environment +import SCons.Errors +import SCons.Job +import SCons.Node +import SCons.Node.FS +import SCons.SConf +import SCons.Script +import SCons.Taskmaster +import SCons.Util +import SCons.Warnings + +import SCons.Script.Interactive + +def fetch_win32_parallel_msg(): + # A subsidiary function that exists solely to isolate this import + # so we don't have to pull it in on all platforms, and so that an + # in-line "import" statement in the _main() function below doesn't + # cause warnings about local names shadowing use of the 'SCons' + # globl in nest scopes and UnboundLocalErrors and the like in some + # versions (2.1) of Python. 
+ import SCons.Platform.win32 + return SCons.Platform.win32.parallel_msg + +# + +class SConsPrintHelpException(Exception): + pass + +display = SCons.Util.display +progress_display = SCons.Util.DisplayEngine() + +first_command_start = None +last_command_end = None + +class Progressor: + prev = '' + count = 0 + target_string = '$TARGET' + + def __init__(self, obj, interval=1, file=None, overwrite=False): + if file is None: + file = sys.stdout + + self.obj = obj + self.file = file + self.interval = interval + self.overwrite = overwrite + + if callable(obj): + self.func = obj + elif SCons.Util.is_List(obj): + self.func = self.spinner + elif string.find(obj, self.target_string) != -1: + self.func = self.replace_string + else: + self.func = self.string + + def write(self, s): + self.file.write(s) + self.file.flush() + self.prev = s + + def erase_previous(self): + if self.prev: + length = len(self.prev) + if self.prev[-1] in ('\n', '\r'): + length = length - 1 + self.write(' ' * length + '\r') + self.prev = '' + + def spinner(self, node): + self.write(self.obj[self.count % len(self.obj)]) + + def string(self, node): + self.write(self.obj) + + def replace_string(self, node): + self.write(string.replace(self.obj, self.target_string, str(node))) + + def __call__(self, node): + self.count = self.count + 1 + if (self.count % self.interval) == 0: + if self.overwrite: + self.erase_previous() + self.func(node) + +ProgressObject = SCons.Util.Null() + +def Progress(*args, **kw): + global ProgressObject + ProgressObject = apply(Progressor, args, kw) + +# Task control. +# + +_BuildFailures = [] + +def GetBuildFailures(): + return _BuildFailures + +class BuildTask(SCons.Taskmaster.OutOfDateTask): + """An SCons build task.""" + progress = ProgressObject + + def display(self, message): + display('scons: ' + message) + + def prepare(self): + self.progress(self.targets[0]) + return SCons.Taskmaster.OutOfDateTask.prepare(self) + + def needs_execute(self): + if SCons.Taskmaster.OutOfDateTask.needs_execute(self): + return True + if self.top and self.targets[0].has_builder(): + display("scons: `%s' is up to date." % str(self.node)) + return False + + def execute(self): + if print_time: + start_time = time.time() + global first_command_start + if first_command_start is None: + first_command_start = start_time + SCons.Taskmaster.OutOfDateTask.execute(self) + if print_time: + global cumulative_command_time + global last_command_end + finish_time = time.time() + last_command_end = finish_time + cumulative_command_time = cumulative_command_time+finish_time-start_time + sys.stdout.write("Command execution time: %f seconds\n"%(finish_time-start_time)) + + def do_failed(self, status=2): + _BuildFailures.append(self.exception[1]) + global exit_status + global this_build_status + if self.options.ignore_errors: + SCons.Taskmaster.OutOfDateTask.executed(self) + elif self.options.keep_going: + SCons.Taskmaster.OutOfDateTask.fail_continue(self) + exit_status = status + this_build_status = status + else: + SCons.Taskmaster.OutOfDateTask.fail_stop(self) + exit_status = status + this_build_status = status + + def executed(self): + t = self.targets[0] + if self.top and not t.has_builder() and not t.side_effect: + if not t.exists(): + def classname(obj): + return string.split(str(obj.__class__), '.')[-1] + if classname(t) in ('File', 'Dir', 'Entry'): + errstr="Do not know how to make %s target `%s' (%s)." % (classname(t), t, t.abspath) + else: # Alias or Python or ... + errstr="Do not know how to make %s target `%s'." 
% (classname(t), t) + sys.stderr.write("scons: *** " + errstr) + if not self.options.keep_going: + sys.stderr.write(" Stop.") + sys.stderr.write("\n") + try: + raise SCons.Errors.BuildError(t, errstr) + except KeyboardInterrupt: + raise + except: + self.exception_set() + self.do_failed() + else: + print "scons: Nothing to be done for `%s'." % t + SCons.Taskmaster.OutOfDateTask.executed(self) + else: + SCons.Taskmaster.OutOfDateTask.executed(self) + + def failed(self): + # Handle the failure of a build task. The primary purpose here + # is to display the various types of Errors and Exceptions + # appropriately. + exc_info = self.exc_info() + try: + t, e, tb = exc_info + except ValueError: + t, e = exc_info + tb = None + + if t is None: + # The Taskmaster didn't record an exception for this Task; + # see if the sys module has one. + try: + t, e, tb = sys.exc_info()[:] + except ValueError: + t, e = exc_info + tb = None + + # Deprecated string exceptions will have their string stored + # in the first entry of the tuple. + if e is None: + e = t + + buildError = SCons.Errors.convert_to_BuildError(e) + if not buildError.node: + buildError.node = self.node + + node = buildError.node + if not SCons.Util.is_List(node): + node = [ node ] + nodename = string.join(map(str, node), ', ') + + errfmt = "scons: *** [%s] %s\n" + sys.stderr.write(errfmt % (nodename, buildError)) + + if (buildError.exc_info[2] and buildError.exc_info[1] and + # TODO(1.5) + #not isinstance( + # buildError.exc_info[1], + # (EnvironmentError, SCons.Errors.StopError, SCons.Errors.UserError))): + not isinstance(buildError.exc_info[1], EnvironmentError) and + not isinstance(buildError.exc_info[1], SCons.Errors.StopError) and + not isinstance(buildError.exc_info[1], SCons.Errors.UserError)): + type, value, trace = buildError.exc_info + traceback.print_exception(type, value, trace) + elif tb and print_stacktrace: + sys.stderr.write("scons: internal stack trace:\n") + traceback.print_tb(tb, file=sys.stderr) + + self.exception = (e, buildError, tb) # type, value, traceback + self.do_failed(buildError.exitstatus) + + self.exc_clear() + + def postprocess(self): + if self.top: + t = self.targets[0] + for tp in self.options.tree_printers: + tp.display(t) + if self.options.debug_includes: + tree = t.render_include_tree() + if tree: + print + print tree + SCons.Taskmaster.OutOfDateTask.postprocess(self) + + def make_ready(self): + """Make a task ready for execution""" + SCons.Taskmaster.OutOfDateTask.make_ready(self) + if self.out_of_date and self.options.debug_explain: + explanation = self.out_of_date[0].explain() + if explanation: + sys.stdout.write("scons: " + explanation) + +class CleanTask(SCons.Taskmaster.AlwaysTask): + """An SCons clean task.""" + def fs_delete(self, path, pathstr, remove=1): + try: + if os.path.lexists(path): + if os.path.isfile(path) or os.path.islink(path): + if remove: os.unlink(path) + display("Removed " + pathstr) + elif os.path.isdir(path) and not os.path.islink(path): + # delete everything in the dir + entries = os.listdir(path) + # Sort for deterministic output (os.listdir() Can + # return entries in a random order). + entries.sort() + for e in entries: + p = os.path.join(path, e) + s = os.path.join(pathstr, e) + if os.path.isfile(p): + if remove: os.unlink(p) + display("Removed " + s) + else: + self.fs_delete(p, s, remove) + # then delete dir itself + if remove: os.rmdir(path) + display("Removed directory " + pathstr) + else: + errstr = "Path '%s' exists but isn't a file or directory." 
+ raise SCons.Errors.UserError(errstr % (pathstr)) + except SCons.Errors.UserError, e: + print e + except (IOError, OSError), e: + print "scons: Could not remove '%s':" % pathstr, e.strerror + + def show(self): + target = self.targets[0] + if (target.has_builder() or target.side_effect) and not target.noclean: + for t in self.targets: + if not t.isdir(): + display("Removed " + str(t)) + if SCons.Environment.CleanTargets.has_key(target): + files = SCons.Environment.CleanTargets[target] + for f in files: + self.fs_delete(f.abspath, str(f), 0) + + def remove(self): + target = self.targets[0] + if (target.has_builder() or target.side_effect) and not target.noclean: + for t in self.targets: + try: + removed = t.remove() + except OSError, e: + # An OSError may indicate something like a permissions + # issue, an IOError would indicate something like + # the file not existing. In either case, print a + # message and keep going to try to remove as many + # targets aa possible. + print "scons: Could not remove '%s':" % str(t), e.strerror + else: + if removed: + display("Removed " + str(t)) + if SCons.Environment.CleanTargets.has_key(target): + files = SCons.Environment.CleanTargets[target] + for f in files: + self.fs_delete(f.abspath, str(f)) + + execute = remove + + # We want the Taskmaster to update the Node states (and therefore + # handle reference counts, etc.), but we don't want to call + # back to the Node's post-build methods, which would do things + # we don't want, like store .sconsign information. + executed = SCons.Taskmaster.Task.executed_without_callbacks + + # Have the taskmaster arrange to "execute" all of the targets, because + # we'll figure out ourselves (in remove() or show() above) whether + # anything really needs to be done. + make_ready = SCons.Taskmaster.Task.make_ready_all + + def prepare(self): + pass + +class QuestionTask(SCons.Taskmaster.AlwaysTask): + """An SCons task for the -q (question) option.""" + def prepare(self): + pass + + def execute(self): + if self.targets[0].get_state() != SCons.Node.up_to_date or \ + (self.top and not self.targets[0].exists()): + global exit_status + global this_build_status + exit_status = 1 + this_build_status = 1 + self.tm.stop() + + def executed(self): + pass + + +class TreePrinter: + def __init__(self, derived=False, prune=False, status=False): + self.derived = derived + self.prune = prune + self.status = status + def get_all_children(self, node): + return node.all_children() + def get_derived_children(self, node): + children = node.all_children(None) + return filter(lambda x: x.has_builder(), children) + def display(self, t): + if self.derived: + func = self.get_derived_children + else: + func = self.get_all_children + s = self.status and 2 or 0 + SCons.Util.print_tree(t, func, prune=self.prune, showtags=s) + + +def python_version_string(): + return string.split(sys.version)[0] + +def python_version_unsupported(version=sys.version_info): + return version < (1, 5, 2) + +def python_version_deprecated(version=sys.version_info): + return version < (2, 4, 0) + + +# Global variables + +print_objects = 0 +print_memoizer = 0 +print_stacktrace = 0 +print_time = 0 +sconscript_time = 0 +cumulative_command_time = 0 +exit_status = 0 # final exit status, assume success by default +this_build_status = 0 # "exit status" of an individual build +num_jobs = None +delayed_warnings = [] + +class FakeOptionParser: + """ + A do-nothing option parser, used for the initial OptionsParser variable. 
+ + During normal SCons operation, the OptionsParser is created right + away by the main() function. Certain tests scripts however, can + introspect on different Tool modules, the initialization of which + can try to add a new, local option to an otherwise uninitialized + OptionsParser object. This allows that introspection to happen + without blowing up. + + """ + class FakeOptionValues: + def __getattr__(self, attr): + return None + values = FakeOptionValues() + def add_local_option(self, *args, **kw): + pass + +OptionsParser = FakeOptionParser() + +def AddOption(*args, **kw): + if not kw.has_key('default'): + kw['default'] = None + result = apply(OptionsParser.add_local_option, args, kw) + return result + +def GetOption(name): + return getattr(OptionsParser.values, name) + +def SetOption(name, value): + return OptionsParser.values.set_option(name, value) + +# +class Stats: + def __init__(self): + self.stats = [] + self.labels = [] + self.append = self.do_nothing + self.print_stats = self.do_nothing + def enable(self, outfp): + self.outfp = outfp + self.append = self.do_append + self.print_stats = self.do_print + def do_nothing(self, *args, **kw): + pass + +class CountStats(Stats): + def do_append(self, label): + self.labels.append(label) + self.stats.append(SCons.Debug.fetchLoggedInstances()) + def do_print(self): + stats_table = {} + for s in self.stats: + for n in map(lambda t: t[0], s): + stats_table[n] = [0, 0, 0, 0] + i = 0 + for s in self.stats: + for n, c in s: + stats_table[n][i] = c + i = i + 1 + keys = stats_table.keys() + keys.sort() + self.outfp.write("Object counts:\n") + pre = [" "] + post = [" %s\n"] + l = len(self.stats) + fmt1 = string.join(pre + [' %7s']*l + post, '') + fmt2 = string.join(pre + [' %7d']*l + post, '') + labels = self.labels[:l] + labels.append(("", "Class")) + self.outfp.write(fmt1 % tuple(map(lambda x: x[0], labels))) + self.outfp.write(fmt1 % tuple(map(lambda x: x[1], labels))) + for k in keys: + r = stats_table[k][:l] + [k] + self.outfp.write(fmt2 % tuple(r)) + +count_stats = CountStats() + +class MemStats(Stats): + def do_append(self, label): + self.labels.append(label) + self.stats.append(SCons.Debug.memory()) + def do_print(self): + fmt = 'Memory %-32s %12d\n' + for label, stats in map(None, self.labels, self.stats): + self.outfp.write(fmt % (label, stats)) + +memory_stats = MemStats() + +# utility functions + +def _scons_syntax_error(e): + """Handle syntax errors. Print out a message and show where the error + occurred. + """ + etype, value, tb = sys.exc_info() + lines = traceback.format_exception_only(etype, value) + for line in lines: + sys.stderr.write(line+'\n') + sys.exit(2) + +def find_deepest_user_frame(tb): + """ + Find the deepest stack frame that is not part of SCons. + + Input is a "pre-processed" stack trace in the form + returned by traceback.extract_tb() or traceback.extract_stack() + """ + + tb.reverse() + + # find the deepest traceback frame that is not part + # of SCons: + for frame in tb: + filename = frame[0] + if string.find(filename, os.sep+'SCons'+os.sep) == -1: + return frame + return tb[0] + +def _scons_user_error(e): + """Handle user errors. Print out a message and a description of the + error, along with the line number and routine where it occured. + The file and line number will be the deepest stack frame that is + not part of SCons itself. 
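+
+ Illustrative output (editor's note; the file name, line number and
+ routine are made up):
+
+     scons: *** <description of the error>
+     File "SConstruct", line 12, in <routine>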
+ """ + global print_stacktrace + etype, value, tb = sys.exc_info() + if print_stacktrace: + traceback.print_exception(etype, value, tb) + filename, lineno, routine, dummy = find_deepest_user_frame(traceback.extract_tb(tb)) + sys.stderr.write("\nscons: *** %s\n" % value) + sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine)) + sys.exit(2) + +def _scons_user_warning(e): + """Handle user warnings. Print out a message and a description of + the warning, along with the line number and routine where it occured. + The file and line number will be the deepest stack frame that is + not part of SCons itself. + """ + etype, value, tb = sys.exc_info() + filename, lineno, routine, dummy = find_deepest_user_frame(traceback.extract_tb(tb)) + sys.stderr.write("\nscons: warning: %s\n" % e) + sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine)) + +def _scons_internal_warning(e): + """Slightly different from _scons_user_warning in that we use the + *current call stack* rather than sys.exc_info() to get our stack trace. + This is used by the warnings framework to print warnings.""" + filename, lineno, routine, dummy = find_deepest_user_frame(traceback.extract_stack()) + sys.stderr.write("\nscons: warning: %s\n" % e[0]) + sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine)) + +def _scons_internal_error(): + """Handle all errors but user errors. Print out a message telling + the user what to do in this case and print a normal trace. + """ + print 'internal error' + traceback.print_exc() + sys.exit(2) + +def _SConstruct_exists(dirname='', repositories=[], filelist=None): + """This function checks that an SConstruct file exists in a directory. + If so, it returns the path of the file. By default, it checks the + current directory. + """ + if not filelist: + filelist = ['SConstruct', 'Sconstruct', 'sconstruct'] + for file in filelist: + sfile = os.path.join(dirname, file) + if os.path.isfile(sfile): + return sfile + if not os.path.isabs(sfile): + for rep in repositories: + if os.path.isfile(os.path.join(rep, sfile)): + return sfile + return None + +def _set_debug_values(options): + global print_memoizer, print_objects, print_stacktrace, print_time + + debug_values = options.debug + + if "count" in debug_values: + # All of the object counts are within "if __debug__:" blocks, + # which get stripped when running optimized (with python -O or + # from compiled *.pyo files). Provide a warning if __debug__ is + # stripped, so it doesn't just look like --debug=count is broken. + enable_count = False + if __debug__: enable_count = True + if enable_count: + count_stats.enable(sys.stdout) + else: + msg = "--debug=count is not supported when running SCons\n" + \ + "\twith the python -O option or optimized (.pyo) modules." 
+ SCons.Warnings.warn(SCons.Warnings.NoObjectCountWarning, msg) + if "dtree" in debug_values: + options.tree_printers.append(TreePrinter(derived=True)) + options.debug_explain = ("explain" in debug_values) + if "findlibs" in debug_values: + SCons.Scanner.Prog.print_find_libs = "findlibs" + options.debug_includes = ("includes" in debug_values) + print_memoizer = ("memoizer" in debug_values) + if "memory" in debug_values: + memory_stats.enable(sys.stdout) + print_objects = ("objects" in debug_values) + if "presub" in debug_values: + SCons.Action.print_actions_presub = 1 + if "stacktrace" in debug_values: + print_stacktrace = 1 + if "stree" in debug_values: + options.tree_printers.append(TreePrinter(status=True)) + if "time" in debug_values: + print_time = 1 + if "tree" in debug_values: + options.tree_printers.append(TreePrinter()) + +def _create_path(plist): + path = '.' + for d in plist: + if os.path.isabs(d): + path = d + else: + path = path + '/' + d + return path + +def _load_site_scons_dir(topdir, site_dir_name=None): + """Load the site_scons dir under topdir. + Adds site_scons to sys.path, imports site_scons/site_init.py, + and adds site_scons/site_tools to default toolpath.""" + if site_dir_name: + err_if_not_found = True # user specified: err if missing + else: + site_dir_name = "site_scons" + err_if_not_found = False + + site_dir = os.path.join(topdir.path, site_dir_name) + if not os.path.exists(site_dir): + if err_if_not_found: + raise SCons.Errors.UserError, "site dir %s not found."%site_dir + return + + site_init_filename = "site_init.py" + site_init_modname = "site_init" + site_tools_dirname = "site_tools" + sys.path = [os.path.abspath(site_dir)] + sys.path + site_init_file = os.path.join(site_dir, site_init_filename) + site_tools_dir = os.path.join(site_dir, site_tools_dirname) + if os.path.exists(site_init_file): + import imp + # TODO(2.4): turn this into try:-except:-finally: + try: + try: + fp, pathname, description = imp.find_module(site_init_modname, + [site_dir]) + # Load the file into SCons.Script namespace. This is + # opaque and clever; m is the module object for the + # SCons.Script module, and the exec ... in call executes a + # file (or string containing code) in the context of the + # module's dictionary, so anything that code defines ends + # up adding to that module. This is really short, but all + # the error checking makes it longer. + try: + m = sys.modules['SCons.Script'] + except Exception, e: + fmt = 'cannot import site_init.py: missing SCons.Script module %s' + raise SCons.Errors.InternalError, fmt % repr(e) + try: + # This is the magic. + exec fp in m.__dict__ + except KeyboardInterrupt: + raise + except Exception, e: + fmt = '*** Error loading site_init file %s:\n' + sys.stderr.write(fmt % repr(site_init_file)) + raise + except KeyboardInterrupt: + raise + except ImportError, e: + fmt = '*** cannot import site init file %s:\n' + sys.stderr.write(fmt % repr(site_init_file)) + raise + finally: + if fp: + fp.close() + if os.path.exists(site_tools_dir): + SCons.Tool.DefaultToolpath.append(os.path.abspath(site_tools_dir)) + +def version_string(label, module): + version = module.__version__ + build = module.__build__ + if build: + if build[0] != '.': + build = '.' 
+ build + version = version + build + fmt = "\t%s: v%s, %s, by %s on %s\n" + return fmt % (label, + version, + module.__date__, + module.__developer__, + module.__buildsys__) + +def _main(parser): + global exit_status + global this_build_status + + options = parser.values + + # Here's where everything really happens. + + # First order of business: set up default warnings and then + # handle the user's warning options, so that we can issue (or + # suppress) appropriate warnings about anything that might happen, + # as configured by the user. + + default_warnings = [ SCons.Warnings.CorruptSConsignWarning, + SCons.Warnings.DeprecatedWarning, + SCons.Warnings.DuplicateEnvironmentWarning, + SCons.Warnings.FutureReservedVariableWarning, + SCons.Warnings.LinkWarning, + SCons.Warnings.MissingSConscriptWarning, + SCons.Warnings.NoMD5ModuleWarning, + SCons.Warnings.NoMetaclassSupportWarning, + SCons.Warnings.NoObjectCountWarning, + SCons.Warnings.NoParallelSupportWarning, + SCons.Warnings.MisleadingKeywordsWarning, + SCons.Warnings.ReservedVariableWarning, + SCons.Warnings.StackSizeWarning, + SCons.Warnings.VisualVersionMismatch, + SCons.Warnings.VisualCMissingWarning, + ] + + for warning in default_warnings: + SCons.Warnings.enableWarningClass(warning) + SCons.Warnings._warningOut = _scons_internal_warning + SCons.Warnings.process_warn_strings(options.warn) + + # Now that we have the warnings configuration set up, we can actually + # issue (or suppress) any warnings about warning-worthy things that + # occurred while the command-line options were getting parsed. + try: + dw = options.delayed_warnings + except AttributeError: + pass + else: + delayed_warnings.extend(dw) + for warning_type, message in delayed_warnings: + SCons.Warnings.warn(warning_type, message) + + if options.diskcheck: + SCons.Node.FS.set_diskcheck(options.diskcheck) + + # Next, we want to create the FS object that represents the outside + # world's file system, as that's central to a lot of initialization. + # To do this, however, we need to be in the directory from which we + # want to start everything, which means first handling any relevant + # options that might cause us to chdir somewhere (-C, -D, -U, -u). + if options.directory: + script_dir = os.path.abspath(_create_path(options.directory)) + else: + script_dir = os.getcwd() + + target_top = None + if options.climb_up: + target_top = '.' # directory to prepend to targets + while script_dir and not _SConstruct_exists(script_dir, + options.repository, + options.file): + script_dir, last_part = os.path.split(script_dir) + if last_part: + target_top = os.path.join(last_part, target_top) + else: + script_dir = '' + + if script_dir and script_dir != os.getcwd(): + display("scons: Entering directory `%s'" % script_dir) + try: + os.chdir(script_dir) + except OSError: + sys.stderr.write("Could not change directory to %s\n" % script_dir) + + # Now that we're in the top-level SConstruct directory, go ahead + # and initialize the FS object that represents the file system, + # and make it the build engine default. + fs = SCons.Node.FS.get_default_fs() + + for rep in options.repository: + fs.Repository(rep) + + # Now that we have the FS object, the next order of business is to + # check for an SConstruct file (or other specified config file). + # If there isn't one, we can bail before doing any more work. 
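
The search that follows relies on _SConstruct_exists(), defined earlier in this file; a minimal standalone sketch of the same lookup order, independent of the options/Repository machinery (directory name assumed to be the current one):

    import os

    def find_sconstruct(dirname='.'):
        # mirrors _SConstruct_exists(): first of the three accepted names wins
        for name in ['SConstruct', 'Sconstruct', 'sconstruct']:
            candidate = os.path.join(dirname, name)
            if os.path.isfile(candidate):
                return candidate
        return None

    print(find_sconstruct())    # e.g. './SConstruct', or None if nothing matches

The code below performs the same check, but also honors -f/--file arguments and any Repository() directories.
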
+ scripts = [] + if options.file: + scripts.extend(options.file) + if not scripts: + sfile = _SConstruct_exists(repositories=options.repository, + filelist=options.file) + if sfile: + scripts.append(sfile) + + if not scripts: + if options.help: + # There's no SConstruct, but they specified -h. + # Give them the options usage now, before we fail + # trying to read a non-existent SConstruct file. + raise SConsPrintHelpException + raise SCons.Errors.UserError, "No SConstruct file found." + + if scripts[0] == "-": + d = fs.getcwd() + else: + d = fs.File(scripts[0]).dir + fs.set_SConstruct_dir(d) + + _set_debug_values(options) + SCons.Node.implicit_cache = options.implicit_cache + SCons.Node.implicit_deps_changed = options.implicit_deps_changed + SCons.Node.implicit_deps_unchanged = options.implicit_deps_unchanged + + if options.no_exec: + SCons.SConf.dryrun = 1 + SCons.Action.execute_actions = None + if options.question: + SCons.SConf.dryrun = 1 + if options.clean: + SCons.SConf.SetBuildType('clean') + if options.help: + SCons.SConf.SetBuildType('help') + SCons.SConf.SetCacheMode(options.config) + SCons.SConf.SetProgressDisplay(progress_display) + + if options.no_progress or options.silent: + progress_display.set_mode(0) + + if options.site_dir: + _load_site_scons_dir(d, options.site_dir) + elif not options.no_site_dir: + _load_site_scons_dir(d) + + if options.include_dir: + sys.path = options.include_dir + sys.path + + # That should cover (most of) the options. Next, set up the variables + # that hold command-line arguments, so the SConscript files that we + # read and execute have access to them. + targets = [] + xmit_args = [] + for a in parser.largs: + if a[:1] == '-': + continue + if '=' in a: + xmit_args.append(a) + else: + targets.append(a) + SCons.Script._Add_Targets(targets + parser.rargs) + SCons.Script._Add_Arguments(xmit_args) + + # If stdout is not a tty, replace it with a wrapper object to call flush + # after every write. + # + # Tty devices automatically flush after every newline, so the replacement + # isn't necessary. Furthermore, if we replace sys.stdout, the readline + # module will no longer work. This affects the behavior during + # --interactive mode. --interactive should only be used when stdin and + # stdout refer to a tty. + if not hasattr(sys.stdout, 'isatty') or not sys.stdout.isatty(): + sys.stdout = SCons.Util.Unbuffered(sys.stdout) + if not hasattr(sys.stderr, 'isatty') or not sys.stderr.isatty(): + sys.stderr = SCons.Util.Unbuffered(sys.stderr) + + memory_stats.append('before reading SConscript files:') + count_stats.append(('pre-', 'read')) + + # And here's where we (finally) read the SConscript files. + + progress_display("scons: Reading SConscript files ...") + + start_time = time.time() + try: + for script in scripts: + SCons.Script._SConscript._SConscript(fs, script) + except SCons.Errors.StopError, e: + # We had problems reading an SConscript file, such as it + # couldn't be copied in to the VariantDir. Since we're just + # reading SConscript files and haven't started building + # things yet, stop regardless of whether they used -i or -k + # or anything else. 
+ sys.stderr.write("scons: *** %s Stop.\n" % e) + exit_status = 2 + sys.exit(exit_status) + global sconscript_time + sconscript_time = time.time() - start_time + + progress_display("scons: done reading SConscript files.") + + memory_stats.append('after reading SConscript files:') + count_stats.append(('post-', 'read')) + + # Re-{enable,disable} warnings in case they disabled some in + # the SConscript file. + # + # We delay enabling the PythonVersionWarning class until here so that, + # if they explicity disabled it in either in the command line or in + # $SCONSFLAGS, or in the SConscript file, then the search through + # the list of deprecated warning classes will find that disabling + # first and not issue the warning. + SCons.Warnings.enableWarningClass(SCons.Warnings.PythonVersionWarning) + SCons.Warnings.process_warn_strings(options.warn) + + # Now that we've read the SConscript files, we can check for the + # warning about deprecated Python versions--delayed until here + # in case they disabled the warning in the SConscript files. + if python_version_deprecated(): + msg = "Support for pre-2.4 Python (%s) is deprecated.\n" + \ + " If this will cause hardship, contact dev@scons.tigris.org." + SCons.Warnings.warn(SCons.Warnings.PythonVersionWarning, + msg % python_version_string()) + + if not options.help: + SCons.SConf.CreateConfigHBuilder(SCons.Defaults.DefaultEnvironment()) + + # Now re-parse the command-line options (any to the left of a '--' + # argument, that is) with any user-defined command-line options that + # the SConscript files may have added to the parser object. This will + # emit the appropriate error message and exit if any unknown option + # was specified on the command line. + + parser.preserve_unknown_options = False + parser.parse_args(parser.largs, options) + + if options.help: + help_text = SCons.Script.help_text + if help_text is None: + # They specified -h, but there was no Help() inside the + # SConscript files. Give them the options usage. + raise SConsPrintHelpException + else: + print help_text + print "Use scons -H for help about command-line options." + exit_status = 0 + return + + # Change directory to the top-level SConstruct directory, then tell + # the Node.FS subsystem that we're all done reading the SConscript + # files and calling Repository() and VariantDir() and changing + # directories and the like, so it can go ahead and start memoizing + # the string values of file system nodes. 
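
A note on the user-defined command-line options re-parsed above: they originate from AddOption() calls in the SConscript files, which land in OptionsParser.add_local_option(). A typical SConscript-side sketch (the option name and default are invented; this runs inside scons, not as a standalone script):

    # inside an SConstruct/SConscript file
    AddOption('--prefix', dest='prefix', nargs=1, type='string',
              action='store', default='/usr/local',
              help='installation prefix')
    env = Environment(PREFIX=GetOption('prefix'))
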
+ + fs.chdir(fs.Top) + + SCons.Node.FS.save_strings(1) + + # Now that we've read the SConscripts we can set the options + # that are SConscript settable: + SCons.Node.implicit_cache = options.implicit_cache + SCons.Node.FS.set_duplicate(options.duplicate) + fs.set_max_drift(options.max_drift) + + SCons.Job.explicit_stack_size = options.stack_size + + if options.md5_chunksize: + SCons.Node.FS.File.md5_chunksize = options.md5_chunksize + + platform = SCons.Platform.platform_module() + + if options.interactive: + SCons.Script.Interactive.interact(fs, OptionsParser, options, + targets, target_top) + + else: + + # Build the targets + nodes = _build_targets(fs, options, targets, target_top) + if not nodes: + exit_status = 2 + +def _build_targets(fs, options, targets, target_top): + + global this_build_status + this_build_status = 0 + + progress_display.set_mode(not (options.no_progress or options.silent)) + display.set_mode(not options.silent) + SCons.Action.print_actions = not options.silent + SCons.Action.execute_actions = not options.no_exec + SCons.Node.FS.do_store_info = not options.no_exec + SCons.SConf.dryrun = options.no_exec + + if options.diskcheck: + SCons.Node.FS.set_diskcheck(options.diskcheck) + + SCons.CacheDir.cache_enabled = not options.cache_disable + SCons.CacheDir.cache_debug = options.cache_debug + SCons.CacheDir.cache_force = options.cache_force + SCons.CacheDir.cache_show = options.cache_show + + if options.no_exec: + CleanTask.execute = CleanTask.show + else: + CleanTask.execute = CleanTask.remove + + lookup_top = None + if targets or SCons.Script.BUILD_TARGETS != SCons.Script._build_plus_default: + # They specified targets on the command line or modified + # BUILD_TARGETS in the SConscript file(s), so if they used -u, + # -U or -D, we have to look up targets relative to the top, + # but we build whatever they specified. + if target_top: + lookup_top = fs.Dir(target_top) + target_top = None + + targets = SCons.Script.BUILD_TARGETS + else: + # There are no targets specified on the command line, + # so if they used -u, -U or -D, we may have to restrict + # what actually gets built. + d = None + if target_top: + if options.climb_up == 1: + # -u, local directory and below + target_top = fs.Dir(target_top) + lookup_top = target_top + elif options.climb_up == 2: + # -D, all Default() targets + target_top = None + lookup_top = None + elif options.climb_up == 3: + # -U, local SConscript Default() targets + target_top = fs.Dir(target_top) + def check_dir(x, target_top=target_top): + if hasattr(x, 'cwd') and not x.cwd is None: + cwd = x.cwd.srcnode() + return cwd == target_top + else: + # x doesn't have a cwd, so it's either not a target, + # or not a file, so go ahead and keep it as a default + # target and let the engine sort it out: + return 1 + d = filter(check_dir, SCons.Script.DEFAULT_TARGETS) + SCons.Script.DEFAULT_TARGETS[:] = d + target_top = None + lookup_top = None + + targets = SCons.Script._Get_Default_Targets(d, fs) + + if not targets: + sys.stderr.write("scons: *** No targets specified and no Default() targets found. Stop.\n") + return None + + def Entry(x, ltop=lookup_top, ttop=target_top, fs=fs): + if isinstance(x, SCons.Node.Node): + node = x + else: + node = None + # Why would ltop be None? Unfortunately this happens. + if ltop is None: ltop = '' + # Curdir becomes important when SCons is called with -u, -C, + # or similar option that changes directory, and so the paths + # of targets given on the command line need to be adjusted. 
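
A rough standalone picture of the adjustment computed just below, assuming scons was invoked in a subdirectory src/ with -u, so that the top-level SConstruct directory has since become the cwd (names invented):

    import os

    lookup_top = 'src'                              # what str(ltop) would yield here
    curdir = os.path.join(os.getcwd(), lookup_top)  # the directory scons was started in
    # relative target names from the command line are then resolved against curdir
    # by the lookups below, falling back to fs.Entry(x, directory=ltop)
    print(curdir)
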
+ curdir = os.path.join(os.getcwd(), str(ltop)) + for lookup in SCons.Node.arg2nodes_lookups: + node = lookup(x, curdir=curdir) + if node is not None: + break + if node is None: + node = fs.Entry(x, directory=ltop, create=1) + if ttop and not node.is_under(ttop): + if isinstance(node, SCons.Node.FS.Dir) and ttop.is_under(node): + node = ttop + else: + node = None + return node + + nodes = filter(None, map(Entry, targets)) + + task_class = BuildTask # default action is to build targets + opening_message = "Building targets ..." + closing_message = "done building targets." + if options.keep_going: + failure_message = "done building targets (errors occurred during build)." + else: + failure_message = "building terminated because of errors." + if options.question: + task_class = QuestionTask + try: + if options.clean: + task_class = CleanTask + opening_message = "Cleaning targets ..." + closing_message = "done cleaning targets." + if options.keep_going: + failure_message = "done cleaning targets (errors occurred during clean)." + else: + failure_message = "cleaning terminated because of errors." + except AttributeError: + pass + + task_class.progress = ProgressObject + + if options.random: + def order(dependencies): + """Randomize the dependencies.""" + import random + # This is cribbed from the implementation of + # random.shuffle() in Python 2.X. + d = dependencies + for i in xrange(len(d)-1, 0, -1): + j = int(random.random() * (i+1)) + d[i], d[j] = d[j], d[i] + return d + else: + def order(dependencies): + """Leave the order of dependencies alone.""" + return dependencies + + if options.taskmastertrace_file == '-': + tmtrace = sys.stdout + elif options.taskmastertrace_file: + tmtrace = open(options.taskmastertrace_file, 'wb') + else: + tmtrace = None + taskmaster = SCons.Taskmaster.Taskmaster(nodes, task_class, order, tmtrace) + + # Let the BuildTask objects get at the options to respond to the + # various print_* settings, tree_printer list, etc. 
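
The order callable handed to the Taskmaster above only has to return some permutation (or the identity) of the dependency list; the --random variant is a plain Fisher-Yates shuffle, which can be tried standalone:

    import random

    deps = ['a.o', 'b.o', 'c.o', 'd.o']      # hypothetical dependency names
    d = deps[:]
    for i in range(len(d) - 1, 0, -1):       # same loop as the xrange version above
        j = int(random.random() * (i + 1))
        d[i], d[j] = d[j], d[i]
    print(d)                                 # some permutation of deps
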
+ BuildTask.options = options + + global num_jobs + num_jobs = options.num_jobs + jobs = SCons.Job.Jobs(num_jobs, taskmaster) + if num_jobs > 1: + msg = None + if jobs.num_jobs == 1: + msg = "parallel builds are unsupported by this version of Python;\n" + \ + "\tignoring -j or num_jobs option.\n" + elif sys.platform == 'win32': + msg = fetch_win32_parallel_msg() + if msg: + SCons.Warnings.warn(SCons.Warnings.NoParallelSupportWarning, msg) + + memory_stats.append('before building targets:') + count_stats.append(('pre-', 'build')) + + def jobs_postfunc( + jobs=jobs, + options=options, + closing_message=closing_message, + failure_message=failure_message + ): + if jobs.were_interrupted(): + if not options.no_progress and not options.silent: + sys.stderr.write("scons: Build interrupted.\n") + global exit_status + global this_build_status + exit_status = 2 + this_build_status = 2 + + if this_build_status: + progress_display("scons: " + failure_message) + else: + progress_display("scons: " + closing_message) + if not options.no_exec: + if jobs.were_interrupted(): + progress_display("scons: writing .sconsign file.") + SCons.SConsign.write() + + progress_display("scons: " + opening_message) + jobs.run(postfunc = jobs_postfunc) + + memory_stats.append('after building targets:') + count_stats.append(('post-', 'build')) + + return nodes + +def _exec_main(parser, values): + sconsflags = os.environ.get('SCONSFLAGS', '') + all_args = string.split(sconsflags) + sys.argv[1:] + + options, args = parser.parse_args(all_args, values) + + if type(options.debug) == type([]) and "pdb" in options.debug: + import pdb + pdb.Pdb().runcall(_main, parser) + elif options.profile_file: + try: + from cProfile import Profile + except ImportError, e: + from profile import Profile + + # Some versions of Python 2.4 shipped a profiler that had the + # wrong 'c_exception' entry in its dispatch table. Make sure + # we have the right one. (This may put an unnecessary entry + # in the table in earlier versions of Python, but its presence + # shouldn't hurt anything). + try: + dispatch = Profile.dispatch + except AttributeError: + pass + else: + dispatch['c_exception'] = Profile.trace_dispatch_return + + prof = Profile() + try: + prof.runcall(_main, parser) + except SConsPrintHelpException, e: + prof.dump_stats(options.profile_file) + raise e + except SystemExit: + pass + prof.dump_stats(options.profile_file) + else: + _main(parser) + +def main(): + global OptionsParser + global exit_status + global first_command_start + + # Check up front for a Python version we do not support. We + # delay the check for deprecated Python versions until later, + # after the SConscript files have been read, in case they + # disable that warning. + if python_version_unsupported(): + msg = "scons: *** SCons version %s does not run under Python version %s.\n" + sys.stderr.write(msg % (SCons.__version__, python_version_string())) + sys.exit(1) + + parts = ["SCons by Steven Knight et al.:\n"] + try: + import __main__ + parts.append(version_string("script", __main__)) + except (ImportError, AttributeError): + # On Windows there is no scons.py, so there is no + # __main__.__version__, hence there is no script version. 
+ pass + parts.append(version_string("engine", SCons)) + parts.append("Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation") + version = string.join(parts, '') + + import SConsOptions + parser = SConsOptions.Parser(version) + values = SConsOptions.SConsValues(parser.get_default_values()) + + OptionsParser = parser + + try: + _exec_main(parser, values) + except SystemExit, s: + if s: + exit_status = s + except KeyboardInterrupt: + print("scons: Build interrupted.") + sys.exit(2) + except SyntaxError, e: + _scons_syntax_error(e) + except SCons.Errors.InternalError: + _scons_internal_error() + except SCons.Errors.UserError, e: + _scons_user_error(e) + except SConsPrintHelpException: + parser.print_help() + exit_status = 0 + except SCons.Errors.BuildError, e: + exit_status = e.exitstatus + except: + # An exception here is likely a builtin Python exception Python + # code in an SConscript file. Show them precisely what the + # problem was and where it happened. + SCons.Script._SConscript.SConscript_exception() + sys.exit(2) + + memory_stats.print_stats() + count_stats.print_stats() + + if print_objects: + SCons.Debug.listLoggedInstances('*') + #SCons.Debug.dumpLoggedInstances('*') + + if print_memoizer: + SCons.Memoize.Dump("Memoizer (memory cache) hits and misses:") + + # Dump any development debug info that may have been enabled. + # These are purely for internal debugging during development, so + # there's no need to control them with --debug= options; they're + # controlled by changing the source code. + SCons.Debug.dump_caller_counts() + SCons.Taskmaster.dump_stats() + + if print_time: + total_time = time.time() - SCons.Script.start_time + if num_jobs == 1: + ct = cumulative_command_time + else: + if last_command_end is None or first_command_start is None: + ct = 0.0 + else: + ct = last_command_end - first_command_start + scons_time = total_time - sconscript_time - ct + print "Total build time: %f seconds"%total_time + print "Total SConscript file execution time: %f seconds"%sconscript_time + print "Total SCons execution time: %f seconds"%scons_time + print "Total command execution time: %f seconds"%ct + + sys.exit(exit_status) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Script/SConsOptions.py b/engine/SCons/Script/SConsOptions.py new file mode 100644 index 0000000..171268a --- /dev/null +++ b/engine/SCons/Script/SConsOptions.py @@ -0,0 +1,944 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Script/SConsOptions.py 4577 2009/12/27 19:43:56 scons" + +import optparse +import re +import string +import sys +import textwrap + +try: + no_hyphen_re = re.compile(r'(\s+|(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))') +except re.error: + # Pre-2.0 Python versions don't have the (?<= negative + # look-behind assertion. + no_hyphen_re = re.compile(r'(\s+|-*\w{2,}-(?=\w{2,}))') + +try: + from gettext import gettext +except ImportError: + def gettext(message): + return message +_ = gettext + +import SCons.Node.FS +import SCons.Warnings + +OptionValueError = optparse.OptionValueError +SUPPRESS_HELP = optparse.SUPPRESS_HELP + +diskcheck_all = SCons.Node.FS.diskcheck_types() + +def diskcheck_convert(value): + if value is None: + return [] + if not SCons.Util.is_List(value): + value = string.split(value, ',') + result = [] + for v in map(string.lower, value): + if v == 'all': + result = diskcheck_all + elif v == 'none': + result = [] + elif v in diskcheck_all: + result.append(v) + else: + raise ValueError, v + return result + +class SConsValues(optparse.Values): + """ + Holder class for uniform access to SCons options, regardless + of whether or not they can be set on the command line or in the + SConscript files (using the SetOption() function). + + A SCons option value can originate three different ways: + + 1) set on the command line; + 2) set in an SConscript file; + 3) the default setting (from the the op.add_option() + calls in the Parser() function, below). + + The command line always overrides a value set in a SConscript file, + which in turn always overrides default settings. Because we want + to support user-specified options in the SConscript file itself, + though, we may not know about all of the options when the command + line is first parsed, so we can't make all the necessary precedence + decisions at the time the option is configured. + + The solution implemented in this class is to keep these different sets + of settings separate (command line, SConscript file, and default) + and to override the __getattr__() method to check them in turn. + This should allow the rest of the code to just fetch values as + attributes of an instance of this class, without having to worry + about where they came from. + + Note that not all command line options are settable from SConscript + files, and the ones that are must be explicitly added to the + "settable" list in this class, and optionally validated and coerced + in the set_option() method. + """ + + def __init__(self, defaults): + self.__dict__['__defaults__'] = defaults + self.__dict__['__SConscript_settings__'] = {} + + def __getattr__(self, attr): + """ + Fetches an options value, checking first for explicit settings + from the command line (which are direct attributes), then the + SConscript file settings, then the default values. 
+ """ + try: + return self.__dict__[attr] + except KeyError: + try: + return self.__dict__['__SConscript_settings__'][attr] + except KeyError: + return getattr(self.__dict__['__defaults__'], attr) + + settable = [ + 'clean', + 'diskcheck', + 'duplicate', + 'help', + 'implicit_cache', + 'max_drift', + 'md5_chunksize', + 'no_exec', + 'num_jobs', + 'random', + 'stack_size', + 'warn', + ] + + def set_option(self, name, value): + """ + Sets an option from an SConscript file. + """ + if not name in self.settable: + raise SCons.Errors.UserError, "This option is not settable from a SConscript file: %s"%name + + if name == 'num_jobs': + try: + value = int(value) + if value < 1: + raise ValueError + except ValueError: + raise SCons.Errors.UserError, "A positive integer is required: %s"%repr(value) + elif name == 'max_drift': + try: + value = int(value) + except ValueError: + raise SCons.Errors.UserError, "An integer is required: %s"%repr(value) + elif name == 'duplicate': + try: + value = str(value) + except ValueError: + raise SCons.Errors.UserError, "A string is required: %s"%repr(value) + if not value in SCons.Node.FS.Valid_Duplicates: + raise SCons.Errors.UserError, "Not a valid duplication style: %s" % value + # Set the duplicate style right away so it can affect linking + # of SConscript files. + SCons.Node.FS.set_duplicate(value) + elif name == 'diskcheck': + try: + value = diskcheck_convert(value) + except ValueError, v: + raise SCons.Errors.UserError, "Not a valid diskcheck value: %s"%v + if not self.__dict__.has_key('diskcheck'): + # No --diskcheck= option was specified on the command line. + # Set this right away so it can affect the rest of the + # file/Node lookups while processing the SConscript files. + SCons.Node.FS.set_diskcheck(value) + elif name == 'stack_size': + try: + value = int(value) + except ValueError: + raise SCons.Errors.UserError, "An integer is required: %s"%repr(value) + elif name == 'md5_chunksize': + try: + value = int(value) + except ValueError: + raise SCons.Errors.UserError, "An integer is required: %s"%repr(value) + elif name == 'warn': + if SCons.Util.is_String(value): + value = [value] + value = self.__SConscript_settings__.get(name, []) + value + SCons.Warnings.process_warn_strings(value) + + self.__SConscript_settings__[name] = value + +class SConsOption(optparse.Option): + def convert_value(self, opt, value): + if value is not None: + if self.nargs in (1, '?'): + return self.check_value(opt, value) + else: + return tuple(map(lambda v, o=opt, s=self: s.check_value(o, v), value)) + + def process(self, opt, value, values, parser): + + # First, convert the value(s) to the right type. Howl if any + # value(s) are bogus. + value = self.convert_value(opt, value) + + # And then take whatever action is expected of us. + # This is a separate method to make life easier for + # subclasses to add new actions. + return self.take_action( + self.action, self.dest, opt, value, values, parser) + + def _check_nargs_optional(self): + if self.nargs == '?' and self._short_opts: + fmt = "option %s: nargs='?' is incompatible with short options" + raise SCons.Errors.UserError, fmt % self._short_opts[0] + + try: + _orig_CONST_ACTIONS = optparse.Option.CONST_ACTIONS + + _orig_CHECK_METHODS = optparse.Option.CHECK_METHODS + + except AttributeError: + # optparse.Option had no CONST_ACTIONS before Python 2.5. 
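
Stepping back to set_option() above: it is where the SConscript-level SetOption() call ends up, so its validation is what SConscript authors see. Typical usage inside an SConscript (values are only illustrative; this runs under scons, not standalone):

    # inside an SConstruct/SConscript file
    SetOption('num_jobs', 4)        # must coerce to a positive int, else UserError
    SetOption('max_drift', 1)
    print(GetOption('num_jobs'))    # a -j given on the command line still wins
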
+ + _orig_CONST_ACTIONS = ("store_const",) + + def _check_const(self): + if self.action not in self.CONST_ACTIONS and self.const is not None: + raise OptionError( + "'const' must not be supplied for action %r" % self.action, + self) + + # optparse.Option collects its list of unbound check functions + # up front. This sucks because it means we can't just override + # the _check_const() function like a normal method, we have to + # actually replace it in the list. This seems to be the most + # straightforward way to do that. + + _orig_CHECK_METHODS = [optparse.Option._check_action, + optparse.Option._check_type, + optparse.Option._check_choice, + optparse.Option._check_dest, + _check_const, + optparse.Option._check_nargs, + optparse.Option._check_callback] + + CHECK_METHODS = _orig_CHECK_METHODS + [_check_nargs_optional] + + CONST_ACTIONS = _orig_CONST_ACTIONS + optparse.Option.TYPED_ACTIONS + +class SConsOptionGroup(optparse.OptionGroup): + """ + A subclass for SCons-specific option groups. + + The only difference between this and the base class is that we print + the group's help text flush left, underneath their own title but + lined up with the normal "SCons Options". + """ + def format_help(self, formatter): + """ + Format an option group's help text, outdenting the title so it's + flush with the "SCons Options" title we print at the top. + """ + formatter.dedent() + result = formatter.format_heading(self.title) + formatter.indent() + result = result + optparse.OptionContainer.format_help(self, formatter) + return result + +class SConsOptionParser(optparse.OptionParser): + preserve_unknown_options = False + + def error(self, msg): + self.print_usage(sys.stderr) + sys.stderr.write("SCons error: %s\n" % msg) + sys.exit(2) + + def _process_long_opt(self, rargs, values): + """ + SCons-specific processing of long options. + + This is copied directly from the normal + optparse._process_long_opt() method, except that, if configured + to do so, we catch the exception thrown when an unknown option + is encountered and just stick it back on the "leftover" arguments + for later (re-)processing. + """ + arg = rargs.pop(0) + + # Value explicitly attached to arg? Pretend it's the next + # argument. + if "=" in arg: + (opt, next_arg) = string.split(arg, "=", 1) + rargs.insert(0, next_arg) + had_explicit_value = True + else: + opt = arg + had_explicit_value = False + + try: + opt = self._match_long_opt(opt) + except optparse.BadOptionError: + if self.preserve_unknown_options: + # SCons-specific: if requested, add unknown options to + # the "leftover arguments" list for later processing. + self.largs.append(arg) + if had_explicit_value: + # The unknown option will be re-processed later, + # so undo the insertion of the explicit value. + rargs.pop(0) + return + raise + + option = self._long_opt[opt] + if option.takes_value(): + nargs = option.nargs + if nargs == '?': + if had_explicit_value: + value = rargs.pop(0) + else: + value = option.const + elif len(rargs) < nargs: + if nargs == 1: + self.error(_("%s option requires an argument") % opt) + else: + self.error(_("%s option requires %d arguments") + % (opt, nargs)) + elif nargs == 1: + value = rargs.pop(0) + else: + value = tuple(rargs[0:nargs]) + del rargs[0:nargs] + + elif had_explicit_value: + self.error(_("%s option does not take a value") % opt) + + else: + value = None + + option.process(opt, value, values, self) + + def add_local_option(self, *args, **kw): + """ + Adds a local option to the parser. 
+
+ This is initiated by a SetOption() call to add a user-defined
+ command-line option. We add the option to a separate option
+ group for the local options, creating the group if necessary.
+ """
+ try:
+ group = self.local_option_group
+ except AttributeError:
+ group = SConsOptionGroup(self, 'Local Options')
+ group = self.add_option_group(group)
+ self.local_option_group = group
+
+ result = apply(group.add_option, args, kw)
+
+ if result:
+ # The option was added successfully. We now have to add the
+ # default value to our object that holds the default values
+ # (so that an attempt to fetch the option's attribute will
+ # yield the default value when not overridden) and then
+ # we re-parse the leftover command-line options, so that
+ # any value overridden on the command line is immediately
+ # available if the user turns around and does a GetOption()
+ # right away.
+ setattr(self.values.__defaults__, result.dest, result.default)
+ self.parse_args(self.largs, self.values)
+
+ return result
+
+class SConsIndentedHelpFormatter(optparse.IndentedHelpFormatter):
+ def format_usage(self, usage):
+ return "usage: %s\n" % usage
+
+ def format_heading(self, heading):
+ """
+ This translates any heading of "options" or "Options" into
+ "SCons Options." Unfortunately, we have to do this here,
+ because those titles are hard-coded in the optparse calls.
+ """
+ if heading == 'options':
+ # The versions of optparse.py shipped with Pythons 2.3 and
+ # 2.4 pass this in uncapitalized; override that so we get
+ # consistent output on all versions.
+ heading = "Options"
+ if heading == 'Options':
+ heading = "SCons Options"
+ return optparse.IndentedHelpFormatter.format_heading(self, heading)
+
+ def format_option(self, option):
+ """
+ A copy of the normal optparse.IndentedHelpFormatter.format_option()
+ method. This has been snarfed so we can modify text wrapping to
+ our liking:
+
+ -- add our own regular expression that doesn't break on hyphens
+ (so things like --no-print-directory don't get broken);
+
+ -- wrap the list of options themselves when it's too long
+ (the wrapper.fill(opts) call below);
+
+ -- set the subsequent_indent when wrapping the help_text.
+ """
+ # The help for each option consists of two parts:
+ # * the opt strings and metavars
+ # eg. ("-x", or "-fFILENAME, --file=FILENAME")
+ # * the user-supplied help string
+ # eg. ("turn on expert mode", "read data from FILENAME")
+ #
+ # If possible, we write both of these on the same line:
+ # -x turn on expert mode
+ #
+ # But if the opt string list is too long, we put the help
+ # string on a second line, indented to the same column it would
+ # start in if it fit on the first line.
+ # -fFILENAME, --file=FILENAME
+ # read data from FILENAME
+ result = []
+
+ try:
+ opts = self.option_strings[option]
+ except AttributeError:
+ # The Python 2.3 version of optparse attaches this
+ # to the option argument, not to this object.
+ opts = option.option_strings + + opt_width = self.help_position - self.current_indent - 2 + if len(opts) > opt_width: + wrapper = textwrap.TextWrapper(width=self.width, + initial_indent = ' ', + subsequent_indent = ' ') + wrapper.wordsep_re = no_hyphen_re + opts = wrapper.fill(opts) + '\n' + indent_first = self.help_position + else: # start help on same line as opts + opts = "%*s%-*s " % (self.current_indent, "", opt_width, opts) + indent_first = 0 + result.append(opts) + if option.help: + + try: + expand_default = self.expand_default + except AttributeError: + # The HelpFormatter base class in the Python 2.3 version + # of optparse has no expand_default() method. + help_text = option.help + else: + help_text = expand_default(option) + + # SCons: indent every line of the help text but the first. + wrapper = textwrap.TextWrapper(width=self.help_width, + subsequent_indent = ' ') + wrapper.wordsep_re = no_hyphen_re + help_lines = wrapper.wrap(help_text) + result.append("%*s%s\n" % (indent_first, "", help_lines[0])) + for line in help_lines[1:]: + result.append("%*s%s\n" % (self.help_position, "", line)) + elif opts[-1] != "\n": + result.append("\n") + return string.join(result, "") + + # For consistent help output across Python versions, we provide a + # subclass copy of format_option_strings() and these two variables. + # This is necessary (?) for Python2.3, which otherwise concatenates + # a short option with its metavar. + _short_opt_fmt = "%s %s" + _long_opt_fmt = "%s=%s" + + def format_option_strings(self, option): + """Return a comma-separated list of option strings & metavariables.""" + if option.takes_value(): + metavar = option.metavar or string.upper(option.dest) + short_opts = [] + for sopt in option._short_opts: + short_opts.append(self._short_opt_fmt % (sopt, metavar)) + long_opts = [] + for lopt in option._long_opts: + long_opts.append(self._long_opt_fmt % (lopt, metavar)) + else: + short_opts = option._short_opts + long_opts = option._long_opts + + if self.short_first: + opts = short_opts + long_opts + else: + opts = long_opts + short_opts + + return string.join(opts, ", ") + +def Parser(version): + """ + Returns an options parser object initialized with the standard + SCons options. + """ + + formatter = SConsIndentedHelpFormatter(max_help_position=30) + + op = SConsOptionParser(option_class=SConsOption, + add_help_option=False, + formatter=formatter, + usage="usage: scons [OPTION] [TARGET] ...",) + + op.preserve_unknown_options = True + op.version = version + + # Add the options to the parser we just created. + # + # These are in the order we want them to show up in the -H help + # text, basically alphabetical. Each op.add_option() call below + # should have a consistent format: + # + # op.add_option("-L", "--long-option-name", + # nargs=1, type="string", + # dest="long_option_name", default='foo', + # action="callback", callback=opt_long_option, + # help="help text goes here", + # metavar="VAR") + # + # Even though the optparse module constructs reasonable default + # destination names from the long option names, we're going to be + # explicit about each one for easier readability and so this code + # will at least show up when grepping the source for option attribute + # names, or otherwise browsing the source code. 
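
Taken together, Main._exec_main() drives this module roughly as follows; a condensed sketch (the argument list and version banner are invented, and SCons must be importable):

    import SCons.Script.SConsOptions as SConsOptions

    parser = SConsOptions.Parser("SCons version banner goes here")
    values = SConsOptions.SConsValues(parser.get_default_values())
    options, args = parser.parse_args(['-j', '4', '--debug=time', 'install'], values)

    print(options.num_jobs)    # -> 4
    print(options.debug)       # -> ['time']
    print(parser.largs)        # leftover non-option words, e.g. ['install']
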
+ + # options ignored for compatibility + def opt_ignore(option, opt, value, parser): + sys.stderr.write("Warning: ignoring %s option\n" % opt) + op.add_option("-b", "-d", "-e", "-m", "-S", "-t", "-w", + "--environment-overrides", + "--no-keep-going", + "--no-print-directory", + "--print-directory", + "--stop", + "--touch", + action="callback", callback=opt_ignore, + help="Ignored for compatibility.") + + op.add_option('-c', '--clean', '--remove', + dest="clean", default=False, + action="store_true", + help="Remove specified targets and dependencies.") + + op.add_option('-C', '--directory', + nargs=1, type="string", + dest="directory", default=[], + action="append", + help="Change to DIR before doing anything.", + metavar="DIR") + + op.add_option('--cache-debug', + nargs=1, + dest="cache_debug", default=None, + action="store", + help="Print CacheDir debug info to FILE.", + metavar="FILE") + + op.add_option('--cache-disable', '--no-cache', + dest='cache_disable', default=False, + action="store_true", + help="Do not retrieve built targets from CacheDir.") + + op.add_option('--cache-force', '--cache-populate', + dest='cache_force', default=False, + action="store_true", + help="Copy already-built targets into the CacheDir.") + + op.add_option('--cache-show', + dest='cache_show', default=False, + action="store_true", + help="Print build actions for files from CacheDir.") + + config_options = ["auto", "force" ,"cache"] + + def opt_config(option, opt, value, parser, c_options=config_options): + if not value in c_options: + raise OptionValueError("Warning: %s is not a valid config type" % value) + setattr(parser.values, option.dest, value) + opt_config_help = "Controls Configure subsystem: %s." \ + % string.join(config_options, ", ") + op.add_option('--config', + nargs=1, type="string", + dest="config", default="auto", + action="callback", callback=opt_config, + help = opt_config_help, + metavar="MODE") + + op.add_option('-D', + dest="climb_up", default=None, + action="store_const", const=2, + help="Search up directory tree for SConstruct, " + "build all Default() targets.") + + deprecated_debug_options = { + "dtree" : '; please use --tree=derived instead', + "nomemoizer" : ' and has no effect', + "stree" : '; please use --tree=all,status instead', + "tree" : '; please use --tree=all instead', + } + + debug_options = ["count", "explain", "findlibs", + "includes", "memoizer", "memory", "objects", + "pdb", "presub", "stacktrace", + "time"] + deprecated_debug_options.keys() + + def opt_debug(option, opt, value, parser, + debug_options=debug_options, + deprecated_debug_options=deprecated_debug_options): + if value in debug_options: + parser.values.debug.append(value) + if value in deprecated_debug_options.keys(): + try: + parser.values.delayed_warnings + except AttributeError: + parser.values.delayed_warnings = [] + msg = deprecated_debug_options[value] + w = "The --debug=%s option is deprecated%s." % (value, msg) + t = (SCons.Warnings.DeprecatedWarning, w) + parser.values.delayed_warnings.append(t) + else: + raise OptionValueError("Warning: %s is not a valid debug type" % value) + opt_debug_help = "Print various types of debugging information: %s." 
\ + % string.join(debug_options, ", ") + op.add_option('--debug', + nargs=1, type="string", + dest="debug", default=[], + action="callback", callback=opt_debug, + help=opt_debug_help, + metavar="TYPE") + + def opt_diskcheck(option, opt, value, parser): + try: + diskcheck_value = diskcheck_convert(value) + except ValueError, e: + raise OptionValueError("Warning: `%s' is not a valid diskcheck type" % e) + setattr(parser.values, option.dest, diskcheck_value) + + op.add_option('--diskcheck', + nargs=1, type="string", + dest='diskcheck', default=None, + action="callback", callback=opt_diskcheck, + help="Enable specific on-disk checks.", + metavar="TYPE") + + def opt_duplicate(option, opt, value, parser): + if not value in SCons.Node.FS.Valid_Duplicates: + raise OptionValueError("`%s' is not a valid duplication style." % value) + setattr(parser.values, option.dest, value) + # Set the duplicate style right away so it can affect linking + # of SConscript files. + SCons.Node.FS.set_duplicate(value) + + opt_duplicate_help = "Set the preferred duplication methods. Must be one of " \ + + string.join(SCons.Node.FS.Valid_Duplicates, ", ") + + op.add_option('--duplicate', + nargs=1, type="string", + dest="duplicate", default='hard-soft-copy', + action="callback", callback=opt_duplicate, + help=opt_duplicate_help) + + op.add_option('-f', '--file', '--makefile', '--sconstruct', + nargs=1, type="string", + dest="file", default=[], + action="append", + help="Read FILE as the top-level SConstruct file.") + + op.add_option('-h', '--help', + dest="help", default=False, + action="store_true", + help="Print defined help message, or this one.") + + op.add_option("-H", "--help-options", + action="help", + help="Print this message and exit.") + + op.add_option('-i', '--ignore-errors', + dest='ignore_errors', default=False, + action="store_true", + help="Ignore errors from build actions.") + + op.add_option('-I', '--include-dir', + nargs=1, + dest='include_dir', default=[], + action="append", + help="Search DIR for imported Python modules.", + metavar="DIR") + + op.add_option('--implicit-cache', + dest='implicit_cache', default=False, + action="store_true", + help="Cache implicit dependencies") + + def opt_implicit_deps(option, opt, value, parser): + setattr(parser.values, 'implicit_cache', True) + setattr(parser.values, option.dest, True) + + op.add_option('--implicit-deps-changed', + dest="implicit_deps_changed", default=False, + action="callback", callback=opt_implicit_deps, + help="Ignore cached implicit dependencies.") + + op.add_option('--implicit-deps-unchanged', + dest="implicit_deps_unchanged", default=False, + action="callback", callback=opt_implicit_deps, + help="Ignore changes in implicit dependencies.") + + op.add_option('--interact', '--interactive', + dest='interactive', default=False, + action="store_true", + help="Run in interactive mode.") + + op.add_option('-j', '--jobs', + nargs=1, type="int", + dest="num_jobs", default=1, + action="store", + help="Allow N jobs at once.", + metavar="N") + + op.add_option('-k', '--keep-going', + dest='keep_going', default=False, + action="store_true", + help="Keep going when a target can't be made.") + + op.add_option('--max-drift', + nargs=1, type="int", + dest='max_drift', default=SCons.Node.FS.default_max_drift, + action="store", + help="Set maximum system clock drift to N seconds.", + metavar="N") + + op.add_option('--md5-chunksize', + nargs=1, type="int", + dest='md5_chunksize', default=SCons.Node.FS.File.md5_chunksize, + action="store", + help="Set chunk-size 
for MD5 signature computation to N kilobytes.", + metavar="N") + + op.add_option('-n', '--no-exec', '--just-print', '--dry-run', '--recon', + dest='no_exec', default=False, + action="store_true", + help="Don't build; just print commands.") + + op.add_option('--no-site-dir', + dest='no_site_dir', default=False, + action="store_true", + help="Don't search or use the usual site_scons dir.") + + op.add_option('--profile', + nargs=1, + dest="profile_file", default=None, + action="store", + help="Profile SCons and put results in FILE.", + metavar="FILE") + + op.add_option('-q', '--question', + dest="question", default=False, + action="store_true", + help="Don't build; exit status says if up to date.") + + op.add_option('-Q', + dest='no_progress', default=False, + action="store_true", + help="Suppress \"Reading/Building\" progress messages.") + + op.add_option('--random', + dest="random", default=False, + action="store_true", + help="Build dependencies in random order.") + + op.add_option('-s', '--silent', '--quiet', + dest="silent", default=False, + action="store_true", + help="Don't print commands.") + + op.add_option('--site-dir', + nargs=1, + dest='site_dir', default=None, + action="store", + help="Use DIR instead of the usual site_scons dir.", + metavar="DIR") + + op.add_option('--stack-size', + nargs=1, type="int", + dest='stack_size', + action="store", + help="Set the stack size of the threads used to run jobs to N kilobytes.", + metavar="N") + + op.add_option('--taskmastertrace', + nargs=1, + dest="taskmastertrace_file", default=None, + action="store", + help="Trace Node evaluation to FILE.", + metavar="FILE") + + tree_options = ["all", "derived", "prune", "status"] + + def opt_tree(option, opt, value, parser, tree_options=tree_options): + import Main + tp = Main.TreePrinter() + for o in string.split(value, ','): + if o == 'all': + tp.derived = False + elif o == 'derived': + tp.derived = True + elif o == 'prune': + tp.prune = True + elif o == 'status': + tp.status = True + else: + raise OptionValueError("Warning: %s is not a valid --tree option" % o) + parser.values.tree_printers.append(tp) + + opt_tree_help = "Print a dependency tree in various formats: %s." 
\ + % string.join(tree_options, ", ") + + op.add_option('--tree', + nargs=1, type="string", + dest="tree_printers", default=[], + action="callback", callback=opt_tree, + help=opt_tree_help, + metavar="OPTIONS") + + op.add_option('-u', '--up', '--search-up', + dest="climb_up", default=0, + action="store_const", const=1, + help="Search up directory tree for SConstruct, " + "build targets at or below current directory.") + + op.add_option('-U', + dest="climb_up", default=0, + action="store_const", const=3, + help="Search up directory tree for SConstruct, " + "build Default() targets from local SConscript.") + + def opt_version(option, opt, value, parser): + sys.stdout.write(parser.version + '\n') + sys.exit(0) + op.add_option("-v", "--version", + action="callback", callback=opt_version, + help="Print the SCons version number and exit.") + + def opt_warn(option, opt, value, parser, tree_options=tree_options): + if SCons.Util.is_String(value): + value = string.split(value, ',') + parser.values.warn.extend(value) + + op.add_option('--warn', '--warning', + nargs=1, type="string", + dest="warn", default=[], + action="callback", callback=opt_warn, + help="Enable or disable warnings.", + metavar="WARNING-SPEC") + + op.add_option('-Y', '--repository', '--srcdir', + nargs=1, + dest="repository", default=[], + action="append", + help="Search REPOSITORY for source and target files.") + + # Options from Make and Cons classic that we do not yet support, + # but which we may support someday and whose (potential) meanings + # we don't want to change. These all get a "the -X option is not + # yet implemented" message and don't show up in the help output. + + def opt_not_yet(option, opt, value, parser): + msg = "Warning: the %s option is not yet implemented\n" % opt + sys.stderr.write(msg) + + op.add_option('-l', '--load-average', '--max-load', + nargs=1, type="int", + dest="load_average", default=0, + action="callback", callback=opt_not_yet, + # action="store", + # help="Don't start multiple jobs unless load is below " + # "LOAD-AVERAGE." + help=SUPPRESS_HELP) + op.add_option('--list-actions', + dest="list_actions", + action="callback", callback=opt_not_yet, + # help="Don't build; list files and build actions." + help=SUPPRESS_HELP) + op.add_option('--list-derived', + dest="list_derived", + action="callback", callback=opt_not_yet, + # help="Don't build; list files that would be built." + help=SUPPRESS_HELP) + op.add_option('--list-where', + dest="list_where", + action="callback", callback=opt_not_yet, + # help="Don't build; list files and where defined." + help=SUPPRESS_HELP) + op.add_option('-o', '--old-file', '--assume-old', + nargs=1, type="string", + dest="old_file", default=[], + action="callback", callback=opt_not_yet, + # action="append", + # help = "Consider FILE to be old; don't rebuild it." + help=SUPPRESS_HELP) + op.add_option('--override', + nargs=1, type="string", + action="callback", callback=opt_not_yet, + dest="override", + # help="Override variables as specified in FILE." + help=SUPPRESS_HELP) + op.add_option('-p', + action="callback", callback=opt_not_yet, + dest="p", + # help="Print internal environments/objects." + help=SUPPRESS_HELP) + op.add_option('-r', '-R', '--no-builtin-rules', '--no-builtin-variables', + action="callback", callback=opt_not_yet, + dest="no_builtin_rules", + # help="Clear default environments and variables." 
+ help=SUPPRESS_HELP) + op.add_option('--write-filenames', + nargs=1, type="string", + dest="write_filenames", + action="callback", callback=opt_not_yet, + # help="Write all filenames examined into FILE." + help=SUPPRESS_HELP) + op.add_option('-W', '--new-file', '--assume-new', '--what-if', + nargs=1, type="string", + dest="new_file", + action="callback", callback=opt_not_yet, + # help="Consider FILE to be changed." + help=SUPPRESS_HELP) + op.add_option('--warn-undefined-variables', + dest="warn_undefined_variables", + action="callback", callback=opt_not_yet, + # help="Warn when an undefined variable is referenced." + help=SUPPRESS_HELP) + + return op + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Script/SConscript.py b/engine/SCons/Script/SConscript.py new file mode 100644 index 0000000..aa9fdab --- /dev/null +++ b/engine/SCons/Script/SConscript.py @@ -0,0 +1,642 @@ +"""SCons.Script.SConscript + +This module defines the Python API provided to SConscript and SConstruct +files. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Script/SConscript.py 4577 2009/12/27 19:43:56 scons" + +import SCons +import SCons.Action +import SCons.Builder +import SCons.Defaults +import SCons.Environment +import SCons.Errors +import SCons.Node +import SCons.Node.Alias +import SCons.Node.FS +import SCons.Platform +import SCons.SConf +import SCons.Script.Main +import SCons.Tool +import SCons.Util + +import os +import os.path +import re +import string +import sys +import traceback +import types +import UserList + +# The following variables used to live in this module. Some +# SConscript files out there may have referred to them directly as +# SCons.Script.SConscript.*. This is now supported by some special +# handling towards the bottom of the SConscript.__init__.py module. 
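
As orientation for the machinery defined below (Export()/Import()/Return(), compute_exports(), and the Frame call stack), the SConscript-level idiom it supports looks like this; file, builder, and variable names are invented:

    # SConstruct
    env = Environment()
    Export('env')
    lib = SConscript('src/SConscript')     # receives the value passed to Return()

    # src/SConscript
    Import('env')
    lib = env.StaticLibrary('foo', ['foo.c'])
    Return('lib')
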
+#Arguments = {} +#ArgList = [] +#BuildTargets = TargetList() +#CommandLineTargets = [] +#DefaultTargets = [] + +class SConscriptReturn(Exception): + pass + +launch_dir = os.path.abspath(os.curdir) + +GlobalDict = None + +# global exports set by Export(): +global_exports = {} + +# chdir flag +sconscript_chdir = 1 + +def get_calling_namespaces(): + """Return the locals and globals for the function that called + into this module in the current call stack.""" + try: 1/0 + except ZeroDivisionError: + # Don't start iterating with the current stack-frame to + # prevent creating reference cycles (f_back is safe). + frame = sys.exc_info()[2].tb_frame.f_back + + # Find the first frame that *isn't* from this file. This means + # that we expect all of the SCons frames that implement an Export() + # or SConscript() call to be in this file, so that we can identify + # the first non-Script.SConscript frame as the user's local calling + # environment, and the locals and globals dictionaries from that + # frame as the calling namespaces. See the comment below preceding + # the DefaultEnvironmentCall block for even more explanation. + while frame.f_globals.get("__name__") == __name__: + frame = frame.f_back + + return frame.f_locals, frame.f_globals + + +def compute_exports(exports): + """Compute a dictionary of exports given one of the parameters + to the Export() function or the exports argument to SConscript().""" + + loc, glob = get_calling_namespaces() + + retval = {} + try: + for export in exports: + if SCons.Util.is_Dict(export): + retval.update(export) + else: + try: + retval[export] = loc[export] + except KeyError: + retval[export] = glob[export] + except KeyError, x: + raise SCons.Errors.UserError, "Export of non-existent variable '%s'"%x + + return retval + +class Frame: + """A frame on the SConstruct/SConscript call stack""" + def __init__(self, fs, exports, sconscript): + self.globals = BuildDefaultGlobals() + self.retval = None + self.prev_dir = fs.getcwd() + self.exports = compute_exports(exports) # exports from the calling SConscript + # make sure the sconscript attr is a Node. 
+ if isinstance(sconscript, SCons.Node.Node): + self.sconscript = sconscript + elif sconscript == '-': + self.sconscript = None + else: + self.sconscript = fs.File(str(sconscript)) + +# the SConstruct/SConscript call stack: +call_stack = [] + +# For documentation on the methods in this file, see the scons man-page + +def Return(*vars, **kw): + retval = [] + try: + fvars = SCons.Util.flatten(vars) + for var in fvars: + for v in string.split(var): + retval.append(call_stack[-1].globals[v]) + except KeyError, x: + raise SCons.Errors.UserError, "Return of non-existent variable '%s'"%x + + if len(retval) == 1: + call_stack[-1].retval = retval[0] + else: + call_stack[-1].retval = tuple(retval) + + stop = kw.get('stop', True) + + if stop: + raise SConscriptReturn + + +stack_bottom = '% Stack boTTom %' # hard to define a variable w/this name :) + +def _SConscript(fs, *files, **kw): + top = fs.Top + sd = fs.SConstruct_dir.rdir() + exports = kw.get('exports', []) + + # evaluate each SConscript file + results = [] + for fn in files: + call_stack.append(Frame(fs, exports, fn)) + old_sys_path = sys.path + try: + SCons.Script.sconscript_reading = SCons.Script.sconscript_reading + 1 + if fn == "-": + exec sys.stdin in call_stack[-1].globals + else: + if isinstance(fn, SCons.Node.Node): + f = fn + else: + f = fs.File(str(fn)) + _file_ = None + + # Change directory to the top of the source + # tree to make sure the os's cwd and the cwd of + # fs match so we can open the SConscript. + fs.chdir(top, change_os_dir=1) + if f.rexists(): + actual = f.rfile() + _file_ = open(actual.get_abspath(), "r") + elif f.srcnode().rexists(): + actual = f.srcnode().rfile() + _file_ = open(actual.get_abspath(), "r") + elif f.has_src_builder(): + # The SConscript file apparently exists in a source + # code management system. Build it, but then clear + # the builder so that it doesn't get built *again* + # during the actual build phase. + f.build() + f.built() + f.builder_set(None) + if f.exists(): + _file_ = open(f.get_abspath(), "r") + if _file_: + # Chdir to the SConscript directory. Use a path + # name relative to the SConstruct file so that if + # we're using the -f option, we're essentially + # creating a parallel SConscript directory structure + # in our local directory tree. + # + # XXX This is broken for multiple-repository cases + # where the SConstruct and SConscript files might be + # in different Repositories. For now, cross that + # bridge when someone comes to it. + try: + src_dir = kw['src_dir'] + except KeyError: + ldir = fs.Dir(f.dir.get_path(sd)) + else: + ldir = fs.Dir(src_dir) + if not ldir.is_under(f.dir): + # They specified a source directory, but + # it's above the SConscript directory. + # Do the sensible thing and just use the + # SConcript directory. + ldir = fs.Dir(f.dir.get_path(sd)) + try: + fs.chdir(ldir, change_os_dir=sconscript_chdir) + except OSError: + # There was no local directory, so we should be + # able to chdir to the Repository directory. + # Note that we do this directly, not through + # fs.chdir(), because we still need to + # interpret the stuff within the SConscript file + # relative to where we are logically. + fs.chdir(ldir, change_os_dir=0) + os.chdir(actual.dir.get_abspath()) + + # Append the SConscript directory to the beginning + # of sys.path so Python modules in the SConscript + # directory can be easily imported. + sys.path = [ f.dir.get_abspath() ] + sys.path + + # This is the magic line that actually reads up + # and executes the stuff in the SConscript file. 
+ # The locals for this frame contain the special + # bottom-of-the-stack marker so that any + # exceptions that occur when processing this + # SConscript can base the printed frames at this + # level and not show SCons internals as well. + call_stack[-1].globals.update({stack_bottom:1}) + old_file = call_stack[-1].globals.get('__file__') + try: + del call_stack[-1].globals['__file__'] + except KeyError: + pass + try: + try: + exec _file_ in call_stack[-1].globals + except SConscriptReturn: + pass + finally: + if old_file is not None: + call_stack[-1].globals.update({__file__:old_file}) + else: + SCons.Warnings.warn(SCons.Warnings.MissingSConscriptWarning, + "Ignoring missing SConscript '%s'" % f.path) + + finally: + SCons.Script.sconscript_reading = SCons.Script.sconscript_reading - 1 + sys.path = old_sys_path + frame = call_stack.pop() + try: + fs.chdir(frame.prev_dir, change_os_dir=sconscript_chdir) + except OSError: + # There was no local directory, so chdir to the + # Repository directory. Like above, we do this + # directly. + fs.chdir(frame.prev_dir, change_os_dir=0) + rdir = frame.prev_dir.rdir() + rdir._create() # Make sure there's a directory there. + try: + os.chdir(rdir.get_abspath()) + except OSError, e: + # We still couldn't chdir there, so raise the error, + # but only if actions are being executed. + # + # If the -n option was used, the directory would *not* + # have been created and we should just carry on and + # let things muddle through. This isn't guaranteed + # to work if the SConscript files are reading things + # from disk (for example), but it should work well + # enough for most configurations. + if SCons.Action.execute_actions: + raise e + + results.append(frame.retval) + + # if we only have one script, don't return a tuple + if len(results) == 1: + return results[0] + else: + return tuple(results) + +def SConscript_exception(file=sys.stderr): + """Print an exception stack trace just for the SConscript file(s). + This will show users who have Python errors where the problem is, + without cluttering the output with all of the internal calls leading + up to where we exec the SConscript.""" + exc_type, exc_value, exc_tb = sys.exc_info() + tb = exc_tb + while tb and not tb.tb_frame.f_locals.has_key(stack_bottom): + tb = tb.tb_next + if not tb: + # We did not find our exec statement, so this was actually a bug + # in SCons itself. Show the whole stack. + tb = exc_tb + stack = traceback.extract_tb(tb) + try: + type = exc_type.__name__ + except AttributeError: + type = str(exc_type) + if type[:11] == "exceptions.": + type = type[11:] + file.write('%s: %s:\n' % (type, exc_value)) + for fname, line, func, text in stack: + file.write(' File "%s", line %d:\n' % (fname, line)) + file.write(' %s\n' % text) + +def annotate(node): + """Annotate a node with the stack frame describing the + SConscript file and line number that created it.""" + tb = sys.exc_info()[2] + while tb and not tb.tb_frame.f_locals.has_key(stack_bottom): + tb = tb.tb_next + if not tb: + # We did not find any exec of an SConscript file: what?! + raise SCons.Errors.InternalError, "could not find SConscript stack frame" + node.creator = traceback.extract_stack(tb)[0] + +# The following line would cause each Node to be annotated using the +# above function. Unfortunately, this is a *huge* performance hit, so +# leave this disabled until we find a more efficient mechanism. 
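(The disabled hook is the commented-out assignment just below.) As a side note, a minimal standalone sketch of the stack_bottom technique that SConscript_exception() and annotate() rely on, written with assumed names and Python 2 syntax rather than the actual SCons code, would be:

    import sys, traceback

    MARKER = '% Stack boTTom %'   # stand-in for stack_bottom above

    def run_user_code(source, globs):
        # Seed the globals handed to exec with the marker; at module level
        # the exec'd frame's f_locals is this same dict, so the marker is
        # visible when we later walk the traceback.
        globs[MARKER] = 1
        exec source in globs

    def user_only_frames():
        # Skip traceback entries until the frame carrying the marker, so
        # only the user's code shows up, not the machinery that ran it.
        tb = sys.exc_info()[2]
        while tb and MARKER not in tb.tb_frame.f_locals:
            tb = tb.tb_next
        return traceback.extract_tb(tb or sys.exc_info()[2])

    try:
        run_user_code("1/0\n", {})
    except ZeroDivisionError:
        print user_only_frames()   # only the exec'd "<string>" frame remains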
+#SCons.Node.Annotate = annotate + +class SConsEnvironment(SCons.Environment.Base): + """An Environment subclass that contains all of the methods that + are particular to the wrapper SCons interface and which aren't + (or shouldn't be) part of the build engine itself. + + Note that not all of the methods of this class have corresponding + global functions, there are some private methods. + """ + + # + # Private methods of an SConsEnvironment. + # + def _exceeds_version(self, major, minor, v_major, v_minor): + """Return 1 if 'major' and 'minor' are greater than the version + in 'v_major' and 'v_minor', and 0 otherwise.""" + return (major > v_major or (major == v_major and minor > v_minor)) + + def _get_major_minor_revision(self, version_string): + """Split a version string into major, minor and (optionally) + revision parts. + + This is complicated by the fact that a version string can be + something like 3.2b1.""" + version = string.split(string.split(version_string, ' ')[0], '.') + v_major = int(version[0]) + v_minor = int(re.match('\d+', version[1]).group()) + if len(version) >= 3: + v_revision = int(re.match('\d+', version[2]).group()) + else: + v_revision = 0 + return v_major, v_minor, v_revision + + def _get_SConscript_filenames(self, ls, kw): + """ + Convert the parameters passed to # SConscript() calls into a list + of files and export variables. If the parameters are invalid, + throws SCons.Errors.UserError. Returns a tuple (l, e) where l + is a list of SConscript filenames and e is a list of exports. + """ + exports = [] + + if len(ls) == 0: + try: + dirs = kw["dirs"] + except KeyError: + raise SCons.Errors.UserError, \ + "Invalid SConscript usage - no parameters" + + if not SCons.Util.is_List(dirs): + dirs = [ dirs ] + dirs = map(str, dirs) + + name = kw.get('name', 'SConscript') + + files = map(lambda n, name = name: os.path.join(n, name), dirs) + + elif len(ls) == 1: + + files = ls[0] + + elif len(ls) == 2: + + files = ls[0] + exports = self.Split(ls[1]) + + else: + + raise SCons.Errors.UserError, \ + "Invalid SConscript() usage - too many arguments" + + if not SCons.Util.is_List(files): + files = [ files ] + + if kw.get('exports'): + exports.extend(self.Split(kw['exports'])) + + variant_dir = kw.get('variant_dir') or kw.get('build_dir') + if variant_dir: + if len(files) != 1: + raise SCons.Errors.UserError, \ + "Invalid SConscript() usage - can only specify one SConscript with a variant_dir" + duplicate = kw.get('duplicate', 1) + src_dir = kw.get('src_dir') + if not src_dir: + src_dir, fname = os.path.split(str(files[0])) + files = [os.path.join(str(variant_dir), fname)] + else: + if not isinstance(src_dir, SCons.Node.Node): + src_dir = self.fs.Dir(src_dir) + fn = files[0] + if not isinstance(fn, SCons.Node.Node): + fn = self.fs.File(fn) + if fn.is_under(src_dir): + # Get path relative to the source directory. + fname = fn.get_path(src_dir) + files = [os.path.join(str(variant_dir), fname)] + else: + files = [fn.abspath] + kw['src_dir'] = variant_dir + self.fs.VariantDir(variant_dir, src_dir, duplicate) + + return (files, exports) + + # + # Public methods of an SConsEnvironment. These get + # entry points in the global name space so they can be called + # as global functions. + # + + def Configure(self, *args, **kw): + if not SCons.Script.sconscript_reading: + raise SCons.Errors.UserError, "Calling Configure from Builders is not supported." 
+ kw['_depth'] = kw.get('_depth', 0) + 1 + return apply(SCons.Environment.Base.Configure, (self,)+args, kw) + + def Default(self, *targets): + SCons.Script._Set_Default_Targets(self, targets) + + def EnsureSConsVersion(self, major, minor, revision=0): + """Exit abnormally if the SCons version is not late enough.""" + scons_ver = self._get_major_minor_revision(SCons.__version__) + if scons_ver < (major, minor, revision): + if revision: + scons_ver_string = '%d.%d.%d' % (major, minor, revision) + else: + scons_ver_string = '%d.%d' % (major, minor) + print "SCons %s or greater required, but you have SCons %s" % \ + (scons_ver_string, SCons.__version__) + sys.exit(2) + + def EnsurePythonVersion(self, major, minor): + """Exit abnormally if the Python version is not late enough.""" + try: + v_major, v_minor, v_micro, release, serial = sys.version_info + python_ver = (v_major, v_minor) + except AttributeError: + python_ver = self._get_major_minor_revision(sys.version)[:2] + if python_ver < (major, minor): + v = string.split(sys.version, " ", 1)[0] + print "Python %d.%d or greater required, but you have Python %s" %(major,minor,v) + sys.exit(2) + + def Exit(self, value=0): + sys.exit(value) + + def Export(self, *vars, **kw): + for var in vars: + global_exports.update(compute_exports(self.Split(var))) + global_exports.update(kw) + + def GetLaunchDir(self): + global launch_dir + return launch_dir + + def GetOption(self, name): + name = self.subst(name) + return SCons.Script.Main.GetOption(name) + + def Help(self, text): + text = self.subst(text, raw=1) + SCons.Script.HelpFunction(text) + + def Import(self, *vars): + try: + frame = call_stack[-1] + globals = frame.globals + exports = frame.exports + for var in vars: + var = self.Split(var) + for v in var: + if v == '*': + globals.update(global_exports) + globals.update(exports) + else: + if exports.has_key(v): + globals[v] = exports[v] + else: + globals[v] = global_exports[v] + except KeyError,x: + raise SCons.Errors.UserError, "Import of non-existent variable '%s'"%x + + def SConscript(self, *ls, **kw): + def subst_element(x, subst=self.subst): + if SCons.Util.is_List(x): + x = map(subst, x) + else: + x = subst(x) + return x + ls = map(subst_element, ls) + subst_kw = {} + for key, val in kw.items(): + if SCons.Util.is_String(val): + val = self.subst(val) + elif SCons.Util.is_List(val): + result = [] + for v in val: + if SCons.Util.is_String(v): + v = self.subst(v) + result.append(v) + val = result + subst_kw[key] = val + + files, exports = self._get_SConscript_filenames(ls, subst_kw) + subst_kw['exports'] = exports + return apply(_SConscript, [self.fs,] + files, subst_kw) + + def SConscriptChdir(self, flag): + global sconscript_chdir + sconscript_chdir = flag + + def SetOption(self, name, value): + name = self.subst(name) + SCons.Script.Main.SetOption(name, value) + +# +# +# +SCons.Environment.Environment = SConsEnvironment + +def Configure(*args, **kw): + if not SCons.Script.sconscript_reading: + raise SCons.Errors.UserError, "Calling Configure from Builders is not supported." + kw['_depth'] = 1 + return apply(SCons.SConf.SConf, args, kw) + +# It's very important that the DefaultEnvironmentCall() class stay in this +# file, with the get_calling_namespaces() function, the compute_exports() +# function, the Frame class and the SConsEnvironment.Export() method. +# These things make up the calling stack leading up to the actual global +# Export() or SConscript() call that the user issued. 
We want to allow +# users to export local variables that they define, like so: +# +# def func(): +# x = 1 +# Export('x') +# +# To support this, the get_calling_namespaces() function assumes that +# the *first* stack frame that's not from this file is the local frame +# for the Export() or SConscript() call. + +_DefaultEnvironmentProxy = None + +def get_DefaultEnvironmentProxy(): + global _DefaultEnvironmentProxy + if not _DefaultEnvironmentProxy: + default_env = SCons.Defaults.DefaultEnvironment() + _DefaultEnvironmentProxy = SCons.Environment.NoSubstitutionProxy(default_env) + return _DefaultEnvironmentProxy + +class DefaultEnvironmentCall: + """A class that implements "global function" calls of + Environment methods by fetching the specified method from the + DefaultEnvironment's class. Note that this uses an intermediate + proxy class instead of calling the DefaultEnvironment method + directly so that the proxy can override the subst() method and + thereby prevent expansion of construction variables (since from + the user's point of view this was called as a global function, + with no associated construction environment).""" + def __init__(self, method_name, subst=0): + self.method_name = method_name + if subst: + self.factory = SCons.Defaults.DefaultEnvironment + else: + self.factory = get_DefaultEnvironmentProxy + def __call__(self, *args, **kw): + env = self.factory() + method = getattr(env, self.method_name) + return apply(method, args, kw) + + +def BuildDefaultGlobals(): + """ + Create a dictionary containing all the default globals for + SConstruct and SConscript files. + """ + + global GlobalDict + if GlobalDict is None: + GlobalDict = {} + + import SCons.Script + d = SCons.Script.__dict__ + def not_a_module(m, d=d, mtype=type(SCons.Script)): + return type(d[m]) != mtype + for m in filter(not_a_module, dir(SCons.Script)): + GlobalDict[m] = d[m] + + return GlobalDict.copy() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Script/__init__.py b/engine/SCons/Script/__init__.py new file mode 100644 index 0000000..c4ac363 --- /dev/null +++ b/engine/SCons/Script/__init__.py @@ -0,0 +1,414 @@ +"""SCons.Script + +This file implements the main() function used by the scons script. + +Architecturally, this *is* the scons script, and will likely only be +called from the external "scons" wrapper. Consequently, anything here +should not be, or be considered, part of the build engine. If it's +something that we expect other software to want to use, it should go in +some other module. If it's specific to the "scons" script invocation, +it goes here. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Script/__init__.py 4577 2009/12/27 19:43:56 scons" + +import time +start_time = time.time() + +import os +import string +import sys +import UserList + +# Special chicken-and-egg handling of the "--debug=memoizer" flag: +# +# SCons.Memoize contains a metaclass implementation that affects how +# the other classes are instantiated. The Memoizer may add shim methods +# to classes that have methods that cache computed values in order to +# count and report the hits and misses. +# +# If we wait to enable the Memoization until after we've parsed the +# command line options normally, it will be too late, because the Memoizer +# will have already analyzed the classes that it's Memoizing and decided +# to not add the shims. So we use a special-case, up-front check for +# the "--debug=memoizer" flag and enable Memoizer before we import any +# of the other modules that use it. + +_args = sys.argv + string.split(os.environ.get('SCONSFLAGS', '')) +if "--debug=memoizer" in _args: + import SCons.Memoize + import SCons.Warnings + try: + SCons.Memoize.EnableMemoization() + except SCons.Warnings.Warning: + # Some warning was thrown (inability to --debug=memoizer on + # Python 1.5.2 because it doesn't have metaclasses). Arrange + # for it to be displayed or not after warnings are configured. + import Main + exc_type, exc_value, tb = sys.exc_info() + Main.delayed_warnings.append((exc_type, exc_value)) +del _args + +import SCons.Action +import SCons.Builder +import SCons.Environment +import SCons.Node.FS +import SCons.Options +import SCons.Platform +import SCons.Scanner +import SCons.SConf +import SCons.Subst +import SCons.Tool +import SCons.Util +import SCons.Variables +import SCons.Defaults + +import Main + +main = Main.main + +# The following are global class definitions and variables that used to +# live directly in this module back before 0.96.90, when it contained +# a lot of code. Some SConscript files in widely-distributed packages +# (Blender is the specific example) actually reached into SCons.Script +# directly to use some of these. Rather than break those SConscript +# files, we're going to propagate these names into the SCons.Script +# namespace here. +# +# Some of these are commented out because it's *really* unlikely anyone +# used them, but we're going to leave the comment here to try to make +# it obvious what to do if the situation arises. 
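A minimal sketch of the kind of access this keeps working (hypothetical user code, assuming an installed Python 2 SCons on sys.path; the alias assignments it depends on are the lines that follow):

    import SCons.Script
    import SCons.Script.Main

    # Old code that reached into SCons.Script directly still gets the same
    # objects that now live in SCons.Script.Main.
    assert SCons.Script.BuildTask is SCons.Script.Main.BuildTask
    assert SCons.Script.GetOption is SCons.Script.Main.GetOption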
+BuildTask = Main.BuildTask +CleanTask = Main.CleanTask +QuestionTask = Main.QuestionTask +#PrintHelp = Main.PrintHelp +#SConscriptSettableOptions = Main.SConscriptSettableOptions + +AddOption = Main.AddOption +GetOption = Main.GetOption +SetOption = Main.SetOption +Progress = Main.Progress +GetBuildFailures = Main.GetBuildFailures + +#keep_going_on_error = Main.keep_going_on_error +#print_dtree = Main.print_dtree +#print_explanations = Main.print_explanations +#print_includes = Main.print_includes +#print_objects = Main.print_objects +#print_time = Main.print_time +#print_tree = Main.print_tree +#memory_stats = Main.memory_stats +#ignore_errors = Main.ignore_errors +#sconscript_time = Main.sconscript_time +#command_time = Main.command_time +#exit_status = Main.exit_status +#profiling = Main.profiling +#repositories = Main.repositories + +# +import SConscript +_SConscript = SConscript + +call_stack = _SConscript.call_stack + +# +Action = SCons.Action.Action +AddMethod = SCons.Util.AddMethod +AllowSubstExceptions = SCons.Subst.SetAllowableExceptions +Builder = SCons.Builder.Builder +Configure = _SConscript.Configure +Environment = SCons.Environment.Environment +#OptParser = SCons.SConsOptions.OptParser +FindPathDirs = SCons.Scanner.FindPathDirs +Platform = SCons.Platform.Platform +Return = _SConscript.Return +Scanner = SCons.Scanner.Base +Tool = SCons.Tool.Tool +WhereIs = SCons.Util.WhereIs + +# +BoolVariable = SCons.Variables.BoolVariable +EnumVariable = SCons.Variables.EnumVariable +ListVariable = SCons.Variables.ListVariable +PackageVariable = SCons.Variables.PackageVariable +PathVariable = SCons.Variables.PathVariable + +# Deprecated names that will go away some day. +BoolOption = SCons.Options.BoolOption +EnumOption = SCons.Options.EnumOption +ListOption = SCons.Options.ListOption +PackageOption = SCons.Options.PackageOption +PathOption = SCons.Options.PathOption + +# Action factories. +Chmod = SCons.Defaults.Chmod +Copy = SCons.Defaults.Copy +Delete = SCons.Defaults.Delete +Mkdir = SCons.Defaults.Mkdir +Move = SCons.Defaults.Move +Touch = SCons.Defaults.Touch + +# Pre-made, public scanners. +CScanner = SCons.Tool.CScanner +DScanner = SCons.Tool.DScanner +DirScanner = SCons.Defaults.DirScanner +ProgramScanner = SCons.Tool.ProgramScanner +SourceFileScanner = SCons.Tool.SourceFileScanner + +# Functions we might still convert to Environment methods. +CScan = SCons.Defaults.CScan +DefaultEnvironment = SCons.Defaults.DefaultEnvironment + +# Other variables we provide. +class TargetList(UserList.UserList): + def _do_nothing(self, *args, **kw): + pass + def _add_Default(self, list): + self.extend(list) + def _clear(self): + del self[:] + +ARGUMENTS = {} +ARGLIST = [] +BUILD_TARGETS = TargetList() +COMMAND_LINE_TARGETS = [] +DEFAULT_TARGETS = [] + +# BUILD_TARGETS can be modified in the SConscript files. If so, we +# want to treat the modified BUILD_TARGETS list as if they specified +# targets on the command line. To do that, though, we need to know if +# BUILD_TARGETS was modified through "official" APIs or by hand. We do +# this by updating two lists in parallel, the documented BUILD_TARGETS +# list, above, and this internal _build_plus_default targets list which +# should only have "official" API changes. Then Script/Main.py can +# compare these two afterwards to figure out if the user added their +# own targets to BUILD_TARGETS. 
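Roughly, the comparison that Script/Main.py can then make looks like the sketch below (assumed names, not the actual Main.py code); the parallel list itself is created on the next line:

    def targets_added_by_hand(build_targets, build_plus_default):
        # Anything present in BUILD_TARGETS but absent from the list that
        # only tracks "official" changes was appended by an SConscript.
        official = [str(t) for t in build_plus_default]
        return [t for t in build_targets if str(t) not in official]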
+_build_plus_default = TargetList() + +def _Add_Arguments(alist): + for arg in alist: + a, b = string.split(arg, '=', 1) + ARGUMENTS[a] = b + ARGLIST.append((a, b)) + +def _Add_Targets(tlist): + if tlist: + COMMAND_LINE_TARGETS.extend(tlist) + BUILD_TARGETS.extend(tlist) + BUILD_TARGETS._add_Default = BUILD_TARGETS._do_nothing + BUILD_TARGETS._clear = BUILD_TARGETS._do_nothing + _build_plus_default.extend(tlist) + _build_plus_default._add_Default = _build_plus_default._do_nothing + _build_plus_default._clear = _build_plus_default._do_nothing + +def _Set_Default_Targets_Has_Been_Called(d, fs): + return DEFAULT_TARGETS + +def _Set_Default_Targets_Has_Not_Been_Called(d, fs): + if d is None: + d = [fs.Dir('.')] + return d + +_Get_Default_Targets = _Set_Default_Targets_Has_Not_Been_Called + +def _Set_Default_Targets(env, tlist): + global DEFAULT_TARGETS + global _Get_Default_Targets + _Get_Default_Targets = _Set_Default_Targets_Has_Been_Called + for t in tlist: + if t is None: + # Delete the elements from the list in-place, don't + # reassign an empty list to DEFAULT_TARGETS, so that the + # variables will still point to the same object we point to. + del DEFAULT_TARGETS[:] + BUILD_TARGETS._clear() + _build_plus_default._clear() + elif isinstance(t, SCons.Node.Node): + DEFAULT_TARGETS.append(t) + BUILD_TARGETS._add_Default([t]) + _build_plus_default._add_Default([t]) + else: + nodes = env.arg2nodes(t, env.fs.Entry) + DEFAULT_TARGETS.extend(nodes) + BUILD_TARGETS._add_Default(nodes) + _build_plus_default._add_Default(nodes) + +# +help_text = None + +def HelpFunction(text): + global help_text + if SCons.Script.help_text is None: + SCons.Script.help_text = text + else: + help_text = help_text + text + +# +# Will be non-zero if we are reading an SConscript file. +sconscript_reading = 0 + +# +def Variables(files=[], args=ARGUMENTS): + return SCons.Variables.Variables(files, args) + +def Options(files=[], args=ARGUMENTS): + return SCons.Options.Options(files, args) + +# The list of global functions to add to the SConscript name space +# that end up calling corresponding methods or Builders in the +# DefaultEnvironment(). +GlobalDefaultEnvironmentFunctions = [ + # Methods from the SConsEnvironment class, above. + 'Default', + 'EnsurePythonVersion', + 'EnsureSConsVersion', + 'Exit', + 'Export', + 'GetLaunchDir', + 'Help', + 'Import', + #'SConscript', is handled separately, below. + 'SConscriptChdir', + + # Methods from the Environment.Base class. + 'AddPostAction', + 'AddPreAction', + 'Alias', + 'AlwaysBuild', + 'BuildDir', + 'CacheDir', + 'Clean', + #The Command() method is handled separately, below. + 'Decider', + 'Depends', + 'Dir', + 'NoClean', + 'NoCache', + 'Entry', + 'Execute', + 'File', + 'FindFile', + 'FindInstalledFiles', + 'FindSourceFiles', + 'Flatten', + 'GetBuildPath', + 'Glob', + 'Ignore', + 'Install', + 'InstallAs', + 'Literal', + 'Local', + 'ParseDepends', + 'Precious', + 'Repository', + 'Requires', + 'SConsignFile', + 'SideEffect', + 'SourceCode', + 'SourceSignatures', + 'Split', + 'Tag', + 'TargetSignatures', + 'Value', + 'VariantDir', +] + +GlobalDefaultBuilders = [ + # Supported builders. 
+ 'CFile', + 'CXXFile', + 'DVI', + 'Jar', + 'Java', + 'JavaH', + 'Library', + 'M4', + 'MSVSProject', + 'Object', + 'PCH', + 'PDF', + 'PostScript', + 'Program', + 'RES', + 'RMIC', + 'SharedLibrary', + 'SharedObject', + 'StaticLibrary', + 'StaticObject', + 'Tar', + 'TypeLibrary', + 'Zip', + 'Package', +] + +for name in GlobalDefaultEnvironmentFunctions + GlobalDefaultBuilders: + exec "%s = _SConscript.DefaultEnvironmentCall(%s)" % (name, repr(name)) +del name + +# There are a handful of variables that used to live in the +# Script/SConscript.py module that some SConscript files out there were +# accessing directly as SCons.Script.SConscript.*. The problem is that +# "SConscript" in this namespace is no longer a module, it's a global +# function call--or more precisely, an object that implements a global +# function call through the default Environment. Nevertheless, we can +# maintain backwards compatibility for SConscripts that were reaching in +# this way by hanging some attributes off the "SConscript" object here. +SConscript = _SConscript.DefaultEnvironmentCall('SConscript') + +# Make SConscript look enough like the module it used to be so +# that pychecker doesn't barf. +SConscript.__name__ = 'SConscript' + +SConscript.Arguments = ARGUMENTS +SConscript.ArgList = ARGLIST +SConscript.BuildTargets = BUILD_TARGETS +SConscript.CommandLineTargets = COMMAND_LINE_TARGETS +SConscript.DefaultTargets = DEFAULT_TARGETS + +# The global Command() function must be handled differently than the +# global functions for other construction environment methods because +# we want people to be able to use Actions that must expand $TARGET +# and $SOURCE later, when (and if) the Action is invoked to build +# the target(s). We do this with the subst=1 argument, which creates +# a DefaultEnvironmentCall instance that wraps up a normal default +# construction environment that performs variable substitution, not a +# proxy that doesn't. +# +# There's a flaw here, though, because any other $-variables on a command +# line will *also* be expanded, each to a null string, but that should +# only be a problem in the unusual case where someone was passing a '$' +# on a command line and *expected* the $ to get through to the shell +# because they were calling Command() and not env.Command()... This is +# unlikely enough that we're going to leave this as is and cross that +# bridge if someone actually comes to it. +Command = _SConscript.DefaultEnvironmentCall('Command', subst=1) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Sig.py b/engine/SCons/Sig.py new file mode 100644 index 0000000..3d8dcd0 --- /dev/null +++ b/engine/SCons/Sig.py @@ -0,0 +1,63 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Sig.py 4577 2009/12/27 19:43:56 scons" + +__doc__ = """Place-holder for the old SCons.Sig module hierarchy + +This is no longer used, but code out there (such as the NSIS module on +the SCons wiki) may try to import SCons.Sig. If so, we generate a warning +that points them to the line that caused the import, and don't die. + +If someone actually tried to use the sub-modules or functions within +the package (for example, SCons.Sig.MD5.signature()), then they'll still +get an AttributeError, but at least they'll know where to start looking. +""" + +import SCons.Util +import SCons.Warnings + +msg = 'The SCons.Sig module no longer exists.\n' \ + ' Remove the following "import SCons.Sig" line to eliminate this warning:' + +SCons.Warnings.warn(SCons.Warnings.DeprecatedWarning, msg) + +default_calc = None +default_module = None + +class MD5Null(SCons.Util.Null): + def __repr__(self): + return "MD5Null()" + +class TimeStampNull(SCons.Util.Null): + def __repr__(self): + return "TimeStampNull()" + +MD5 = MD5Null() +TimeStamp = TimeStampNull() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Subst.py b/engine/SCons/Subst.py new file mode 100644 index 0000000..803679e --- /dev/null +++ b/engine/SCons/Subst.py @@ -0,0 +1,911 @@ +"""SCons.Subst + +SCons string substitution. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Subst.py 4577 2009/12/27 19:43:56 scons" + +import re +import string +import types +import UserList +import UserString + +import SCons.Errors + +from SCons.Util import is_String, is_Sequence + +# Indexed by the SUBST_* constants below. 
+_strconv = [SCons.Util.to_String_for_subst, + SCons.Util.to_String_for_subst, + SCons.Util.to_String_for_signature] + + + +AllowableExceptions = (IndexError, NameError) + +def SetAllowableExceptions(*excepts): + global AllowableExceptions + AllowableExceptions = filter(None, excepts) + +def raise_exception(exception, target, s): + name = exception.__class__.__name__ + msg = "%s `%s' trying to evaluate `%s'" % (name, exception, s) + if target: + raise SCons.Errors.BuildError, (target[0], msg) + else: + raise SCons.Errors.UserError, msg + + + +class Literal: + """A wrapper for a string. If you use this object wrapped + around a string, then it will be interpreted as literal. + When passed to the command interpreter, all special + characters will be escaped.""" + def __init__(self, lstr): + self.lstr = lstr + + def __str__(self): + return self.lstr + + def escape(self, escape_func): + return escape_func(self.lstr) + + def for_signature(self): + return self.lstr + + def is_literal(self): + return 1 + +class SpecialAttrWrapper: + """This is a wrapper for what we call a 'Node special attribute.' + This is any of the attributes of a Node that we can reference from + Environment variable substitution, such as $TARGET.abspath or + $SOURCES[1].filebase. We implement the same methods as Literal + so we can handle special characters, plus a for_signature method, + such that we can return some canonical string during signature + calculation to avoid unnecessary rebuilds.""" + + def __init__(self, lstr, for_signature=None): + """The for_signature parameter, if supplied, will be the + canonical string we return from for_signature(). Else + we will simply return lstr.""" + self.lstr = lstr + if for_signature: + self.forsig = for_signature + else: + self.forsig = lstr + + def __str__(self): + return self.lstr + + def escape(self, escape_func): + return escape_func(self.lstr) + + def for_signature(self): + return self.forsig + + def is_literal(self): + return 1 + +def quote_spaces(arg): + """Generic function for putting double quotes around any string that + has white space in it.""" + if ' ' in arg or '\t' in arg: + return '"%s"' % arg + else: + return str(arg) + +class CmdStringHolder(UserString.UserString): + """This is a special class used to hold strings generated by + scons_subst() and scons_subst_list(). It defines a special method + escape(). When passed a function with an escape algorithm for a + particular platform, it will return the contained string with the + proper escape sequences inserted. + """ + def __init__(self, cmd, literal=None): + UserString.UserString.__init__(self, cmd) + self.literal = literal + + def is_literal(self): + return self.literal + + def escape(self, escape_func, quote_func=quote_spaces): + """Escape the string with the supplied function. The + function is expected to take an arbitrary string, then + return it with all special characters escaped and ready + for passing to the command interpreter. + + After calling this function, the next call to str() will + return the escaped string. 
+ """ + + if self.is_literal(): + return escape_func(self.data) + elif ' ' in self.data or '\t' in self.data: + return quote_func(self.data) + else: + return self.data + +def escape_list(list, escape_func): + """Escape a list of arguments by running the specified escape_func + on every object in the list that has an escape() method.""" + def escape(obj, escape_func=escape_func): + try: + e = obj.escape + except AttributeError: + return obj + else: + return e(escape_func) + return map(escape, list) + +class NLWrapper: + """A wrapper class that delays turning a list of sources or targets + into a NodeList until it's needed. The specified function supplied + when the object is initialized is responsible for turning raw nodes + into proxies that implement the special attributes like .abspath, + .source, etc. This way, we avoid creating those proxies just + "in case" someone is going to use $TARGET or the like, and only + go through the trouble if we really have to. + + In practice, this might be a wash performance-wise, but it's a little + cleaner conceptually... + """ + + def __init__(self, list, func): + self.list = list + self.func = func + def _return_nodelist(self): + return self.nodelist + def _gen_nodelist(self): + list = self.list + if list is None: + list = [] + elif not is_Sequence(list): + list = [list] + # The map(self.func) call is what actually turns + # a list into appropriate proxies. + self.nodelist = SCons.Util.NodeList(map(self.func, list)) + self._create_nodelist = self._return_nodelist + return self.nodelist + _create_nodelist = _gen_nodelist + + +class Targets_or_Sources(UserList.UserList): + """A class that implements $TARGETS or $SOURCES expansions by in turn + wrapping a NLWrapper. This class handles the different methods used + to access the list, calling the NLWrapper to create proxies on demand. + + Note that we subclass UserList.UserList purely so that the + is_Sequence() function will identify an object of this class as + a list during variable expansion. We're not really using any + UserList.UserList methods in practice. + """ + def __init__(self, nl): + self.nl = nl + def __getattr__(self, attr): + nl = self.nl._create_nodelist() + return getattr(nl, attr) + def __getitem__(self, i): + nl = self.nl._create_nodelist() + return nl[i] + def __getslice__(self, i, j): + nl = self.nl._create_nodelist() + i = max(i, 0); j = max(j, 0) + return nl[i:j] + def __str__(self): + nl = self.nl._create_nodelist() + return str(nl) + def __repr__(self): + nl = self.nl._create_nodelist() + return repr(nl) + +class Target_or_Source: + """A class that implements $TARGET or $SOURCE expansions by in turn + wrapping a NLWrapper. This class handles the different methods used + to access an individual proxy Node, calling the NLWrapper to create + a proxy on demand. + """ + def __init__(self, nl): + self.nl = nl + def __getattr__(self, attr): + nl = self.nl._create_nodelist() + try: + nl0 = nl[0] + except IndexError: + # If there is nothing in the list, then we have no attributes to + # pass through, so raise AttributeError for everything. 
+ raise AttributeError, "NodeList has no attribute: %s" % attr + return getattr(nl0, attr) + def __str__(self): + nl = self.nl._create_nodelist() + if nl: + return str(nl[0]) + return '' + def __repr__(self): + nl = self.nl._create_nodelist() + if nl: + return repr(nl[0]) + return '' + +class NullNodeList(SCons.Util.NullSeq): + def __call__(self, *args, **kwargs): return '' + def __str__(self): return '' + # TODO(1.5): unneeded after new-style classes introduce iterators + def __getitem__(self, i): + raise IndexError + +NullNodesList = NullNodeList() + +def subst_dict(target, source): + """Create a dictionary for substitution of special + construction variables. + + This translates the following special arguments: + + target - the target (object or array of objects), + used to generate the TARGET and TARGETS + construction variables + + source - the source (object or array of objects), + used to generate the SOURCES and SOURCE + construction variables + """ + dict = {} + + if target: + def get_tgt_subst_proxy(thing): + try: + subst_proxy = thing.get_subst_proxy() + except AttributeError: + subst_proxy = thing # probably a string, just return it + return subst_proxy + tnl = NLWrapper(target, get_tgt_subst_proxy) + dict['TARGETS'] = Targets_or_Sources(tnl) + dict['TARGET'] = Target_or_Source(tnl) + + # This is a total cheat, but hopefully this dictionary goes + # away soon anyway. We just let these expand to $TARGETS + # because that's "good enough" for the use of ToolSurrogates + # (see test/ToolSurrogate.py) to generate documentation. + dict['CHANGED_TARGETS'] = '$TARGETS' + dict['UNCHANGED_TARGETS'] = '$TARGETS' + else: + dict['TARGETS'] = NullNodesList + dict['TARGET'] = NullNodesList + + if source: + def get_src_subst_proxy(node): + try: + rfile = node.rfile + except AttributeError: + pass + else: + node = rfile() + try: + return node.get_subst_proxy() + except AttributeError: + return node # probably a String, just return it + snl = NLWrapper(source, get_src_subst_proxy) + dict['SOURCES'] = Targets_or_Sources(snl) + dict['SOURCE'] = Target_or_Source(snl) + + # This is a total cheat, but hopefully this dictionary goes + # away soon anyway. We just let these expand to $TARGETS + # because that's "good enough" for the use of ToolSurrogates + # (see test/ToolSurrogate.py) to generate documentation. + dict['CHANGED_SOURCES'] = '$SOURCES' + dict['UNCHANGED_SOURCES'] = '$SOURCES' + else: + dict['SOURCES'] = NullNodesList + dict['SOURCE'] = NullNodesList + + return dict + +# Constants for the "mode" parameter to scons_subst_list() and +# scons_subst(). SUBST_RAW gives the raw command line. SUBST_CMD +# gives a command line suitable for passing to a shell. SUBST_SIG +# gives a command line appropriate for calculating the signature +# of a command line...if this changes, we should rebuild. +SUBST_CMD = 0 +SUBST_RAW = 1 +SUBST_SIG = 2 + +_rm = re.compile(r'\$[()]') +_remove = re.compile(r'\$\([^\$]*(\$[^\)][^\$]*)*\$\)') + +# Indexed by the SUBST_* constants above. +_regex_remove = [ _rm, None, _remove ] + +def _rm_list(list): + #return [ l for l in list if not l in ('$(', '$)') ] + return filter(lambda l: not l in ('$(', '$)'), list) + +def _remove_list(list): + result = [] + do_append = result.append + for l in list: + if l == '$(': + do_append = lambda x: None + elif l == '$)': + do_append = result.append + else: + do_append(l) + return result + +# Indexed by the SUBST_* constants above. 
+_list_remove = [ _rm_list, None, _remove_list ] + +# Regular expressions for splitting strings and handling substitutions, +# for use by the scons_subst() and scons_subst_list() functions: +# +# The first expression compiled matches all of the $-introduced tokens +# that we need to process in some way, and is used for substitutions. +# The expressions it matches are: +# +# "$$" +# "$(" +# "$)" +# "$variable" [must begin with alphabetic or underscore] +# "${any stuff}" +# +# The second expression compiled is used for splitting strings into tokens +# to be processed, and it matches all of the tokens listed above, plus +# the following that affect how arguments do or don't get joined together: +# +# " " [white space] +# "non-white-space" [without any dollar signs] +# "$" [single dollar sign] +# +_dollar_exps_str = r'\$[\$\(\)]|\$[_a-zA-Z][\.\w]*|\${[^}]*}' +_dollar_exps = re.compile(r'(%s)' % _dollar_exps_str) +_separate_args = re.compile(r'(%s|\s+|[^\s\$]+|\$)' % _dollar_exps_str) + +# This regular expression is used to replace strings of multiple white +# space characters in the string result from the scons_subst() function. +_space_sep = re.compile(r'[\t ]+(?![^{]*})') + +def scons_subst(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None): + """Expand a string or list containing construction variable + substitutions. + + This is the work-horse function for substitutions in file names + and the like. The companion scons_subst_list() function (below) + handles separating command lines into lists of arguments, so see + that function if that's what you're looking for. + """ + if type(strSubst) == types.StringType and string.find(strSubst, '$') < 0: + return strSubst + + class StringSubber: + """A class to construct the results of a scons_subst() call. + + This binds a specific construction environment, mode, target and + source with two methods (substitute() and expand()) that handle + the expansion. + """ + def __init__(self, env, mode, conv, gvars): + self.env = env + self.mode = mode + self.conv = conv + self.gvars = gvars + + def expand(self, s, lvars): + """Expand a single "token" as necessary, returning an + appropriate string containing the expansion. + + This handles expanding different types of things (strings, + lists, callables) appropriately. It calls the wrapper + substitute() method to re-expand things as necessary, so that + the results of expansions of side-by-side strings still get + re-evaluated separately, not smushed together. + """ + if is_String(s): + try: + s0, s1 = s[:2] + except (IndexError, ValueError): + return s + if s0 != '$': + return s + if s1 == '$': + return '$' + elif s1 in '()': + return s + else: + key = s[1:] + if key[0] == '{' or string.find(key, '.') >= 0: + if key[0] == '{': + key = key[1:-1] + try: + s = eval(key, self.gvars, lvars) + except KeyboardInterrupt: + raise + except Exception, e: + if e.__class__ in AllowableExceptions: + return '' + raise_exception(e, lvars['TARGETS'], s) + else: + if lvars.has_key(key): + s = lvars[key] + elif self.gvars.has_key(key): + s = self.gvars[key] + elif not NameError in AllowableExceptions: + raise_exception(NameError(key), lvars['TARGETS'], s) + else: + return '' + + # Before re-expanding the result, handle + # recursive expansion by copying the local + # variable dictionary and overwriting a null + # string for the value of the variable name + # we just expanded. 
+ # + # This could potentially be optimized by only + # copying lvars when s contains more expansions, + # but lvars is usually supposed to be pretty + # small, and deeply nested variable expansions + # are probably more the exception than the norm, + # so it should be tolerable for now. + lv = lvars.copy() + var = string.split(key, '.')[0] + lv[var] = '' + return self.substitute(s, lv) + elif is_Sequence(s): + def func(l, conv=self.conv, substitute=self.substitute, lvars=lvars): + return conv(substitute(l, lvars)) + return map(func, s) + elif callable(s): + try: + s = s(target=lvars['TARGETS'], + source=lvars['SOURCES'], + env=self.env, + for_signature=(self.mode != SUBST_CMD)) + except TypeError: + # This probably indicates that it's a callable + # object that doesn't match our calling arguments + # (like an Action). + if self.mode == SUBST_RAW: + return s + s = self.conv(s) + return self.substitute(s, lvars) + elif s is None: + return '' + else: + return s + + def substitute(self, args, lvars): + """Substitute expansions in an argument or list of arguments. + + This serves as a wrapper for splitting up a string into + separate tokens. + """ + if is_String(args) and not isinstance(args, CmdStringHolder): + args = str(args) # In case it's a UserString. + try: + def sub_match(match, conv=self.conv, expand=self.expand, lvars=lvars): + return conv(expand(match.group(1), lvars)) + result = _dollar_exps.sub(sub_match, args) + except TypeError: + # If the internal conversion routine doesn't return + # strings (it could be overridden to return Nodes, for + # example), then the 1.5.2 re module will throw this + # exception. Back off to a slower, general-purpose + # algorithm that works for all data types. + args = _separate_args.findall(args) + result = [] + for a in args: + result.append(self.conv(self.expand(a, lvars))) + if len(result) == 1: + result = result[0] + else: + result = string.join(map(str, result), '') + return result + else: + return self.expand(args, lvars) + + if conv is None: + conv = _strconv[mode] + + # Doing this every time is a bit of a waste, since the Executor + # has typically already populated the OverrideEnvironment with + # $TARGET/$SOURCE variables. We're keeping this (for now), though, + # because it supports existing behavior that allows us to call + # an Action directly with an arbitrary target+source pair, which + # we use in Tool/tex.py to handle calling $BIBTEX when necessary. + # If we dropped that behavior (or found another way to cover it), + # we could get rid of this call completely and just rely on the + # Executor setting the variables. + if not lvars.has_key('TARGET'): + d = subst_dict(target, source) + if d: + lvars = lvars.copy() + lvars.update(d) + + # We're (most likely) going to eval() things. If Python doesn't + # find a __builtins__ value in the global dictionary used for eval(), + # it copies the current global values for you. Avoid this by + # setting it explicitly and then deleting, so we don't pollute the + # construction environment Dictionary(ies) that are typically used + # for expansion. + gvars['__builtins__'] = __builtins__ + + ss = StringSubber(env, mode, conv, gvars) + result = ss.substitute(strSubst, lvars) + + try: + del gvars['__builtins__'] + except KeyError: + pass + + if is_String(result): + # Remove $(-$) pairs and any stuff in between, + # if that's appropriate. 
+ remove = _regex_remove[mode] + if remove: + result = remove.sub('', result) + if mode != SUBST_RAW: + # Compress strings of white space characters into + # a single space. + result = string.strip(_space_sep.sub(' ', result)) + elif is_Sequence(result): + remove = _list_remove[mode] + if remove: + result = remove(result) + + return result + +#Subst_List_Strings = {} + +def scons_subst_list(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None): + """Substitute construction variables in a string (or list or other + object) and separate the arguments into a command list. + + The companion scons_subst() function (above) handles basic + substitutions within strings, so see that function instead + if that's what you're looking for. + """ +# try: +# Subst_List_Strings[strSubst] = Subst_List_Strings[strSubst] + 1 +# except KeyError: +# Subst_List_Strings[strSubst] = 1 +# import SCons.Debug +# SCons.Debug.caller_trace(1) + class ListSubber(UserList.UserList): + """A class to construct the results of a scons_subst_list() call. + + Like StringSubber, this class binds a specific construction + environment, mode, target and source with two methods + (substitute() and expand()) that handle the expansion. + + In addition, however, this class is used to track the state of + the result(s) we're gathering so we can do the appropriate thing + whenever we have to append another word to the result--start a new + line, start a new word, append to the current word, etc. We do + this by setting the "append" attribute to the right method so + that our wrapper methods only need ever call ListSubber.append(), + and the rest of the object takes care of doing the right thing + internally. + """ + def __init__(self, env, mode, conv, gvars): + UserList.UserList.__init__(self, []) + self.env = env + self.mode = mode + self.conv = conv + self.gvars = gvars + + if self.mode == SUBST_RAW: + self.add_strip = lambda x, s=self: s.append(x) + else: + self.add_strip = lambda x, s=self: None + self.in_strip = None + self.next_line() + + def expand(self, s, lvars, within_list): + """Expand a single "token" as necessary, appending the + expansion to the current result. + + This handles expanding different types of things (strings, + lists, callables) appropriately. It calls the wrapper + substitute() method to re-expand things as necessary, so that + the results of expansions of side-by-side strings still get + re-evaluated separately, not smushed together. + """ + + if is_String(s): + try: + s0, s1 = s[:2] + except (IndexError, ValueError): + self.append(s) + return + if s0 != '$': + self.append(s) + return + if s1 == '$': + self.append('$') + elif s1 == '(': + self.open_strip('$(') + elif s1 == ')': + self.close_strip('$)') + else: + key = s[1:] + if key[0] == '{' or string.find(key, '.') >= 0: + if key[0] == '{': + key = key[1:-1] + try: + s = eval(key, self.gvars, lvars) + except KeyboardInterrupt: + raise + except Exception, e: + if e.__class__ in AllowableExceptions: + return + raise_exception(e, lvars['TARGETS'], s) + else: + if lvars.has_key(key): + s = lvars[key] + elif self.gvars.has_key(key): + s = self.gvars[key] + elif not NameError in AllowableExceptions: + raise_exception(NameError(), lvars['TARGETS'], s) + else: + return + + # Before re-expanding the result, handle + # recursive expansion by copying the local + # variable dictionary and overwriting a null + # string for the value of the variable name + # we just expanded. 
+ lv = lvars.copy() + var = string.split(key, '.')[0] + lv[var] = '' + self.substitute(s, lv, 0) + self.this_word() + elif is_Sequence(s): + for a in s: + self.substitute(a, lvars, 1) + self.next_word() + elif callable(s): + try: + s = s(target=lvars['TARGETS'], + source=lvars['SOURCES'], + env=self.env, + for_signature=(self.mode != SUBST_CMD)) + except TypeError: + # This probably indicates that it's a callable + # object that doesn't match our calling arguments + # (like an Action). + if self.mode == SUBST_RAW: + self.append(s) + return + s = self.conv(s) + self.substitute(s, lvars, within_list) + elif s is None: + self.this_word() + else: + self.append(s) + + def substitute(self, args, lvars, within_list): + """Substitute expansions in an argument or list of arguments. + + This serves as a wrapper for splitting up a string into + separate tokens. + """ + + if is_String(args) and not isinstance(args, CmdStringHolder): + args = str(args) # In case it's a UserString. + args = _separate_args.findall(args) + for a in args: + if a[0] in ' \t\n\r\f\v': + if '\n' in a: + self.next_line() + elif within_list: + self.append(a) + else: + self.next_word() + else: + self.expand(a, lvars, within_list) + else: + self.expand(args, lvars, within_list) + + def next_line(self): + """Arrange for the next word to start a new line. This + is like starting a new word, except that we have to append + another line to the result.""" + UserList.UserList.append(self, []) + self.next_word() + + def this_word(self): + """Arrange for the next word to append to the end of the + current last word in the result.""" + self.append = self.add_to_current_word + + def next_word(self): + """Arrange for the next word to start a new word.""" + self.append = self.add_new_word + + def add_to_current_word(self, x): + """Append the string x to the end of the current last word + in the result. If that is not possible, then just add + it as a new word. Make sure the entire concatenated string + inherits the object attributes of x (in particular, the + escape function) by wrapping it as CmdStringHolder.""" + + if not self.in_strip or self.mode != SUBST_SIG: + try: + current_word = self[-1][-1] + except IndexError: + self.add_new_word(x) + else: + # All right, this is a hack and it should probably + # be refactored out of existence in the future. + # The issue is that we want to smoosh words together + # and make one file name that gets escaped if + # we're expanding something like foo$EXTENSION, + # but we don't want to smoosh them together if + # it's something like >$TARGET, because then we'll + # treat the '>' like it's part of the file name. + # So for now, just hard-code looking for the special + # command-line redirection characters... + try: + last_char = str(current_word)[-1] + except IndexError: + last_char = '\0' + if last_char in '<>|': + self.add_new_word(x) + else: + y = current_word + x + + # We used to treat a word appended to a literal + # as a literal itself, but this caused problems + # with interpreting quotes around space-separated + # targets on command lines. Removing this makes + # none of the "substantive" end-to-end tests fail, + # so we'll take this out but leave it commented + # for now in case there's a problem not covered + # by the test cases and we need to resurrect this. 
+ #literal1 = self.literal(self[-1][-1]) + #literal2 = self.literal(x) + y = self.conv(y) + if is_String(y): + #y = CmdStringHolder(y, literal1 or literal2) + y = CmdStringHolder(y, None) + self[-1][-1] = y + + def add_new_word(self, x): + if not self.in_strip or self.mode != SUBST_SIG: + literal = self.literal(x) + x = self.conv(x) + if is_String(x): + x = CmdStringHolder(x, literal) + self[-1].append(x) + self.append = self.add_to_current_word + + def literal(self, x): + try: + l = x.is_literal + except AttributeError: + return None + else: + return l() + + def open_strip(self, x): + """Handle the "open strip" $( token.""" + self.add_strip(x) + self.in_strip = 1 + + def close_strip(self, x): + """Handle the "close strip" $) token.""" + self.add_strip(x) + self.in_strip = None + + if conv is None: + conv = _strconv[mode] + + # Doing this every time is a bit of a waste, since the Executor + # has typically already populated the OverrideEnvironment with + # $TARGET/$SOURCE variables. We're keeping this (for now), though, + # because it supports existing behavior that allows us to call + # an Action directly with an arbitrary target+source pair, which + # we use in Tool/tex.py to handle calling $BIBTEX when necessary. + # If we dropped that behavior (or found another way to cover it), + # we could get rid of this call completely and just rely on the + # Executor setting the variables. + if not lvars.has_key('TARGET'): + d = subst_dict(target, source) + if d: + lvars = lvars.copy() + lvars.update(d) + + # We're (most likely) going to eval() things. If Python doesn't + # find a __builtins__ value in the global dictionary used for eval(), + # it copies the current global values for you. Avoid this by + # setting it explicitly and then deleting, so we don't pollute the + # construction environment Dictionary(ies) that are typically used + # for expansion. + gvars['__builtins__'] = __builtins__ + + ls = ListSubber(env, mode, conv, gvars) + ls.substitute(strSubst, lvars, 0) + + try: + del gvars['__builtins__'] + except KeyError: + pass + + return ls.data + +def scons_subst_once(strSubst, env, key): + """Perform single (non-recursive) substitution of a single + construction variable keyword. + + This is used when setting a variable when copying or overriding values + in an Environment. We want to capture (expand) the old value before + we override it, so people can do things like: + + env2 = env.Clone(CCFLAGS = '$CCFLAGS -g') + + We do this with some straightforward, brute-force code here... 
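    As a hedged illustration of the intended behavior (the editor's example,
    not taken from the SCons source or test suite), assume an environment
    for which env.get('CCFLAGS') returns '-O2':

        scons_subst_once('$CCFLAGS -g', env, 'CCFLAGS')     # -> '-O2 -g'
        scons_subst_once('$CPPFLAGS -g', env, 'CCFLAGS')    # -> '$CPPFLAGS -g'

    Only the named key is expanded; every other '$' reference is left
    untouched so that it can still be substituted recursively later.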
+ """ + if type(strSubst) == types.StringType and string.find(strSubst, '$') < 0: + return strSubst + + matchlist = ['$' + key, '${' + key + '}'] + val = env.get(key, '') + def sub_match(match, val=val, matchlist=matchlist): + a = match.group(1) + if a in matchlist: + a = val + if is_Sequence(a): + return string.join(map(str, a)) + else: + return str(a) + + if is_Sequence(strSubst): + result = [] + for arg in strSubst: + if is_String(arg): + if arg in matchlist: + arg = val + if is_Sequence(arg): + result.extend(arg) + else: + result.append(arg) + else: + result.append(_dollar_exps.sub(sub_match, arg)) + else: + result.append(arg) + return result + elif is_String(strSubst): + return _dollar_exps.sub(sub_match, strSubst) + else: + return strSubst + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Taskmaster.py b/engine/SCons/Taskmaster.py new file mode 100644 index 0000000..f9e0857 --- /dev/null +++ b/engine/SCons/Taskmaster.py @@ -0,0 +1,1030 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__doc__ = """ +Generic Taskmaster module for the SCons build engine. + +This module contains the primary interface(s) between a wrapping user +interface and the SCons build engine. There are two key classes here: + + Taskmaster + This is the main engine for walking the dependency graph and + calling things to decide what does or doesn't need to be built. + + Task + This is the base class for allowing a wrapping interface to + decide what does or doesn't actually need to be done. The + intention is for a wrapping interface to subclass this as + appropriate for different types of behavior it may need. + + The canonical example is the SCons native Python interface, + which has Task subclasses that handle its specific behavior, + like printing "`foo' is up to date" when a top-level target + doesn't need to be built, and handling the -c option by removing + targets as its "build" action. There is also a separate subclass + for suppressing this output when the -q option is used. + + The Taskmaster instantiates a Task object for each (set of) + target(s) that it decides need to be evaluated and/or built. 
+""" + +__revision__ = "src/engine/SCons/Taskmaster.py 4577 2009/12/27 19:43:56 scons" + +from itertools import chain +import operator +import string +import sys +import traceback + +import SCons.Errors +import SCons.Node +import SCons.Warnings + +StateString = SCons.Node.StateString +NODE_NO_STATE = SCons.Node.no_state +NODE_PENDING = SCons.Node.pending +NODE_EXECUTING = SCons.Node.executing +NODE_UP_TO_DATE = SCons.Node.up_to_date +NODE_EXECUTED = SCons.Node.executed +NODE_FAILED = SCons.Node.failed + + +# A subsystem for recording stats about how different Nodes are handled by +# the main Taskmaster loop. There's no external control here (no need for +# a --debug= option); enable it by changing the value of CollectStats. + +CollectStats = None + +class Stats: + """ + A simple class for holding statistics about the disposition of a + Node by the Taskmaster. If we're collecting statistics, each Node + processed by the Taskmaster gets one of these attached, in which case + the Taskmaster records its decision each time it processes the Node. + (Ideally, that's just once per Node.) + """ + def __init__(self): + """ + Instantiates a Taskmaster.Stats object, initializing all + appropriate counters to zero. + """ + self.considered = 0 + self.already_handled = 0 + self.problem = 0 + self.child_failed = 0 + self.not_built = 0 + self.side_effects = 0 + self.build = 0 + +StatsNodes = [] + +fmt = "%(considered)3d "\ + "%(already_handled)3d " \ + "%(problem)3d " \ + "%(child_failed)3d " \ + "%(not_built)3d " \ + "%(side_effects)3d " \ + "%(build)3d " + +def dump_stats(): + StatsNodes.sort(lambda a, b: cmp(str(a), str(b))) + for n in StatsNodes: + print (fmt % n.stats.__dict__) + str(n) + + + +class Task: + """ + Default SCons build engine task. + + This controls the interaction of the actual building of node + and the rest of the engine. + + This is expected to handle all of the normally-customizable + aspects of controlling a build, so any given application + *should* be able to do what it wants by sub-classing this + class and overriding methods as appropriate. If an application + needs to customze something by sub-classing Taskmaster (or + some other build engine class), we should first try to migrate + that functionality into this class. + + Note that it's generally a good idea for sub-classes to call + these methods explicitly to update state, etc., rather than + roll their own interaction with Taskmaster from scratch. + """ + def __init__(self, tm, targets, top, node): + self.tm = tm + self.targets = targets + self.top = top + self.node = node + self.exc_clear() + + def trace_message(self, method, node, description='node'): + fmt = '%-20s %s %s\n' + return fmt % (method + ':', description, self.tm.trace_node(node)) + + def display(self, message): + """ + Hook to allow the calling interface to display a message. + + This hook gets called as part of preparing a task for execution + (that is, a Node to be built). As part of figuring out what Node + should be built next, the actually target list may be altered, + along with a message describing the alteration. The calling + interface can subclass Task and provide a concrete implementation + of this method to see those messages. + """ + pass + + def prepare(self): + """ + Called just before the task is executed. + + This is mainly intended to give the target Nodes a chance to + unlink underlying files and make all necessary directories before + the Action is actually called to build the targets. 
+ """ + T = self.tm.trace + if T: T.write(self.trace_message('Task.prepare()', self.node)) + + # Now that it's the appropriate time, give the TaskMaster a + # chance to raise any exceptions it encountered while preparing + # this task. + self.exception_raise() + + if self.tm.message: + self.display(self.tm.message) + self.tm.message = None + + # Let the targets take care of any necessary preparations. + # This includes verifying that all of the necessary sources + # and dependencies exist, removing the target file(s), etc. + # + # As of April 2008, the get_executor().prepare() method makes + # sure that all of the aggregate sources necessary to build this + # Task's target(s) exist in one up-front check. The individual + # target t.prepare() methods check that each target's explicit + # or implicit dependencies exists, and also initialize the + # .sconsign info. + executor = self.targets[0].get_executor() + executor.prepare() + for t in executor.get_action_targets(): + t.prepare() + for s in t.side_effects: + s.prepare() + + def get_target(self): + """Fetch the target being built or updated by this task. + """ + return self.node + + def needs_execute(self): + # TODO(deprecate): "return True" is the old default behavior; + # change it to NotImplementedError (after running through the + # Deprecation Cycle) so the desired behavior is explicitly + # determined by which concrete subclass is used. + #raise NotImplementedError + msg = ('Direct use of the Taskmaster.Task class will be deprecated\n' + + '\tin a future release.') + SCons.Warnings.warn(SCons.Warnings.TaskmasterNeedsExecuteWarning, msg) + return True + + def execute(self): + """ + Called to execute the task. + + This method is called from multiple threads in a parallel build, + so only do thread safe stuff here. Do thread unsafe stuff in + prepare(), executed() or failed(). + """ + T = self.tm.trace + if T: T.write(self.trace_message('Task.execute()', self.node)) + + try: + everything_was_cached = 1 + for t in self.targets: + if t.retrieve_from_cache(): + # Call the .built() method without calling the + # .push_to_cache() method, since we just got the + # target from the cache and don't need to push + # it back there. + t.set_state(NODE_EXECUTED) + t.built() + else: + everything_was_cached = 0 + break + if not everything_was_cached: + self.targets[0].build() + except SystemExit: + exc_value = sys.exc_info()[1] + raise SCons.Errors.ExplicitExit(self.targets[0], exc_value.code) + except SCons.Errors.UserError: + raise + except SCons.Errors.BuildError: + raise + except Exception, e: + buildError = SCons.Errors.convert_to_BuildError(e) + buildError.node = self.targets[0] + buildError.exc_info = sys.exc_info() + raise buildError + + def executed_without_callbacks(self): + """ + Called when the task has been successfully executed + and the Taskmaster instance doesn't want to call + the Node's callback methods. + """ + T = self.tm.trace + if T: T.write(self.trace_message('Task.executed_without_callbacks()', + self.node)) + + for t in self.targets: + if t.get_state() == NODE_EXECUTING: + for side_effect in t.side_effects: + side_effect.set_state(NODE_NO_STATE) + t.set_state(NODE_EXECUTED) + + def executed_with_callbacks(self): + """ + Called when the task has been successfully executed and + the Taskmaster instance wants to call the Node's callback + methods. 
+ + This may have been a do-nothing operation (to preserve build + order), so we must check the node's state before deciding whether + it was "built", in which case we call the appropriate Node method. + In any event, we always call "visited()", which will handle any + post-visit actions that must take place regardless of whether + or not the target was an actual built target or a source Node. + """ + T = self.tm.trace + if T: T.write(self.trace_message('Task.executed_with_callbacks()', + self.node)) + + for t in self.targets: + if t.get_state() == NODE_EXECUTING: + for side_effect in t.side_effects: + side_effect.set_state(NODE_NO_STATE) + t.set_state(NODE_EXECUTED) + t.push_to_cache() + t.built() + t.visited() + + executed = executed_with_callbacks + + def failed(self): + """ + Default action when a task fails: stop the build. + + Note: Although this function is normally invoked on nodes in + the executing state, it might also be invoked on up-to-date + nodes when using Configure(). + """ + self.fail_stop() + + def fail_stop(self): + """ + Explicit stop-the-build failure. + + This sets failure status on the target nodes and all of + their dependent parent nodes. + + Note: Although this function is normally invoked on nodes in + the executing state, it might also be invoked on up-to-date + nodes when using Configure(). + """ + T = self.tm.trace + if T: T.write(self.trace_message('Task.failed_stop()', self.node)) + + # Invoke will_not_build() to clean-up the pending children + # list. + self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED)) + + # Tell the taskmaster to not start any new tasks + self.tm.stop() + + # We're stopping because of a build failure, but give the + # calling Task class a chance to postprocess() the top-level + # target under which the build failure occurred. + self.targets = [self.tm.current_top] + self.top = 1 + + def fail_continue(self): + """ + Explicit continue-the-build failure. + + This sets failure status on the target nodes and all of + their dependent parent nodes. + + Note: Although this function is normally invoked on nodes in + the executing state, it might also be invoked on up-to-date + nodes when using Configure(). + """ + T = self.tm.trace + if T: T.write(self.trace_message('Task.failed_continue()', self.node)) + + self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED)) + + def make_ready_all(self): + """ + Marks all targets in a task ready for execution. + + This is used when the interface needs every target Node to be + visited--the canonical example being the "scons -c" option. + """ + T = self.tm.trace + if T: T.write(self.trace_message('Task.make_ready_all()', self.node)) + + self.out_of_date = self.targets[:] + for t in self.targets: + t.disambiguate().set_state(NODE_EXECUTING) + for s in t.side_effects: + s.set_state(NODE_EXECUTING) + + def make_ready_current(self): + """ + Marks all targets in a task ready for execution if any target + is not current. + + This is the default behavior for building only what's necessary. 
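        A wrapping interface chooses between make_ready_all() and this
        default simply by re-binding "make_ready" in its Task subclass, and
        it can pick the failure policy the same way. A hypothetical sketch
        (not the subclasses the SCons command-line interface actually uses):

            class CleanLikeTask(Task):
                # visit every target Node, as a "scons -c"-style task must
                make_ready = Task.make_ready_all

                def failed(self):
                    # keep going after a failure instead of stopping the build
                    self.fail_continue()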
+ """ + T = self.tm.trace + if T: T.write(self.trace_message('Task.make_ready_current()', + self.node)) + + self.out_of_date = [] + needs_executing = False + for t in self.targets: + try: + t.disambiguate().make_ready() + is_up_to_date = not t.has_builder() or \ + (not t.always_build and t.is_up_to_date()) + except EnvironmentError, e: + raise SCons.Errors.BuildError(node=t, errstr=e.strerror, filename=e.filename) + + if not is_up_to_date: + self.out_of_date.append(t) + needs_executing = True + + if needs_executing: + for t in self.targets: + t.set_state(NODE_EXECUTING) + for s in t.side_effects: + s.set_state(NODE_EXECUTING) + else: + for t in self.targets: + # We must invoke visited() to ensure that the node + # information has been computed before allowing the + # parent nodes to execute. (That could occur in a + # parallel build...) + t.visited() + t.set_state(NODE_UP_TO_DATE) + + make_ready = make_ready_current + + def postprocess(self): + """ + Post-processes a task after it's been executed. + + This examines all the targets just built (or not, we don't care + if the build was successful, or even if there was no build + because everything was up-to-date) to see if they have any + waiting parent Nodes, or Nodes waiting on a common side effect, + that can be put back on the candidates list. + """ + T = self.tm.trace + if T: T.write(self.trace_message('Task.postprocess()', self.node)) + + # We may have built multiple targets, some of which may have + # common parents waiting for this build. Count up how many + # targets each parent was waiting for so we can subtract the + # values later, and so we *don't* put waiting side-effect Nodes + # back on the candidates list if the Node is also a waiting + # parent. + + targets = set(self.targets) + + pending_children = self.tm.pending_children + parents = {} + for t in targets: + # A node can only be in the pending_children set if it has + # some waiting_parents. + if t.waiting_parents: + if T: T.write(self.trace_message('Task.postprocess()', + t, + 'removing')) + pending_children.discard(t) + for p in t.waiting_parents: + parents[p] = parents.get(p, 0) + 1 + + for t in targets: + for s in t.side_effects: + if s.get_state() == NODE_EXECUTING: + s.set_state(NODE_NO_STATE) + for p in s.waiting_parents: + parents[p] = parents.get(p, 0) + 1 + for p in s.waiting_s_e: + if p.ref_count == 0: + self.tm.candidates.append(p) + + for p, subtract in parents.items(): + p.ref_count = p.ref_count - subtract + if T: T.write(self.trace_message('Task.postprocess()', + p, + 'adjusted parent ref count')) + if p.ref_count == 0: + self.tm.candidates.append(p) + + for t in targets: + t.postprocess() + + # Exception handling subsystem. + # + # Exceptions that occur while walking the DAG or examining Nodes + # must be raised, but must be raised at an appropriate time and in + # a controlled manner so we can, if necessary, recover gracefully, + # possibly write out signature information for Nodes we've updated, + # etc. This is done by having the Taskmaster tell us about the + # exception, and letting + + def exc_info(self): + """ + Returns info about a recorded exception. + """ + return self.exception + + def exc_clear(self): + """ + Clears any recorded exception. + + This also changes the "exception_raise" attribute to point + to the appropriate do-nothing method. 
+ """ + self.exception = (None, None, None) + self.exception_raise = self._no_exception_to_raise + + def exception_set(self, exception=None): + """ + Records an exception to be raised at the appropriate time. + + This also changes the "exception_raise" attribute to point + to the method that will, in fact + """ + if not exception: + exception = sys.exc_info() + self.exception = exception + self.exception_raise = self._exception_raise + + def _no_exception_to_raise(self): + pass + + def _exception_raise(self): + """ + Raises a pending exception that was recorded while getting a + Task ready for execution. + """ + exc = self.exc_info()[:] + try: + exc_type, exc_value, exc_traceback = exc + except ValueError: + exc_type, exc_value = exc + exc_traceback = None + raise exc_type, exc_value, exc_traceback + +class AlwaysTask(Task): + def needs_execute(self): + """ + Always returns True (indicating this Task should always + be executed). + + Subclasses that need this behavior (as opposed to the default + of only executing Nodes that are out of date w.r.t. their + dependencies) can use this as follows: + + class MyTaskSubclass(SCons.Taskmaster.Task): + needs_execute = SCons.Taskmaster.Task.execute_always + """ + return True + +class OutOfDateTask(Task): + def needs_execute(self): + """ + Returns True (indicating this Task should be executed) if this + Task's target state indicates it needs executing, which has + already been determined by an earlier up-to-date check. + """ + return self.targets[0].get_state() == SCons.Node.executing + + +def find_cycle(stack, visited): + if stack[-1] in visited: + return None + visited.add(stack[-1]) + for n in stack[-1].waiting_parents: + stack.append(n) + if stack[0] == stack[-1]: + return stack + if find_cycle(stack, visited): + return stack + stack.pop() + return None + + +class Taskmaster: + """ + The Taskmaster for walking the dependency DAG. + """ + + def __init__(self, targets=[], tasker=None, order=None, trace=None): + self.original_top = targets + self.top_targets_left = targets[:] + self.top_targets_left.reverse() + self.candidates = [] + if tasker is None: + tasker = OutOfDateTask + self.tasker = tasker + if not order: + order = lambda l: l + self.order = order + self.message = None + self.trace = trace + self.next_candidate = self.find_next_candidate + self.pending_children = set() + + def find_next_candidate(self): + """ + Returns the next candidate Node for (potential) evaluation. + + The candidate list (really a stack) initially consists of all of + the top-level (command line) targets provided when the Taskmaster + was initialized. While we walk the DAG, visiting Nodes, all the + children that haven't finished processing get pushed on to the + candidate list. Each child can then be popped and examined in + turn for whether *their* children are all up-to-date, in which + case a Task will be created for their actual evaluation and + potential building. + + Here is where we also allow candidate Nodes to alter the list of + Nodes that should be examined. This is used, for example, when + invoking SCons in a source directory. A source directory Node can + return its corresponding build directory Node, essentially saying, + "Hey, you really need to build this thing over here instead." 
+ """ + try: + return self.candidates.pop() + except IndexError: + pass + try: + node = self.top_targets_left.pop() + except IndexError: + return None + self.current_top = node + alt, message = node.alter_targets() + if alt: + self.message = message + self.candidates.append(node) + self.candidates.extend(self.order(alt)) + node = self.candidates.pop() + return node + + def no_next_candidate(self): + """ + Stops Taskmaster processing by not returning a next candidate. + + Note that we have to clean-up the Taskmaster candidate list + because the cycle detection depends on the fact all nodes have + been processed somehow. + """ + while self.candidates: + candidates = self.candidates + self.candidates = [] + self.will_not_build(candidates) + return None + + def _validate_pending_children(self): + """ + Validate the content of the pending_children set. Assert if an + internal error is found. + + This function is used strictly for debugging the taskmaster by + checking that no invariants are violated. It is not used in + normal operation. + + The pending_children set is used to detect cycles in the + dependency graph. We call a "pending child" a child that is + found in the "pending" state when checking the dependencies of + its parent node. + + A pending child can occur when the Taskmaster completes a loop + through a cycle. For example, lets imagine a graph made of + three node (A, B and C) making a cycle. The evaluation starts + at node A. The taskmaster first consider whether node A's + child B is up-to-date. Then, recursively, node B needs to + check whether node C is up-to-date. This leaves us with a + dependency graph looking like: + + Next candidate \ + \ + Node A (Pending) --> Node B(Pending) --> Node C (NoState) + ^ | + | | + +-------------------------------------+ + + Now, when the Taskmaster examines the Node C's child Node A, + it finds that Node A is in the "pending" state. Therefore, + Node A is a pending child of node C. + + Pending children indicate that the Taskmaster has potentially + loop back through a cycle. We say potentially because it could + also occur when a DAG is evaluated in parallel. For example, + consider the following graph: + + + Node A (Pending) --> Node B(Pending) --> Node C (Pending) --> ... + | ^ + | | + +----------> Node D (NoState) --------+ + / + Next candidate / + + The Taskmaster first evaluates the nodes A, B, and C and + starts building some children of node C. Assuming, that the + maximum parallel level has not been reached, the Taskmaster + will examine Node D. It will find that Node C is a pending + child of Node D. + + In summary, evaluating a graph with a cycle will always + involve a pending child at one point. A pending child might + indicate either a cycle or a diamond-shaped DAG. Only a + fraction of the nodes ends-up being a "pending child" of + another node. This keeps the pending_children set small in + practice. + + We can differentiate between the two cases if we wait until + the end of the build. At this point, all the pending children + nodes due to a diamond-shaped DAG will have been properly + built (or will have failed to build). But, the pending + children involved in a cycle will still be in the pending + state. + + The taskmaster removes nodes from the pending_children set as + soon as a pending_children node moves out of the pending + state. This also helps to keep the pending_children set small. 
+ """ + + for n in self.pending_children: + assert n.state in (NODE_PENDING, NODE_EXECUTING), \ + (str(n), StateString[n.state]) + assert len(n.waiting_parents) != 0, (str(n), len(n.waiting_parents)) + for p in n.waiting_parents: + assert p.ref_count > 0, (str(n), str(p), p.ref_count) + + + def trace_message(self, message): + return 'Taskmaster: %s\n' % message + + def trace_node(self, node): + return '<%-10s %-3s %s>' % (StateString[node.get_state()], + node.ref_count, + repr(str(node))) + + def _find_next_ready_node(self): + """ + Finds the next node that is ready to be built. + + This is *the* main guts of the DAG walk. We loop through the + list of candidates, looking for something that has no un-built + children (i.e., that is a leaf Node or has dependencies that are + all leaf Nodes or up-to-date). Candidate Nodes are re-scanned + (both the target Node itself and its sources, which are always + scanned in the context of a given target) to discover implicit + dependencies. A Node that must wait for some children to be + built will be put back on the candidates list after the children + have finished building. A Node that has been put back on the + candidates list in this way may have itself (or its sources) + re-scanned, in order to handle generated header files (e.g.) and + the implicit dependencies therein. + + Note that this method does not do any signature calculation or + up-to-date check itself. All of that is handled by the Task + class. This is purely concerned with the dependency graph walk. + """ + + self.ready_exc = None + + T = self.trace + if T: T.write('\n' + self.trace_message('Looking for a node to evaluate')) + + while 1: + node = self.next_candidate() + if node is None: + if T: T.write(self.trace_message('No candidate anymore.') + '\n') + return None + + node = node.disambiguate() + state = node.get_state() + + # For debugging only: + # + # try: + # self._validate_pending_children() + # except: + # self.ready_exc = sys.exc_info() + # return node + + if CollectStats: + if not hasattr(node, 'stats'): + node.stats = Stats() + StatsNodes.append(node) + S = node.stats + S.considered = S.considered + 1 + else: + S = None + + if T: T.write(self.trace_message(' Considering node %s and its children:' % self.trace_node(node))) + + if state == NODE_NO_STATE: + # Mark this node as being on the execution stack: + node.set_state(NODE_PENDING) + elif state > NODE_PENDING: + # Skip this node if it has already been evaluated: + if S: S.already_handled = S.already_handled + 1 + if T: T.write(self.trace_message(' already handled (executed)')) + continue + + executor = node.get_executor() + + try: + children = executor.get_all_children() + except SystemExit: + exc_value = sys.exc_info()[1] + e = SCons.Errors.ExplicitExit(node, exc_value.code) + self.ready_exc = (SCons.Errors.ExplicitExit, e) + if T: T.write(self.trace_message(' SystemExit')) + return node + except Exception, e: + # We had a problem just trying to figure out the + # children (like a child couldn't be linked in to a + # VariantDir, or a Scanner threw something). Arrange to + # raise the exception when the Task is "executed." 
+ self.ready_exc = sys.exc_info() + if S: S.problem = S.problem + 1 + if T: T.write(self.trace_message(' exception %s while scanning children.\n' % e)) + return node + + children_not_visited = [] + children_pending = set() + children_not_ready = [] + children_failed = False + + for child in chain(executor.get_all_prerequisites(), children): + childstate = child.get_state() + + if T: T.write(self.trace_message(' ' + self.trace_node(child))) + + if childstate == NODE_NO_STATE: + children_not_visited.append(child) + elif childstate == NODE_PENDING: + children_pending.add(child) + elif childstate == NODE_FAILED: + children_failed = True + + if childstate <= NODE_EXECUTING: + children_not_ready.append(child) + + + # These nodes have not even been visited yet. Add + # them to the list so that on some next pass we can + # take a stab at evaluating them (or their children). + children_not_visited.reverse() + self.candidates.extend(self.order(children_not_visited)) + #if T and children_not_visited: + # T.write(self.trace_message(' adding to candidates: %s' % map(str, children_not_visited))) + # T.write(self.trace_message(' candidates now: %s\n' % map(str, self.candidates))) + + # Skip this node if any of its children have failed. + # + # This catches the case where we're descending a top-level + # target and one of our children failed while trying to be + # built by a *previous* descent of an earlier top-level + # target. + # + # It can also occur if a node is reused in multiple + # targets. One first descends though the one of the + # target, the next time occurs through the other target. + # + # Note that we can only have failed_children if the + # --keep-going flag was used, because without it the build + # will stop before diving in the other branch. + # + # Note that even if one of the children fails, we still + # added the other children to the list of candidate nodes + # to keep on building (--keep-going). + if children_failed: + for n in executor.get_action_targets(): + n.set_state(NODE_FAILED) + + if S: S.child_failed = S.child_failed + 1 + if T: T.write(self.trace_message('****** %s\n' % self.trace_node(node))) + continue + + if children_not_ready: + for child in children_not_ready: + # We're waiting on one or more derived targets + # that have not yet finished building. + if S: S.not_built = S.not_built + 1 + + # Add this node to the waiting parents lists of + # anything we're waiting on, with a reference + # count so we can be put back on the list for + # re-evaluation when they've all finished. + node.ref_count = node.ref_count + child.add_to_waiting_parents(node) + if T: T.write(self.trace_message(' adjusted ref count: %s, child %s' % + (self.trace_node(node), repr(str(child))))) + + if T: + for pc in children_pending: + T.write(self.trace_message(' adding %s to the pending children set\n' % + self.trace_node(pc))) + self.pending_children = self.pending_children | children_pending + + continue + + # Skip this node if it has side-effects that are + # currently being built: + wait_side_effects = False + for se in executor.get_action_side_effects(): + if se.get_state() == NODE_EXECUTING: + se.add_to_waiting_s_e(node) + wait_side_effects = True + + if wait_side_effects: + if S: S.side_effects = S.side_effects + 1 + continue + + # The default when we've gotten through all of the checks above: + # this node is ready to be built. 
+ if S: S.build = S.build + 1 + if T: T.write(self.trace_message('Evaluating %s\n' % + self.trace_node(node))) + + # For debugging only: + # + # try: + # self._validate_pending_children() + # except: + # self.ready_exc = sys.exc_info() + # return node + + return node + + return None + + def next_task(self): + """ + Returns the next task to be executed. + + This simply asks for the next Node to be evaluated, and then wraps + it in the specific Task subclass with which we were initialized. + """ + node = self._find_next_ready_node() + + if node is None: + return None + + tlist = node.get_executor().get_all_targets() + + task = self.tasker(self, tlist, node in self.original_top, node) + try: + task.make_ready() + except: + # We had a problem just trying to get this task ready (like + # a child couldn't be linked in to a VariantDir when deciding + # whether this node is current). Arrange to raise the + # exception when the Task is "executed." + self.ready_exc = sys.exc_info() + + if self.ready_exc: + task.exception_set(self.ready_exc) + + self.ready_exc = None + + return task + + def will_not_build(self, nodes, node_func=lambda n: None): + """ + Perform clean-up about nodes that will never be built. Invokes + a user defined function on all of these nodes (including all + of their parents). + """ + + T = self.trace + + pending_children = self.pending_children + + to_visit = set(nodes) + pending_children = pending_children - to_visit + + if T: + for n in nodes: + T.write(self.trace_message(' removing node %s from the pending children set\n' % + self.trace_node(n))) + try: + while 1: + try: + node = to_visit.pop() + except AttributeError: + # Python 1.5.2 + if len(to_visit): + node = to_visit[0] + to_visit.remove(node) + else: + break + + node_func(node) + + # Prune recursion by flushing the waiting children + # list immediately. + parents = node.waiting_parents + node.waiting_parents = set() + + to_visit = to_visit | parents + pending_children = pending_children - parents + + for p in parents: + p.ref_count = p.ref_count - 1 + if T: T.write(self.trace_message(' removing parent %s from the pending children set\n' % + self.trace_node(p))) + except KeyError: + # The container to_visit has been emptied. + pass + + # We have the stick back the pending_children list into the + # task master because the python 1.5.2 compatibility does not + # allow us to use in-place updates + self.pending_children = pending_children + + def stop(self): + """ + Stops the current build completely. + """ + self.next_candidate = self.no_next_candidate + + def cleanup(self): + """ + Check for dependency cycles. + """ + if not self.pending_children: + return + + # TODO(1.5) + #nclist = [ (n, find_cycle([n], set())) for n in self.pending_children ] + nclist = map(lambda n: (n, find_cycle([n], set())), self.pending_children) + + # TODO(1.5) + #genuine_cycles = [ + # node for node, cycle in nclist + # if cycle or node.get_state() != NODE_EXECUTED + #] + genuine_cycles = filter(lambda t: t[1] or t[0].get_state() != NODE_EXECUTED, nclist) + if not genuine_cycles: + # All of the "cycles" found were single nodes in EXECUTED state, + # which is to say, they really weren't cycles. Just return. 
+ return + + desc = 'Found dependency cycle(s):\n' + for node, cycle in nclist: + if cycle: + desc = desc + " " + string.join(map(str, cycle), " -> ") + "\n" + else: + desc = desc + \ + " Internal Error: no cycle found for node %s (%s) in state %s\n" % \ + (node, repr(node), StateString[node.get_state()]) + + raise SCons.Errors.UserError, desc + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/386asm.py b/engine/SCons/Tool/386asm.py new file mode 100644 index 0000000..ed7672b --- /dev/null +++ b/engine/SCons/Tool/386asm.py @@ -0,0 +1,61 @@ +"""SCons.Tool.386asm + +Tool specification for the 386ASM assembler for the Phar Lap ETS embedded +operating system. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/386asm.py 4577 2009/12/27 19:43:56 scons" + +from SCons.Tool.PharLapCommon import addPharLapPaths +import SCons.Util + +as_module = __import__('as', globals(), locals(), []) + +def generate(env): + """Add Builders and construction variables for ar to an Environment.""" + as_module.generate(env) + + env['AS'] = '386asm' + env['ASFLAGS'] = SCons.Util.CLVar('') + env['ASPPFLAGS'] = '$ASFLAGS' + env['ASCOM'] = '$AS $ASFLAGS $SOURCES -o $TARGET' + env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS $SOURCES -o $TARGET' + + addPharLapPaths(env) + +def exists(env): + return env.Detect('386asm') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/BitKeeper.py b/engine/SCons/Tool/BitKeeper.py new file mode 100644 index 0000000..42cd096 --- /dev/null +++ b/engine/SCons/Tool/BitKeeper.py @@ -0,0 +1,65 @@ +"""SCons.Tool.BitKeeper.py + +Tool-specific initialization for the BitKeeper source code control +system. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
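Tool modules such as 386asm above and BitKeeper here follow one simple
convention: a generate(env) function that adds construction variables
(and possibly builders) to an Environment, and an exists(env) function
that reports whether the underlying program can be found. A minimal,
hypothetical tool written against the same convention might look like
this ("frobnicate" is an invented program name, not a real SCons tool):

    import SCons.Util

    def generate(env):
        env['FROBNICATE'] = 'frobnicate'
        env['FROBNICATEFLAGS'] = SCons.Util.CLVar('')
        env['FROBNICATECOM'] = '$FROBNICATE $FROBNICATEFLAGS $SOURCES -o $TARGET'

    def exists(env):
        return env.Detect('frobnicate')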
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/BitKeeper.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Action +import SCons.Builder +import SCons.Util + +def generate(env): + """Add a Builder factory function and construction variables for + BitKeeper to an Environment.""" + + def BitKeeperFactory(env=env): + """ """ + act = SCons.Action.Action("$BITKEEPERCOM", "$BITKEEPERCOMSTR") + return SCons.Builder.Builder(action = act, env = env) + + #setattr(env, 'BitKeeper', BitKeeperFactory) + env.BitKeeper = BitKeeperFactory + + env['BITKEEPER'] = 'bk' + env['BITKEEPERGET'] = '$BITKEEPER get' + env['BITKEEPERGETFLAGS'] = SCons.Util.CLVar('') + env['BITKEEPERCOM'] = '$BITKEEPERGET $BITKEEPERGETFLAGS $TARGET' + +def exists(env): + return env.Detect('bk') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/CVS.py b/engine/SCons/Tool/CVS.py new file mode 100644 index 0000000..412c3f1 --- /dev/null +++ b/engine/SCons/Tool/CVS.py @@ -0,0 +1,73 @@ +"""SCons.Tool.CVS.py + +Tool-specific initialization for CVS. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/CVS.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Action +import SCons.Builder +import SCons.Util + +def generate(env): + """Add a Builder factory function and construction variables for + CVS to an Environment.""" + + def CVSFactory(repos, module='', env=env): + """ """ + # fail if repos is not an absolute path name? + if module != '': + # Don't use os.path.join() because the name we fetch might + # be across a network and must use POSIX slashes as separators. + module = module + '/' + env['CVSCOM'] = '$CVS $CVSFLAGS co $CVSCOFLAGS -d ${TARGET.dir} $CVSMODULE${TARGET.posix}' + act = SCons.Action.Action('$CVSCOM', '$CVSCOMSTR') + return SCons.Builder.Builder(action = act, + env = env, + CVSREPOSITORY = repos, + CVSMODULE = module) + + #setattr(env, 'CVS', CVSFactory) + env.CVS = CVSFactory + + env['CVS'] = 'cvs' + env['CVSFLAGS'] = SCons.Util.CLVar('-d $CVSREPOSITORY') + env['CVSCOFLAGS'] = SCons.Util.CLVar('') + env['CVSCOM'] = '$CVS $CVSFLAGS co $CVSCOFLAGS ${TARGET.posix}' + +def exists(env): + return env.Detect('cvs') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/FortranCommon.py b/engine/SCons/Tool/FortranCommon.py new file mode 100644 index 0000000..ff01a07 --- /dev/null +++ b/engine/SCons/Tool/FortranCommon.py @@ -0,0 +1,247 @@ +"""SCons.Tool.FortranCommon + +Stuff for processing Fortran, common to all fortran dialects. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/FortranCommon.py 4577 2009/12/27 19:43:56 scons" + +import re +import string +import os.path + +import SCons.Action +import SCons.Defaults +import SCons.Scanner.Fortran +import SCons.Tool +import SCons.Util + +def isfortran(env, source): + """Return 1 if any of code in source has fortran files in it, 0 + otherwise.""" + try: + fsuffixes = env['FORTRANSUFFIXES'] + except KeyError: + # If no FORTRANSUFFIXES, no fortran tool, so there is no need to look + # for fortran sources. 
+ return 0 + + if not source: + # Source might be None for unusual cases like SConf. + return 0 + for s in source: + if s.sources: + ext = os.path.splitext(str(s.sources[0]))[1] + if ext in fsuffixes: + return 1 + return 0 + +def _fortranEmitter(target, source, env): + node = source[0].rfile() + if not node.exists() and not node.is_derived(): + print "Could not locate " + str(node.name) + return ([], []) + mod_regex = """(?i)^\s*MODULE\s+(?!PROCEDURE)(\w+)""" + cre = re.compile(mod_regex,re.M) + # Retrieve all USE'd module names + modules = cre.findall(node.get_text_contents()) + # Remove unique items from the list + modules = SCons.Util.unique(modules) + # Convert module name to a .mod filename + suffix = env.subst('$FORTRANMODSUFFIX', target=target, source=source) + moddir = env.subst('$FORTRANMODDIR', target=target, source=source) + modules = map(lambda x, s=suffix: string.lower(x) + s, modules) + for m in modules: + target.append(env.fs.File(m, moddir)) + return (target, source) + +def FortranEmitter(target, source, env): + target, source = _fortranEmitter(target, source, env) + return SCons.Defaults.StaticObjectEmitter(target, source, env) + +def ShFortranEmitter(target, source, env): + target, source = _fortranEmitter(target, source, env) + return SCons.Defaults.SharedObjectEmitter(target, source, env) + +def ComputeFortranSuffixes(suffixes, ppsuffixes): + """suffixes are fortran source files, and ppsuffixes the ones to be + pre-processed. Both should be sequences, not strings.""" + assert len(suffixes) > 0 + s = suffixes[0] + sup = string.upper(s) + upper_suffixes = map(string.upper, suffixes) + if SCons.Util.case_sensitive_suffixes(s, sup): + ppsuffixes.extend(upper_suffixes) + else: + suffixes.extend(upper_suffixes) + +def CreateDialectActions(dialect): + """Create dialect specific actions.""" + CompAction = SCons.Action.Action('$%sCOM ' % dialect, '$%sCOMSTR' % dialect) + CompPPAction = SCons.Action.Action('$%sPPCOM ' % dialect, '$%sPPCOMSTR' % dialect) + ShCompAction = SCons.Action.Action('$SH%sCOM ' % dialect, '$SH%sCOMSTR' % dialect) + ShCompPPAction = SCons.Action.Action('$SH%sPPCOM ' % dialect, '$SH%sPPCOMSTR' % dialect) + + return CompAction, CompPPAction, ShCompAction, ShCompPPAction + +def DialectAddToEnv(env, dialect, suffixes, ppsuffixes, support_module = 0): + """Add dialect specific construction variables.""" + ComputeFortranSuffixes(suffixes, ppsuffixes) + + fscan = SCons.Scanner.Fortran.FortranScan("%sPATH" % dialect) + + for suffix in suffixes + ppsuffixes: + SCons.Tool.SourceFileScanner.add_scanner(suffix, fscan) + + env.AppendUnique(FORTRANSUFFIXES = suffixes + ppsuffixes) + + compaction, compppaction, shcompaction, shcompppaction = \ + CreateDialectActions(dialect) + + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + + for suffix in suffixes: + static_obj.add_action(suffix, compaction) + shared_obj.add_action(suffix, shcompaction) + static_obj.add_emitter(suffix, FortranEmitter) + shared_obj.add_emitter(suffix, ShFortranEmitter) + + for suffix in ppsuffixes: + static_obj.add_action(suffix, compppaction) + shared_obj.add_action(suffix, shcompppaction) + static_obj.add_emitter(suffix, FortranEmitter) + shared_obj.add_emitter(suffix, ShFortranEmitter) + + if not env.has_key('%sFLAGS' % dialect): + env['%sFLAGS' % dialect] = SCons.Util.CLVar('') + + if not env.has_key('SH%sFLAGS' % dialect): + env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS' % dialect) + + # If a tool does not define fortran prefix/suffix for include path, use C ones + if not 
env.has_key('INC%sPREFIX' % dialect): + env['INC%sPREFIX' % dialect] = '$INCPREFIX' + + if not env.has_key('INC%sSUFFIX' % dialect): + env['INC%sSUFFIX' % dialect] = '$INCSUFFIX' + + env['_%sINCFLAGS' % dialect] = '$( ${_concat(INC%sPREFIX, %sPATH, INC%sSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)' % (dialect, dialect, dialect) + + if support_module == 1: + env['%sCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect) + env['%sPPCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect) + env['SH%sCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect) + env['SH%sPPCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect) + else: + env['%sCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect) + env['%sPPCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect) + env['SH%sCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect) + env['SH%sPPCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect) + +def add_fortran_to_env(env): + """Add Builders and construction variables for Fortran to an Environment.""" + try: + FortranSuffixes = env['FORTRANFILESUFFIXES'] + except KeyError: + FortranSuffixes = ['.f', '.for', '.ftn'] + + #print "Adding %s to fortran suffixes" % FortranSuffixes + try: + FortranPPSuffixes = env['FORTRANPPFILESUFFIXES'] + except KeyError: + FortranPPSuffixes = ['.fpp', '.FPP'] + + DialectAddToEnv(env, "FORTRAN", FortranSuffixes, + FortranPPSuffixes, support_module = 1) + + env['FORTRANMODPREFIX'] = '' # like $LIBPREFIX + env['FORTRANMODSUFFIX'] = '.mod' # like $LIBSUFFIX + + env['FORTRANMODDIR'] = '' # where the compiler should place .mod files + env['FORTRANMODDIRPREFIX'] = '' # some prefix to $FORTRANMODDIR - similar to $INCPREFIX + env['FORTRANMODDIRSUFFIX'] = '' # some suffix to $FORTRANMODDIR - similar to $INCSUFFIX + env['_FORTRANMODFLAG'] = '$( ${_concat(FORTRANMODDIRPREFIX, FORTRANMODDIR, FORTRANMODDIRSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)' + +def add_f77_to_env(env): + """Add Builders and construction variables for f77 to an Environment.""" + try: + F77Suffixes = env['F77FILESUFFIXES'] + except KeyError: + F77Suffixes = ['.f77'] + + #print "Adding %s to f77 suffixes" % F77Suffixes + try: + F77PPSuffixes = env['F77PPFILESUFFIXES'] + except KeyError: + F77PPSuffixes = [] + + DialectAddToEnv(env, "F77", F77Suffixes, F77PPSuffixes) + +def add_f90_to_env(env): + """Add Builders and construction variables for f90 to an Environment.""" + try: + F90Suffixes = env['F90FILESUFFIXES'] + except KeyError: + F90Suffixes = ['.f90'] + + #print "Adding %s to f90 suffixes" % F90Suffixes + try: + F90PPSuffixes = env['F90PPFILESUFFIXES'] + except KeyError: + F90PPSuffixes = [] + + DialectAddToEnv(env, "F90", F90Suffixes, F90PPSuffixes, + support_module = 1) + +def add_f95_to_env(env): + """Add Builders and construction variables for f95 to an Environment.""" + try: + F95Suffixes = env['F95FILESUFFIXES'] + except KeyError: + F95Suffixes = ['.f95'] + + #print "Adding %s to f95 suffixes" % F95Suffixes + try: + F95PPSuffixes = env['F95PPFILESUFFIXES'] + except KeyError: + 
F95PPSuffixes = [] + + DialectAddToEnv(env, "F95", F95Suffixes, F95PPSuffixes, + support_module = 1) + +def add_all_to_env(env): + """Add builders and construction variables for all supported fortran + dialects.""" + add_fortran_to_env(env) + add_f77_to_env(env) + add_f90_to_env(env) + add_f95_to_env(env) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/JavaCommon.py b/engine/SCons/Tool/JavaCommon.py new file mode 100644 index 0000000..76ae8c6 --- /dev/null +++ b/engine/SCons/Tool/JavaCommon.py @@ -0,0 +1,324 @@ +"""SCons.Tool.JavaCommon + +Stuff for processing Java. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/JavaCommon.py 4577 2009/12/27 19:43:56 scons" + +import os +import os.path +import re +import string + +java_parsing = 1 + +default_java_version = '1.4' + +if java_parsing: + # Parse Java files for class names. + # + # This is a really cool parser from Charles Crain + # that finds appropriate class names in Java source. + + # A regular expression that will find, in a java file: + # newlines; + # double-backslashes; + # a single-line comment "//"; + # single or double quotes preceeded by a backslash; + # single quotes, double quotes, open or close braces, semi-colons, + # periods, open or close parentheses; + # floating-point numbers; + # any alphanumeric token (keyword, class name, specifier); + # any alphanumeric token surrounded by angle brackets (generics); + # the multi-line comment begin and end tokens /* and */; + # array declarations "[]". 
+ _reToken = re.compile(r'(\n|\\\\|//|\\[\'"]|[\'"\{\}\;\.\(\)]|' + + r'\d*\.\d*|[A-Za-z_][\w\$\.]*|<[A-Za-z_]\w+>|' + + r'/\*|\*/|\[\])') + + class OuterState: + """The initial state for parsing a Java file for classes, + interfaces, and anonymous inner classes.""" + def __init__(self, version=default_java_version): + + if not version in ('1.1', '1.2', '1.3','1.4', '1.5', '1.6', + '5', '6'): + msg = "Java version %s not supported" % version + raise NotImplementedError, msg + + self.version = version + self.listClasses = [] + self.listOutputs = [] + self.stackBrackets = [] + self.brackets = 0 + self.nextAnon = 1 + self.localClasses = [] + self.stackAnonClassBrackets = [] + self.anonStacksStack = [[0]] + self.package = None + + def trace(self): + pass + + def __getClassState(self): + try: + return self.classState + except AttributeError: + ret = ClassState(self) + self.classState = ret + return ret + + def __getPackageState(self): + try: + return self.packageState + except AttributeError: + ret = PackageState(self) + self.packageState = ret + return ret + + def __getAnonClassState(self): + try: + return self.anonState + except AttributeError: + self.outer_state = self + ret = SkipState(1, AnonClassState(self)) + self.anonState = ret + return ret + + def __getSkipState(self): + try: + return self.skipState + except AttributeError: + ret = SkipState(1, self) + self.skipState = ret + return ret + + def __getAnonStack(self): + return self.anonStacksStack[-1] + + def openBracket(self): + self.brackets = self.brackets + 1 + + def closeBracket(self): + self.brackets = self.brackets - 1 + if len(self.stackBrackets) and \ + self.brackets == self.stackBrackets[-1]: + self.listOutputs.append(string.join(self.listClasses, '$')) + self.localClasses.pop() + self.listClasses.pop() + self.anonStacksStack.pop() + self.stackBrackets.pop() + if len(self.stackAnonClassBrackets) and \ + self.brackets == self.stackAnonClassBrackets[-1]: + self.__getAnonStack().pop() + self.stackAnonClassBrackets.pop() + + def parseToken(self, token): + if token[:2] == '//': + return IgnoreState('\n', self) + elif token == '/*': + return IgnoreState('*/', self) + elif token == '{': + self.openBracket() + elif token == '}': + self.closeBracket() + elif token in [ '"', "'" ]: + return IgnoreState(token, self) + elif token == "new": + # anonymous inner class + if len(self.listClasses) > 0: + return self.__getAnonClassState() + return self.__getSkipState() # Skip the class name + elif token in ['class', 'interface', 'enum']: + if len(self.listClasses) == 0: + self.nextAnon = 1 + self.stackBrackets.append(self.brackets) + return self.__getClassState() + elif token == 'package': + return self.__getPackageState() + elif token == '.': + # Skip the attribute, it might be named "class", in which + # case we don't want to treat the following token as + # an inner class name... 
+ return self.__getSkipState() + return self + + def addAnonClass(self): + """Add an anonymous inner class""" + if self.version in ('1.1', '1.2', '1.3', '1.4'): + clazz = self.listClasses[0] + self.listOutputs.append('%s$%d' % (clazz, self.nextAnon)) + elif self.version in ('1.5', '1.6', '5', '6'): + self.stackAnonClassBrackets.append(self.brackets) + className = [] + className.extend(self.listClasses) + self.__getAnonStack()[-1] = self.__getAnonStack()[-1] + 1 + for anon in self.__getAnonStack(): + className.append(str(anon)) + self.listOutputs.append(string.join(className, '$')) + + self.nextAnon = self.nextAnon + 1 + self.__getAnonStack().append(0) + + def setPackage(self, package): + self.package = package + + class AnonClassState: + """A state that looks for anonymous inner classes.""" + def __init__(self, old_state): + # outer_state is always an instance of OuterState + self.outer_state = old_state.outer_state + self.old_state = old_state + self.brace_level = 0 + def parseToken(self, token): + # This is an anonymous class if and only if the next + # non-whitespace token is a bracket. Everything between + # braces should be parsed as normal java code. + if token[:2] == '//': + return IgnoreState('\n', self) + elif token == '/*': + return IgnoreState('*/', self) + elif token == '\n': + return self + elif token[0] == '<' and token[-1] == '>': + return self + elif token == '(': + self.brace_level = self.brace_level + 1 + return self + if self.brace_level > 0: + if token == 'new': + # look further for anonymous inner class + return SkipState(1, AnonClassState(self)) + elif token in [ '"', "'" ]: + return IgnoreState(token, self) + elif token == ')': + self.brace_level = self.brace_level - 1 + return self + if token == '{': + self.outer_state.addAnonClass() + return self.old_state.parseToken(token) + + class SkipState: + """A state that will skip a specified number of tokens before + reverting to the previous state.""" + def __init__(self, tokens_to_skip, old_state): + self.tokens_to_skip = tokens_to_skip + self.old_state = old_state + def parseToken(self, token): + self.tokens_to_skip = self.tokens_to_skip - 1 + if self.tokens_to_skip < 1: + return self.old_state + return self + + class ClassState: + """A state we go into when we hit a class or interface keyword.""" + def __init__(self, outer_state): + # outer_state is always an instance of OuterState + self.outer_state = outer_state + def parseToken(self, token): + # the next non-whitespace token should be the name of the class + if token == '\n': + return self + # If that's an inner class which is declared in a method, it + # requires an index prepended to the class-name, e.g. 
+ # 'Foo$1Inner' (Tigris Issue 2087) + if self.outer_state.localClasses and \ + self.outer_state.stackBrackets[-1] > \ + self.outer_state.stackBrackets[-2]+1: + locals = self.outer_state.localClasses[-1] + try: + idx = locals[token] + locals[token] = locals[token]+1 + except KeyError: + locals[token] = 1 + token = str(locals[token]) + token + self.outer_state.localClasses.append({}) + self.outer_state.listClasses.append(token) + self.outer_state.anonStacksStack.append([0]) + return self.outer_state + + class IgnoreState: + """A state that will ignore all tokens until it gets to a + specified token.""" + def __init__(self, ignore_until, old_state): + self.ignore_until = ignore_until + self.old_state = old_state + def parseToken(self, token): + if self.ignore_until == token: + return self.old_state + return self + + class PackageState: + """The state we enter when we encounter the package keyword. + We assume the next token will be the package name.""" + def __init__(self, outer_state): + # outer_state is always an instance of OuterState + self.outer_state = outer_state + def parseToken(self, token): + self.outer_state.setPackage(token) + return self.outer_state + + def parse_java_file(fn, version=default_java_version): + return parse_java(open(fn, 'r').read(), version) + + def parse_java(contents, version=default_java_version, trace=None): + """Parse a .java file and return a double of package directory, + plus a list of .class files that compiling that .java file will + produce""" + package = None + initial = OuterState(version) + currstate = initial + for token in _reToken.findall(contents): + # The regex produces a bunch of groups, but only one will + # have anything in it. + currstate = currstate.parseToken(token) + if trace: trace(token, currstate) + if initial.package: + package = string.replace(initial.package, '.', os.sep) + return (package, initial.listOutputs) + +else: + # Don't actually parse Java files for class names. + # + # We might make this a configurable option in the future if + # Java-file parsing takes too long (although it shouldn't relative + # to how long the Java compiler itself seems to take...). + + def parse_java_file(fn): + """ "Parse" a .java file. + + This actually just splits the file name, so the assumption here + is that the file name matches the public class name, and that + the path to the file is the same as the package name. + """ + return os.path.split(file) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/MSCommon/__init__.py b/engine/SCons/Tool/MSCommon/__init__.py new file mode 100644 index 0000000..4f56a4a --- /dev/null +++ b/engine/SCons/Tool/MSCommon/__init__.py @@ -0,0 +1,56 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/MSCommon/__init__.py 4577 2009/12/27 19:43:56 scons" + +__doc__ = """ +Common functions for Microsoft Visual Studio and Visual C/C++. +""" + +import copy +import os +import re +import subprocess + +import SCons.Errors +import SCons.Platform.win32 +import SCons.Util + +from SCons.Tool.MSCommon.sdk import mssdk_exists, \ + mssdk_setup_env + +from SCons.Tool.MSCommon.vc import msvc_exists, \ + msvc_setup_env, \ + msvc_setup_env_once + +from SCons.Tool.MSCommon.vs import get_default_version, \ + get_vs_by_version, \ + merge_default_version, \ + msvs_exists, \ + query_versions + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/MSCommon/arch.py b/engine/SCons/Tool/MSCommon/arch.py new file mode 100644 index 0000000..ce214c2 --- /dev/null +++ b/engine/SCons/Tool/MSCommon/arch.py @@ -0,0 +1,61 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/MSCommon/arch.py 4577 2009/12/27 19:43:56 scons" + +__doc__ = """Module to define supported Windows chip architectures. +""" + +import os + +class ArchDefinition: + """ + A class for defining architecture-specific settings and logic. 
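+
+    A hypothetical usage sketch (values are illustrative only):
+
+        arm = ArchDefinition('arm', synonyms=['ARM', 'armv7'])
+        arm.synonyms    # -> ['ARM', 'armv7']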
+ """ + def __init__(self, arch, synonyms=[]): + self.arch = arch + self.synonyms = synonyms + +SupportedArchitectureList = [ + ArchitectureDefinition( + 'x86', + ['i386', 'i486', 'i586', 'i686'], + ), + + ArchitectureDefinition( + 'x86_64', + ['AMD64', 'amd64', 'em64t', 'EM64T', 'x86_64'], + ), + + ArchitectureDefinition( + 'ia64', + ['IA64'], + ), +] + +SupportedArchitectureMap = {} +for a in SupportedArchitectureList: + SupportedArchitectureMap[a.arch] = a + for s in a.synonyms: + SupportedArchitectureMap[s] = a + diff --git a/engine/SCons/Tool/MSCommon/common.py b/engine/SCons/Tool/MSCommon/common.py new file mode 100644 index 0000000..219e680 --- /dev/null +++ b/engine/SCons/Tool/MSCommon/common.py @@ -0,0 +1,195 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/MSCommon/common.py 4577 2009/12/27 19:43:56 scons" + +__doc__ = """ +Common helper functions for working with the Microsoft tool chain. +""" + +import copy +import os +import subprocess +import re + +import SCons.Util + + +logfile = os.environ.get('SCONS_MSCOMMON_DEBUG') +if logfile == '-': + def debug(x): + print x +elif logfile: + try: + import logging + except ImportError: + debug = lambda x: open(logfile, 'a').write(x + '\n') + else: + logging.basicConfig(filename=logfile, level=logging.DEBUG) + debug = logging.debug +else: + debug = lambda x: None + + +_is_win64 = None + +def is_win64(): + """Return true if running on windows 64 bits. + + Works whether python itself runs in 64 bits or 32 bits.""" + # Unfortunately, python does not provide a useful way to determine + # if the underlying Windows OS is 32-bit or 64-bit. Worse, whether + # the Python itself is 32-bit or 64-bit affects what it returns, + # so nothing in sys.* or os.* help. So we go to the registry to + # look directly for a clue from Windows, caching the result to + # avoid repeated registry calls. 
+ global _is_win64 + if _is_win64 is None: + _is_win64 = has_reg(r"Software\Wow6432Node") + return _is_win64 + + +def read_reg(value): + return SCons.Util.RegGetValue(SCons.Util.HKEY_LOCAL_MACHINE, value)[0] + +def has_reg(value): + """Return True if the given key exists in HKEY_LOCAL_MACHINE, False + otherwise.""" + try: + SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, value) + ret = True + except WindowsError: + ret = False + return ret + +# Functions for fetching environment variable settings from batch files. + +def normalize_env(env, keys): + """Given a dictionary representing a shell environment, add the variables + from os.environ needed for the processing of .bat files; the keys are + controlled by the keys argument. + + It also makes sure the environment values are correctly encoded. + + Note: the environment is copied""" + normenv = {} + if env: + for k in env.keys(): + normenv[k] = copy.deepcopy(env[k]).encode('mbcs') + + for k in keys: + if os.environ.has_key(k): + normenv[k] = os.environ[k].encode('mbcs') + + return normenv + +def get_output(vcbat, args = None, env = None): + """Parse the output of given bat file, with given args.""" + if args: + debug("Calling '%s %s'" % (vcbat, args)) + popen = subprocess.Popen('"%s" %s & set' % (vcbat, args), + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env=env) + else: + debug("Calling '%s'" % vcbat) + popen = subprocess.Popen('"%s" & set' % vcbat, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env=env) + + # Use the .stdout and .stderr attributes directly because the + # .communicate() method uses the threading module on Windows + # and won't work under Pythons not built with threading. + stdout = popen.stdout.read() + if popen.wait() != 0: + raise IOError(popen.stderr.read().decode("mbcs")) + + output = stdout.decode("mbcs") + return output + +def parse_output(output, keep = ("INCLUDE", "LIB", "LIBPATH", "PATH")): + # dkeep is a dict associating key: path_list, where key is one item from + # keep, and pat_list the associated list of paths + + # TODO(1.5): replace with the following list comprehension: + #dkeep = dict([(i, []) for i in keep]) + dkeep = dict(map(lambda i: (i, []), keep)) + + # rdk will keep the regex to match the .bat file output line starts + rdk = {} + for i in keep: + rdk[i] = re.compile('%s=(.*)' % i, re.I) + + def add_env(rmatch, key, dkeep=dkeep): + plist = rmatch.group(1).split(os.pathsep) + for p in plist: + # Do not add empty paths (when a var ends with ;) + if p: + p = p.encode('mbcs') + # XXX: For some reason, VC98 .bat file adds "" around the PATH + # values, and it screws up the environment later, so we strip + # it. + p = p.strip('"') + dkeep[key].append(p) + + for line in output.splitlines(): + for k,v in rdk.items(): + m = v.match(line) + if m: + add_env(m, k) + + return dkeep + +# TODO(sgk): unused +def output_to_dict(output): + """Given an output string, parse it to find env variables. + + Return a dict where keys are variables names, and values their content""" + envlinem = re.compile(r'^([a-zA-z0-9]+)=([\S\s]*)$') + parsedenv = {} + for line in output.splitlines(): + m = envlinem.match(line) + if m: + parsedenv[m.group(1)] = m.group(2) + return parsedenv + +# TODO(sgk): unused +def get_new(l1, l2): + """Given two list l1 and l2, return the items in l2 which are not in l1. 
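+    For example (sketch): get_new([1, 2], [2, 3, 4]) returns [3, 4].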
+ Order is maintained.""" + + # We don't try to be smart: lists are small, and this is not the bottleneck + # is any case + new = [] + for i in l2: + if i not in l1: + new.append(i) + + return new + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/MSCommon/netframework.py b/engine/SCons/Tool/MSCommon/netframework.py new file mode 100644 index 0000000..300f662 --- /dev/null +++ b/engine/SCons/Tool/MSCommon/netframework.py @@ -0,0 +1,84 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/MSCommon/netframework.py 4577 2009/12/27 19:43:56 scons" + +__doc__ = """ +""" + +import os +import re +import string + +from common import read_reg, debug + +# Original value recorded by dcournapeau +_FRAMEWORKDIR_HKEY_ROOT = r'Software\Microsoft\.NETFramework\InstallRoot' +# On SGK's system +_FRAMEWORKDIR_HKEY_ROOT = r'Software\Microsoft\Microsoft SDKs\.NETFramework\v2.0\InstallationFolder' + +def find_framework_root(): + # XXX: find it from environment (FrameworkDir) + try: + froot = read_reg(_FRAMEWORKDIR_HKEY_ROOT) + debug("Found framework install root in registry: %s" % froot) + except WindowsError, e: + debug("Could not read reg key %s" % _FRAMEWORKDIR_HKEY_ROOT) + return None + + if not os.path.exists(froot): + debug("%s not found on fs" % froot) + return None + + return froot + +def query_versions(): + froot = find_framework_root() + if froot: + contents = os.listdir(froot) + + l = re.compile('v[0-9]+.*') + versions = filter(lambda e, l=l: l.match(e), contents) + + def versrt(a,b): + # since version numbers aren't really floats... + aa = a[1:] + bb = b[1:] + aal = string.split(aa, '.') + bbl = string.split(bb, '.') + # sequence comparison in python is lexicographical + # which is exactly what we want. + # Note we sort backwards so the highest version is first. 
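+            # e.g. ['v2.0.50727', 'v1.1.4322'] rather than the ascending
+            # order a plain sort would produce.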
+ return cmp(bbl,aal) + + versions.sort(versrt) + else: + versions = [] + + return versions + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/MSCommon/sdk.py b/engine/SCons/Tool/MSCommon/sdk.py new file mode 100644 index 0000000..f1345e4 --- /dev/null +++ b/engine/SCons/Tool/MSCommon/sdk.py @@ -0,0 +1,321 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/MSCommon/sdk.py 4577 2009/12/27 19:43:56 scons" + +__doc__ = """Module to detect the Platform/Windows SDK + +PSDK 2003 R1 is the earliest version detected. +""" + +import os + +import SCons.Errors +import SCons.Util + +import common + +debug = common.debug + +# SDK Checks. This is of course a mess as everything else on MS platforms. Here +# is what we do to detect the SDK: +# +# For Windows SDK >= 6.0: just look into the registry entries: +# HKLM\Software\Microsoft\Microsoft SDKs\Windows +# All the keys in there are the available versions. +# +# For Platform SDK before 6.0 (2003 server R1 and R2, etc...), there does not +# seem to be any sane registry key, so the precise location is hardcoded. +# +# For versions below 2003R1, it seems the PSDK is included with Visual Studio? +# +# Also, per the following: +# http://benjamin.smedbergs.us/blog/tag/atl/ +# VC++ Professional comes with the SDK, VC++ Express does not. + +# Location of the SDK (checked for 6.1 only) +_CURINSTALLED_SDK_HKEY_ROOT = \ + r"Software\Microsoft\Microsoft SDKs\Windows\CurrentInstallFolder" + + +class SDKDefinition: + """ + An abstract base class for trying to find installed SDK directories. + """ + def __init__(self, version, **kw): + self.version = version + self.__dict__.update(kw) + + def find_sdk_dir(self): + """Try to find the MS SDK from the registry. + + Return None if failed or the directory does not exist. 
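+
+        For WindowsSDK('6.1'), for instance, the registry value probed is
+        HKLM\Software\Microsoft\Microsoft SDKs\Windows\v6.1\InstallationFolder
+        (built below as HKEY_FMT % hkey_data).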
+ """ + if not SCons.Util.can_read_reg: + debug('find_sdk_dir(): can not read registry') + return None + + hkey = self.HKEY_FMT % self.hkey_data + + try: + sdk_dir = common.read_reg(hkey) + except WindowsError, e: + debug('find_sdk_dir(): no SDK registry key %s' % repr(hkey)) + return None + + if not os.path.exists(sdk_dir): + debug('find_sdk_dir(): %s not on file system' % sdk_dir) + return None + + ftc = os.path.join(sdk_dir, self.sanity_check_file) + if not os.path.exists(ftc): + debug("find_sdk_dir(): sanity check %s not found" % ftc) + return None + + return sdk_dir + + def get_sdk_dir(self): + """Return the MSSSDK given the version string.""" + try: + return self._sdk_dir + except AttributeError: + sdk_dir = self.find_sdk_dir() + self._sdk_dir = sdk_dir + return sdk_dir + +class WindowsSDK(SDKDefinition): + """ + A subclass for trying to find installed Windows SDK directories. + """ + HKEY_FMT = r'Software\Microsoft\Microsoft SDKs\Windows\v%s\InstallationFolder' + def __init__(self, *args, **kw): + apply(SDKDefinition.__init__, (self,)+args, kw) + self.hkey_data = self.version + +class PlatformSDK(SDKDefinition): + """ + A subclass for trying to find installed Platform SDK directories. + """ + HKEY_FMT = r'Software\Microsoft\MicrosoftSDK\InstalledSDKS\%s\Install Dir' + def __init__(self, *args, **kw): + apply(SDKDefinition.__init__, (self,)+args, kw) + self.hkey_data = self.uuid + +# The list of support SDKs which we know how to detect. +# +# The first SDK found in the list is the one used by default if there +# are multiple SDKs installed. Barring good reasons to the contrary, +# this means we should list SDKs with from most recent to oldest. +# +# If you update this list, update the documentation in Tool/mssdk.xml. +SupportedSDKList = [ + WindowsSDK('6.1', + sanity_check_file=r'bin\SetEnv.Cmd', + include_subdir='include', + lib_subdir={ + 'x86' : ['lib'], + 'x86_64' : [r'lib\x64'], + 'ia64' : [r'lib\ia64'], + }, + ), + + WindowsSDK('6.0A', + sanity_check_file=r'include\windows.h', + include_subdir='include', + lib_subdir={ + 'x86' : ['lib'], + 'x86_64' : [r'lib\x64'], + 'ia64' : [r'lib\ia64'], + }, + ), + + WindowsSDK('6.0', + sanity_check_file=r'bin\gacutil.exe', + include_subdir='include', + lib_subdir='lib', + ), + + PlatformSDK('2003R2', + sanity_check_file=r'SetEnv.Cmd', + uuid="D2FF9F89-8AA2-4373-8A31-C838BF4DBBE1" + ), + + PlatformSDK('2003R1', + sanity_check_file=r'SetEnv.Cmd', + uuid="8F9E5EF3-A9A5-491B-A889-C58EFFECE8B3", + ), +] + +SupportedSDKMap = {} +for sdk in SupportedSDKList: + SupportedSDKMap[sdk.version] = sdk + + +# Finding installed SDKs isn't cheap, because it goes not only to the +# registry but also to the disk to sanity-check that there is, in fact, +# an SDK installed there and that the registry entry isn't just stale. +# Find this information once, when requested, and cache it. + +InstalledSDKList = None +InstalledSDKMap = None + +def get_installed_sdks(): + global InstalledSDKList + global InstalledSDKMap + if InstalledSDKList is None: + InstalledSDKList = [] + InstalledSDKMap = {} + for sdk in SupportedSDKList: + debug('trying to find SDK %s' % sdk.version) + if sdk.get_sdk_dir(): + debug('found SDK %s' % sdk.version) + InstalledSDKList.append(sdk) + InstalledSDKMap[sdk.version] = sdk + return InstalledSDKList + + +# We may be asked to update multiple construction environments with +# SDK information. When doing this, we check on-disk for whether +# the SDK has 'mfc' and 'atl' subdirectories. 
Since going to disk +# is expensive, cache results by directory. + +SDKEnvironmentUpdates = {} + +def set_sdk_by_directory(env, sdk_dir): + global SDKEnvironmentUpdates + try: + env_tuple_list = SDKEnvironmentUpdates[sdk_dir] + except KeyError: + env_tuple_list = [] + SDKEnvironmentUpdates[sdk_dir] = env_tuple_list + + include_path = os.path.join(sdk_dir, 'include') + mfc_path = os.path.join(include_path, 'mfc') + atl_path = os.path.join(include_path, 'atl') + + if os.path.exists(mfc_path): + env_tuple_list.append(('INCLUDE', mfc_path)) + if os.path.exists(atl_path): + env_tuple_list.append(('INCLUDE', atl_path)) + env_tuple_list.append(('INCLUDE', include_path)) + + env_tuple_list.append(('LIB', os.path.join(sdk_dir, 'lib'))) + env_tuple_list.append(('LIBPATH', os.path.join(sdk_dir, 'lib'))) + env_tuple_list.append(('PATH', os.path.join(sdk_dir, 'bin'))) + + for variable, directory in env_tuple_list: + env.PrependENVPath(variable, directory) + + +# TODO(sgk): currently unused; remove? +def get_cur_sdk_dir_from_reg(): + """Try to find the platform sdk directory from the registry. + + Return None if failed or the directory does not exist""" + if not SCons.Util.can_read_reg: + debug('SCons cannot read registry') + return None + + try: + val = common.read_reg(_CURINSTALLED_SDK_HKEY_ROOT) + debug("Found current sdk dir in registry: %s" % val) + except WindowsError, e: + debug("Did not find current sdk in registry") + return None + + if not os.path.exists(val): + debug("Current sdk dir %s not on fs" % val) + return None + + return val + +def get_sdk_by_version(mssdk): + if not SupportedSDKMap.has_key(mssdk): + msg = "SDK version %s is not supported" % repr(mssdk) + raise SCons.Errors.UserError, msg + get_installed_sdks() + return InstalledSDKMap.get(mssdk) + +def get_default_sdk(): + """Set up the default Platform/Windows SDK.""" + get_installed_sdks() + if not InstalledSDKList: + return None + return InstalledSDKList[0] + +def mssdk_setup_env(env): + debug('msvs_setup_env()') + if env.has_key('MSSDK_DIR'): + sdk_dir = env['MSSDK_DIR'] + if sdk_dir is None: + return + sdk_dir = env.subst(sdk_dir) + elif env.has_key('MSSDK_VERSION'): + sdk_version = env['MSSDK_VERSION'] + if sdk_version is None: + msg = "SDK version %s is not installed" % repr(mssdk) + raise SCons.Errors.UserError, msg + sdk_version = env.subst(sdk_version) + mssdk = get_sdk_by_version(sdk_version) + sdk_dir = mssdk.get_sdk_dir() + elif env.has_key('MSVS_VERSION'): + msvs_version = env['MSVS_VERSION'] + debug('Getting MSVS_VERSION from env:%s'%msvs_version) + if msvs_version is None: + return + msvs_version = env.subst(msvs_version) + import vs + msvs = vs.get_vs_by_version(msvs_version) + debug('msvs is :%s'%msvs) + if not msvs: + return + sdk_version = msvs.sdk_version + if not sdk_version: + return + mssdk = get_sdk_by_version(sdk_version) + if not mssdk: + mssdk = get_default_sdk() + if not mssdk: + return + sdk_dir = mssdk.get_sdk_dir() + else: + mssdk = get_default_sdk() + if not mssdk: + return + sdk_dir = mssdk.get_sdk_dir() + + set_sdk_by_directory(env, sdk_dir) + + #print "No MSVS_VERSION: this is likely to be a bug" + +def mssdk_exists(version=None): + sdks = get_installed_sdks() + if version is None: + return len(sdks) > 0 + return sdks.has_key(version) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/MSCommon/vc.py b/engine/SCons/Tool/MSCommon/vc.py new file mode 100644 index 0000000..c02f15c --- /dev/null +++ 
b/engine/SCons/Tool/MSCommon/vc.py @@ -0,0 +1,367 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +# TODO: +# * supported arch for versions: for old versions of batch file without +# argument, giving bogus argument cannot be detected, so we have to hardcode +# this here +# * print warning when msvc version specified but not found +# * find out why warning do not print +# * test on 64 bits XP + VS 2005 (and VS 6 if possible) +# * SDK +# * Assembly +__revision__ = "src/engine/SCons/Tool/MSCommon/vc.py 4577 2009/12/27 19:43:56 scons" + +__doc__ = """Module for Visual C/C++ detection and configuration. +""" +import SCons.compat + +import os +import platform + +import SCons.Warnings + +import common + +debug = common.debug + +class VisualCException(Exception): + pass + +class UnsupportedVersion(VisualCException): + pass + +class UnsupportedArch(VisualCException): + pass + +class MissingConfiguration(VisualCException): + pass + +class NoVersionFound(VisualCException): + pass + +class BatchFileExecutionError(VisualCException): + pass + +# Dict to 'canonalize' the arch +_ARCH_TO_CANONICAL = { + "x86": "x86", + "amd64": "amd64", + "i386": "x86", + "emt64": "amd64", + "x86_64": "amd64", + "itanium": "ia64", + "ia64": "ia64", +} + +# Given a (host, target) tuple, return the argument for the bat file. Both host +# and targets should be canonalized. +_HOST_TARGET_ARCH_TO_BAT_ARCH = { + ("x86", "x86"): "x86", + ("x86", "amd64"): "x86_amd64", + ("amd64", "amd64"): "amd64", + ("amd64", "x86"): "x86", + ("x86", "ia64"): "x86_ia64" +} + +def get_host_target(env): + host_platform = env.get('HOST_ARCH') + if not host_platform: + host_platform = platform.machine() + # TODO(2.5): the native Python platform.machine() function returns + # '' on all Python versions before 2.6, after which it also uses + # PROCESSOR_ARCHITECTURE. 
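+    # (PROCESSOR_ARCHITECTURE is typically 'x86' or 'AMD64'; note that the
+    # _ARCH_TO_CANONICAL table below is keyed on lower-case spellings.)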
+ if not host_platform: + host_platform = os.environ.get('PROCESSOR_ARCHITECTURE', '') + target_platform = env.get('TARGET_ARCH') + if not target_platform: + target_platform = host_platform + + try: + host = _ARCH_TO_CANONICAL[host_platform] + except KeyError, e: + msg = "Unrecognized host architecture %s" + raise ValueError(msg % repr(host_platform)) + + try: + target = _ARCH_TO_CANONICAL[target_platform] + except KeyError, e: + raise ValueError("Unrecognized target architecture %s" % target_platform) + + return (host, target) + +_VCVER = ["10.0", "9.0", "8.0", "7.1", "7.0", "6.0"] + +_VCVER_TO_PRODUCT_DIR = { + '10.0': [ + r'Microsoft\VisualStudio\10.0\Setup\VC\ProductDir'], + '9.0': [ + r'Microsoft\VisualStudio\9.0\Setup\VC\ProductDir', + r'Microsoft\VCExpress\9.0\Setup\VC\ProductDir'], + '8.0': [ + r'Microsoft\VisualStudio\8.0\Setup\VC\ProductDir', + r'Microsoft\VCExpress\8.0\Setup\VC\ProductDir'], + '7.1': [ + r'Microsoft\VisualStudio\7.1\Setup\VC\ProductDir'], + '7.0': [ + r'Microsoft\VisualStudio\7.0\Setup\VC\ProductDir'], + '6.0': [ + r'Microsoft\VisualStudio\6.0\Setup\Microsoft Visual C++\ProductDir'] +} + +def msvc_version_to_maj_min(msvc_version): + t = msvc_version.split(".") + if not len(t) == 2: + raise ValueError("Unrecognized version %s" % msvc_version) + try: + maj = int(t[0]) + min = int(t[1]) + return maj, min + except ValueError, e: + raise ValueError("Unrecognized version %s" % msvc_version) + +def is_host_target_supported(host_target, msvc_version): + """Return True if the given (host, target) tuple is supported given the + msvc version. + + Parameters + ---------- + host_target: tuple + tuple of (canonalized) host-target, e.g. ("x86", "amd64") for cross + compilation from 32 bits windows to 64 bits. + msvc_version: str + msvc version (major.minor, e.g. 10.0) + + Note + ---- + This only check whether a given version *may* support the given (host, + target), not that the toolchain is actually present on the machine. + """ + # We assume that any Visual Studio version supports x86 as a target + if host_target[1] != "x86": + maj, min = msvc_version_to_maj_min(msvc_version) + if maj < 8: + return False + + return True + +def find_vc_pdir(msvc_version): + """Try to find the product directory for the given + version. + + Note + ---- + If for some reason the requested version could not be found, an + exception which inherits from VisualCException will be raised.""" + root = 'Software\\' + if common.is_win64(): + root = root + 'Wow6432Node\\' + try: + hkeys = _VCVER_TO_PRODUCT_DIR[msvc_version] + except KeyError: + debug("Unknown version of MSVC: %s" % msvc_version) + raise UnsupportedVersion("Unknown version %s" % msvc_version) + + for key in hkeys: + key = root + key + try: + comps = common.read_reg(key) + except WindowsError, e: + debug('find_vc_dir(): no VC registry key %s' % repr(key)) + else: + debug('find_vc_dir(): found VC in registry: %s' % comps) + if os.path.exists(comps): + return comps + else: + debug('find_vc_dir(): reg says dir is %s, but it does not exist. 
(ignoring)'\ + % comps) + raise MissingConfiguration("registry dir %s not found on the filesystem" % comps) + return None + +def find_batch_file(msvc_version): + pdir = find_vc_pdir(msvc_version) + if pdir is None: + raise NoVersionFound("No version of Visual Studio found") + + vernum = float(msvc_version) + if 7 <= vernum < 8: + pdir = os.path.join(pdir, os.pardir, "Common7", "Tools") + batfilename = os.path.join(pdir, "vsvars32.bat") + elif vernum < 7: + pdir = os.path.join(pdir, "Bin") + batfilename = os.path.join(pdir, "vcvars32.bat") + else: # >= 8 + batfilename = os.path.join(pdir, "vcvarsall.bat") + + if os.path.exists(batfilename): + return batfilename + else: + debug("Not found: %s" % batfilename) + return None + +__INSTALLED_VCS_RUN = None + +def cached_get_installed_vcs(): + global __INSTALLED_VCS_RUN + + if __INSTALLED_VCS_RUN is None: + ret = get_installed_vcs() + __INSTALLED_VCS_RUN = ret + + return __INSTALLED_VCS_RUN + +def get_installed_vcs(): + installed_versions = [] + for ver in _VCVER: + debug('trying to find VC %s' % ver) + try: + if find_vc_pdir(ver): + debug('found VC %s' % ver) + installed_versions.append(ver) + else: + debug('find_vc_pdir return None for ver %s' % ver) + except VisualCException, e: + debug('did not find VC %s: caught exception %s' % (ver, str(e))) + return installed_versions + +def reset_installed_vcs(): + """Make it try again to find VC. This is just for the tests.""" + __INSTALLED_VCS_RUN = None + +def script_env(script, args=None): + stdout = common.get_output(script, args) + # Stupid batch files do not set return code: we take a look at the + # beginning of the output for an error message instead + olines = stdout.splitlines() + if olines[0].startswith("The specified configuration type is missing"): + raise BatchFileExecutionError("\n".join(olines[:2])) + + return common.parse_output(stdout) + +def get_default_version(env): + debug('get_default_version()') + + msvc_version = env.get('MSVC_VERSION') + msvs_version = env.get('MSVS_VERSION') + + if msvs_version and not msvc_version: + SCons.Warnings.warn( + SCons.Warnings.DeprecatedWarning, + "MSVS_VERSION is deprecated: please use MSVC_VERSION instead ") + return msvs_version + elif msvc_version and msvs_version: + if not msvc_version == msvs_version: + SCons.Warnings.warn( + SCons.Warnings.VisualVersionMismatch, + "Requested msvc version (%s) and msvs version (%s) do " \ + "not match: please use MSVC_VERSION only to request a " \ + "visual studio version, MSVS_VERSION is deprecated" \ + % (msvc_version, msvs_version)) + return msvs_version + if not msvc_version: + installed_vcs = cached_get_installed_vcs() + debug('installed_vcs:%s' % installed_vcs) + if not installed_vcs: + msg = 'No installed VCs' + debug('msv %s\n' % repr(msg)) + SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, msg) + return None + msvc_version = installed_vcs[0] + debug('msvc_setup_env: using default installed MSVC version %s\n' % repr(msvc_version)) + + return msvc_version + +def msvc_setup_env_once(env): + try: + has_run = env["MSVC_SETUP_RUN"] + except KeyError: + has_run = False + + if not has_run: + msvc_setup_env(env) + env["MSVC_SETUP_RUN"] = True + +def msvc_setup_env(env): + debug('msvc_setup_env()') + + version = get_default_version(env) + if version is None: + warn_msg = "No version of Visual Studio compiler found - C/C++ " \ + "compilers most likely not set correctly" + SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg) + return None + debug('msvc_setup_env: using specified MSVC version 
%s\n' % repr(version)) + + # XXX: we set-up both MSVS version for backward + # compatibility with the msvs tool + env['MSVC_VERSION'] = version + env['MSVS_VERSION'] = version + env['MSVS'] = {} + + try: + script = find_batch_file(version) + except VisualCException, e: + msg = str(e) + debug('Caught exception while looking for batch file (%s)' % msg) + warn_msg = "VC version %s not installed. " + \ + "C/C++ compilers are most likely not set correctly.\n" + \ + " Installed versions are: %s" + warn_msg = warn_msg % (version, cached_get_installed_vcs()) + SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg) + return None + + use_script = env.get('MSVC_USE_SCRIPT', True) + if SCons.Util.is_String(use_script): + debug('use_script 1 %s\n' % repr(use_script)) + d = script_env(use_script) + elif use_script: + host_platform, target_platform = get_host_target(env) + host_target = (host_platform, target_platform) + if not is_host_target_supported(host_target, version): + warn_msg = "host, target = %s not supported for MSVC version %s" % \ + (host_target, version) + SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg) + arg = _HOST_TARGET_ARCH_TO_BAT_ARCH[host_target] + debug('use_script 2 %s, args:%s\n' % (repr(script), arg)) + try: + d = script_env(script, args=arg) + except BatchFileExecutionError, e: + msg = "MSVC error while executing %s with args %s (error was %s)" % \ + (script, arg, str(e)) + raise SCons.Errors.UserError(msg) + else: + debug('MSVC_USE_SCRIPT set to False') + warn_msg = "MSVC_USE_SCRIPT set to False, assuming environment " \ + "set correctly." + SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg) + return None + + for k, v in d.items(): + env.PrependENVPath(k, v, delete_existing=True) + +def msvc_exists(version=None): + vcs = cached_get_installed_vcs() + if version is None: + return len(vcs) > 0 + return version in vcs + diff --git a/engine/SCons/Tool/MSCommon/vs.py b/engine/SCons/Tool/MSCommon/vs.py new file mode 100644 index 0000000..f9531c0 --- /dev/null +++ b/engine/SCons/Tool/MSCommon/vs.py @@ -0,0 +1,497 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/MSCommon/vs.py 4577 2009/12/27 19:43:56 scons" + +__doc__ = """Module to detect Visual Studio and/or Visual C/C++ +""" + +import os + +import SCons.Errors +import SCons.Util + +from common import debug, \ + get_output, \ + is_win64, \ + normalize_env, \ + parse_output, \ + read_reg + +import SCons.Tool.MSCommon.vc + +class VisualStudio: + """ + An abstract base class for trying to find installed versions of + Visual Studio. + """ + def __init__(self, version, **kw): + self.version = version + kw['vc_version'] = kw.get('vc_version', version) + kw['sdk_version'] = kw.get('sdk_version', version) + self.__dict__.update(kw) + self._cache = {} + + # + + def find_batch_file(self): + vs_dir = self.get_vs_dir() + if not vs_dir: + debug('find_executable(): no vs_dir') + return None + batch_file = os.path.join(vs_dir, self.batch_file_path) + batch_file = os.path.normpath(batch_file) + if not os.path.isfile(batch_file): + debug('find_batch_file(): %s not on file system' % batch_file) + return None + return batch_file + + def find_vs_dir_by_vc(self): + SCons.Tool.MSCommon.vc.get_installed_vcs() + dir = SCons.Tool.MSCommon.vc.find_vc_pdir(self.vc_version) + if not dir: + debug('find_vs_dir(): no installed VC %s' % self.vc_version) + return None + return dir + + def find_vs_dir_by_reg(self): + root = 'Software\\' + + if is_win64(): + root = root + 'Wow6432Node\\' + for key in self.hkeys: + if key=='use_dir': + return self.find_vs_dir_by_vc() + key = root + key + try: + comps = read_reg(key) + except WindowsError, e: + debug('find_vs_dir_by_reg(): no VS registry key %s' % repr(key)) + else: + debug('find_vs_dir_by_reg(): found VS in registry: %s' % comps) + return comps + return None + + def find_vs_dir(self): + """ Can use registry or location of VC to find vs dir + First try to find by registry, and if that fails find via VC dir + """ + + + if True: + vs_dir=self.find_vs_dir_by_reg() + return vs_dir + else: + return self.find_vs_dir_by_vc() + + def find_executable(self): + vs_dir = self.get_vs_dir() + if not vs_dir: + debug('find_executable(): no vs_dir (%s)'%vs_dir) + return None + executable = os.path.join(vs_dir, self.executable_path) + executable = os.path.normpath(executable) + if not os.path.isfile(executable): + debug('find_executable(): %s not on file system' % executable) + return None + return executable + + # + + def get_batch_file(self): + try: + return self._cache['batch_file'] + except KeyError: + batch_file = self.find_batch_file() + self._cache['batch_file'] = batch_file + return batch_file + + def get_executable(self): + try: + debug('get_executable using cache:%s'%self._cache['executable']) + return self._cache['executable'] + except KeyError: + executable = self.find_executable() + self._cache['executable'] = executable + debug('get_executable not in cache:%s'%executable) + return executable + + def get_vs_dir(self): + try: + return self._cache['vs_dir'] + except KeyError: + vs_dir = self.find_vs_dir() + self._cache['vs_dir'] = vs_dir + return vs_dir + + def get_supported_arch(self): + try: + return self._cache['supported_arch'] + except KeyError: + # RDEVE: for the time being use hardcoded lists + # supported_arch = self.find_supported_arch() + self._cache['supported_arch'] = self.supported_arch + return self.supported_arch + + def reset(self): + self._cache = {} + +# The list of supported Visual Studio versions we know how to detect. +# +# How to look for .bat file ? 
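+# (Sketch of the mechanism, assuming a default install location: for
+# VisualStudio('9.0') below, find_vs_dir_by_reg() reads
+# HKLM\Software\Microsoft\VisualStudio\9.0\Setup\VS\ProductDir, which
+# typically yields r'C:\Program Files\Microsoft Visual Studio 9.0\', and
+# find_batch_file() then appends r'Common7\Tools\vsvars32.bat' to it.)
+#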
+# - VS 2008 Express (x86): +# * from registry key productdir, gives the full path to vsvarsall.bat. In +# HKEY_LOCAL_MACHINE): +# Software\Microsoft\VCEpress\9.0\Setup\VC\productdir +# * from environmnent variable VS90COMNTOOLS: the path is then ..\..\VC +# relatively to the path given by the variable. +# +# - VS 2008 Express (WoW6432: 32 bits on windows x64): +# Software\Wow6432Node\Microsoft\VCEpress\9.0\Setup\VC\productdir +# +# - VS 2005 Express (x86): +# * from registry key productdir, gives the full path to vsvarsall.bat. In +# HKEY_LOCAL_MACHINE): +# Software\Microsoft\VCEpress\8.0\Setup\VC\productdir +# * from environmnent variable VS80COMNTOOLS: the path is then ..\..\VC +# relatively to the path given by the variable. +# +# - VS 2005 Express (WoW6432: 32 bits on windows x64): does not seem to have a +# productdir ? +# +# - VS 2003 .Net (pro edition ? x86): +# * from registry key productdir. The path is then ..\Common7\Tools\ +# relatively to the key. The key is in HKEY_LOCAL_MACHINE): +# Software\Microsoft\VisualStudio\7.1\Setup\VC\productdir +# * from environmnent variable VS71COMNTOOLS: the path is the full path to +# vsvars32.bat +# +# - VS 98 (VS 6): +# * from registry key productdir. The path is then Bin +# relatively to the key. The key is in HKEY_LOCAL_MACHINE): +# Software\Microsoft\VisualStudio\6.0\Setup\VC98\productdir +# +# The first version found in the list is the one used by default if +# there are multiple versions installed. Barring good reasons to +# the contrary, this means we should list versions from most recent +# to oldest. Pro versions get listed before Express versions on the +# assumption that, by default, you'd rather use the version you paid +# good money for in preference to whatever Microsoft makes available +# for free. +# +# If you update this list, update the documentation in Tool/msvs.xml. + +SupportedVSList = [ + # Visual Studio 2010 + # TODO: find the settings, perhaps from someone with a CTP copy? + #VisualStudio('TBD', + # hkey_root=r'TBD', + # common_tools_var='TBD', + # executable_path=r'TBD', + # default_dirname='TBD', + #), + + # Visual Studio 2008 + # The batch file we look for is in the VC directory, + # so the devenv.com executable is up in ..\..\Common7\IDE. + VisualStudio('9.0', + sdk_version='6.1', + hkeys=[r'Microsoft\VisualStudio\9.0\Setup\VS\ProductDir'], + common_tools_var='VS90COMNTOOLS', + executable_path=r'Common7\IDE\devenv.com', + batch_file_path=r'Common7\Tools\vsvars32.bat', + default_dirname='Microsoft Visual Studio 9', + supported_arch=['x86', 'amd64'], + ), + + # Visual C++ 2008 Express Edition + # The batch file we look for is in the VC directory, + # so the VCExpress.exe executable is up in ..\..\Common7\IDE. + VisualStudio('9.0Exp', + vc_version='9.0', + sdk_version='6.1', + hkeys=[r'Microsoft\VCExpress\9.0\Setup\VS\ProductDir'], + common_tools_var='VS90COMNTOOLS', + executable_path=r'Common7\IDE\VCExpress.exe', + batch_file_path=r'Common7\Tools\vsvars32.bat', + default_dirname='Microsoft Visual Studio 9', + supported_arch=['x86'], + ), + + # Visual Studio 2005 + # The batch file we look for is in the VC directory, + # so the devenv.com executable is up in ..\..\Common7\IDE. 
+ VisualStudio('8.0', + sdk_version='6.0A', + hkeys=[r'Microsoft\VisualStudio\8.0\Setup\VS\ProductDir'], + common_tools_var='VS80COMNTOOLS', + executable_path=r'Common7\IDE\devenv.com', + batch_file_path=r'Common7\Tools\vsvars32.bat', + default_dirname='Microsoft Visual Studio 8', + supported_arch=['x86', 'amd64'], + ), + + # Visual C++ 2005 Express Edition + # The batch file we look for is in the VC directory, + # so the VCExpress.exe executable is up in ..\..\Common7\IDE. + VisualStudio('8.0Exp', + vc_version='8.0', + sdk_version='6.0A', + hkeys=[r'Microsoft\VCExpress\8.0\Setup\VS\ProductDir'], + common_tools_var='VS80COMNTOOLS', + executable_path=r'Common7\IDE\VCExpress.exe', + batch_file_path=r'Common7\Tools\vsvars32.bat', + default_dirname='Microsoft Visual Studio 8', + supported_arch=['x86'], + ), + + # Visual Studio .NET 2003 + # The batch file we look for is in the Common7\Tools directory, + # so the devenv.com executable is next door in ..\IDE. + VisualStudio('7.1', + sdk_version='6.0', + hkeys=[r'Microsoft\VisualStudio\7.1\Setup\VS\ProductDir'], + common_tools_var='VS71COMNTOOLS', + executable_path=r'Common7\IDE\devenv.com', + batch_file_path=r'Common7\Tools\vsvars32.bat', + default_dirname='Microsoft Visual Studio .NET 2003', + supported_arch=['x86'], + ), + + # Visual Studio .NET + # The batch file we look for is in the Common7\Tools directory, + # so the devenv.com executable is next door in ..\IDE. + VisualStudio('7.0', + sdk_version='2003R2', + hkeys=[r'Microsoft\VisualStudio\7.0\Setup\VS\ProductDir'], + common_tools_var='VS70COMNTOOLS', + executable_path=r'IDE\devenv.com', + batch_file_path=r'Common7\Tools\vsvars32.bat', + default_dirname='Microsoft Visual Studio .NET', + supported_arch=['x86'], + ), + + # Visual Studio 6.0 + VisualStudio('6.0', + sdk_version='2003R1', + hkeys=[r'Microsoft\VisualStudio\6.0\Setup\Microsoft Visual Studio\ProductDir', + 'use_dir'], + common_tools_var='VS60COMNTOOLS', + executable_path=r'Common\MSDev98\Bin\MSDEV.COM', + batch_file_path=r'Common7\Tools\vsvars32.bat', + default_dirname='Microsoft Visual Studio', + supported_arch=['x86'], + ), +] + +SupportedVSMap = {} +for vs in SupportedVSList: + SupportedVSMap[vs.version] = vs + + +# Finding installed versions of Visual Studio isn't cheap, because it +# goes not only to the registry but also to the disk to sanity-check +# that there is, in fact, a Visual Studio directory there and that the +# registry entry isn't just stale. Find this information once, when +# requested, and cache it. + +InstalledVSList = None +InstalledVSMap = None + +def get_installed_visual_studios(): + global InstalledVSList + global InstalledVSMap + if InstalledVSList is None: + InstalledVSList = [] + InstalledVSMap = {} + for vs in SupportedVSList: + debug('trying to find VS %s' % vs.version) + if vs.get_executable(): + debug('found VS %s' % vs.version) + InstalledVSList.append(vs) + InstalledVSMap[vs.version] = vs + return InstalledVSList + +def reset_installed_visual_studios(): + global InstalledVSList + global InstalledVSMap + InstalledVSList = None + InstalledVSMap = None + for vs in SupportedVSList: + vs.reset() + + # Need to clear installed VC's as well as they are used in finding + # installed VS's + SCons.Tool.MSCommon.vc.reset_installed_vcs() + + +# We may be asked to update multiple construction environments with +# SDK information. When doing this, we check on-disk for whether +# the SDK has 'mfc' and 'atl' subdirectories. Since going to disk +# is expensive, cache results by directory. 
+ +#SDKEnvironmentUpdates = {} +# +#def set_sdk_by_directory(env, sdk_dir): +# global SDKEnvironmentUpdates +# try: +# env_tuple_list = SDKEnvironmentUpdates[sdk_dir] +# except KeyError: +# env_tuple_list = [] +# SDKEnvironmentUpdates[sdk_dir] = env_tuple_list +# +# include_path = os.path.join(sdk_dir, 'include') +# mfc_path = os.path.join(include_path, 'mfc') +# atl_path = os.path.join(include_path, 'atl') +# +# if os.path.exists(mfc_path): +# env_tuple_list.append(('INCLUDE', mfc_path)) +# if os.path.exists(atl_path): +# env_tuple_list.append(('INCLUDE', atl_path)) +# env_tuple_list.append(('INCLUDE', include_path)) +# +# env_tuple_list.append(('LIB', os.path.join(sdk_dir, 'lib'))) +# env_tuple_list.append(('LIBPATH', os.path.join(sdk_dir, 'lib'))) +# env_tuple_list.append(('PATH', os.path.join(sdk_dir, 'bin'))) +# +# for variable, directory in env_tuple_list: +# env.PrependENVPath(variable, directory) + +def msvs_exists(): + return (len(get_installed_visual_studios()) > 0) + +def get_vs_by_version(msvs): + global InstalledVSMap + global SupportedVSMap + + if not SupportedVSMap.has_key(msvs): + msg = "Visual Studio version %s is not supported" % repr(msvs) + raise SCons.Errors.UserError, msg + get_installed_visual_studios() + vs = InstalledVSMap.get(msvs) + debug('InstalledVSMap:%s'%InstalledVSMap) + # Some check like this would let us provide a useful error message + # if they try to set a Visual Studio version that's not installed. + # However, we also want to be able to run tests (like the unit + # tests) on systems that don't, or won't ever, have it installed. + # It might be worth resurrecting this, with some configurable + # setting that the tests can use to bypass the check. + #if not vs: + # msg = "Visual Studio version %s is not installed" % repr(msvs) + # raise SCons.Errors.UserError, msg + return vs + +def get_default_version(env): + """Returns the default version string to use for MSVS. + + If no version was requested by the user through the MSVS environment + variable, query all the available the visual studios through + query_versions, and take the highest one. + + Return + ------ + version: str + the default version. + """ + if not env.has_key('MSVS') or not SCons.Util.is_Dict(env['MSVS']): + # TODO(1.5): + #versions = [vs.version for vs in get_installed_visual_studios()] + versions = map(lambda vs: vs.version, get_installed_visual_studios()) + env['MSVS'] = {'VERSIONS' : versions} + else: + versions = env['MSVS'].get('VERSIONS', []) + + if not env.has_key('MSVS_VERSION'): + if versions: + env['MSVS_VERSION'] = versions[0] #use highest version by default + else: + env['MSVS_VERSION'] = SupportedVSList[0].version + + env['MSVS']['VERSION'] = env['MSVS_VERSION'] + + return env['MSVS_VERSION'] + +def get_default_arch(env): + """Return the default arch to use for MSVS + + if no version was requested by the user through the MSVS_ARCH environment + variable, select x86 + + Return + ------ + arch: str + """ + arch = env.get('MSVS_ARCH', 'x86') + + msvs = InstalledVSMap.get(env['MSVS_VERSION']) + + if not msvs: + arch = 'x86' + elif not arch in msvs.get_supported_arch(): + fmt = "Visual Studio version %s does not support architecture %s" + raise SCons.Errors.UserError, fmt % (env['MSVS_VERSION'], arch) + + return arch + +def merge_default_version(env): + version = get_default_version(env) + arch = get_default_arch(env) + +def msvs_setup_env(env): + batfilename = msvs.get_batch_file() + msvs = get_vs_by_version(version) + if msvs is None: + return + + # XXX: I think this is broken. 
This will silently set a bogus tool instead + # of failing, but there is no other way with the current scons tool + # framework + if batfilename is not None: + + vars = ('LIB', 'LIBPATH', 'PATH', 'INCLUDE') + + msvs_list = get_installed_visual_studios() + # TODO(1.5): + #vscommonvarnames = [ vs.common_tools_var for vs in msvs_list ] + vscommonvarnames = map(lambda vs: vs.common_tools_var, msvs_list) + nenv = normalize_env(env['ENV'], vscommonvarnames + ['COMSPEC']) + output = get_output(batfilename, arch, env=nenv) + vars = parse_output(output, vars) + + for k, v in vars.items(): + env.PrependENVPath(k, v, delete_existing=1) + +def query_versions(): + """Query the system to get available versions of VS. A version is + considered when a batfile is found.""" + msvs_list = get_installed_visual_studios() + # TODO(1.5) + #versions = [ msvs.version for msvs in msvs_list ] + versions = map(lambda msvs: msvs.version, msvs_list) + return versions + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/Perforce.py b/engine/SCons/Tool/Perforce.py new file mode 100644 index 0000000..8a22aee --- /dev/null +++ b/engine/SCons/Tool/Perforce.py @@ -0,0 +1,104 @@ +"""SCons.Tool.Perforce.py + +Tool-specific initialization for Perforce Source Code Management system. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/Perforce.py 4577 2009/12/27 19:43:56 scons" + +import os + +import SCons.Action +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +# This function should maybe be moved to SCons.Util? +from SCons.Tool.PharLapCommon import addPathIfNotExists + + + +# Variables that we want to import from the base OS environment. 
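+# (P4PORT, P4CLIENT and P4USER identify the Perforce server, client
+# workspace and user; without them the 'p4 ... sync $TARGET' command built
+# from $P4COM below would not know which depot to talk to.)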
+_import_env = [ 'P4PORT', 'P4CLIENT', 'P4USER', 'USER', 'USERNAME', 'P4PASSWD', + 'P4CHARSET', 'P4LANGUAGE', 'SystemRoot' ] + +PerforceAction = SCons.Action.Action('$P4COM', '$P4COMSTR') + +def generate(env): + """Add a Builder factory function and construction variables for + Perforce to an Environment.""" + + def PerforceFactory(env=env): + """ """ + return SCons.Builder.Builder(action = PerforceAction, env = env) + + #setattr(env, 'Perforce', PerforceFactory) + env.Perforce = PerforceFactory + + env['P4'] = 'p4' + env['P4FLAGS'] = SCons.Util.CLVar('') + env['P4COM'] = '$P4 $P4FLAGS sync $TARGET' + try: + environ = env['ENV'] + except KeyError: + environ = {} + env['ENV'] = environ + + # Perforce seems to use the PWD environment variable rather than + # calling getcwd() for itself, which is odd. If no PWD variable + # is present, p4 WILL call getcwd, but this seems to cause problems + # with good ol' Windows's tilde-mangling for long file names. + environ['PWD'] = env.Dir('#').get_abspath() + + for var in _import_env: + v = os.environ.get(var) + if v: + environ[var] = v + + if SCons.Util.can_read_reg: + # If we can read the registry, add the path to Perforce to our environment. + try: + k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE, + 'Software\\Perforce\\environment') + val, tok = SCons.Util.RegQueryValueEx(k, 'P4INSTROOT') + addPathIfNotExists(environ, 'PATH', val) + except SCons.Util.RegError: + # Can't detect where Perforce is, hope the user has it set in the + # PATH. + pass + +def exists(env): + return env.Detect('p4') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/PharLapCommon.py b/engine/SCons/Tool/PharLapCommon.py new file mode 100644 index 0000000..8b6086e --- /dev/null +++ b/engine/SCons/Tool/PharLapCommon.py @@ -0,0 +1,138 @@ +"""SCons.Tool.PharLapCommon + +This module contains common code used by all Tools for the +Phar Lap ETS tool chain. Right now, this is linkloc and +386asm. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/PharLapCommon.py 4577 2009/12/27 19:43:56 scons" + +import os +import os.path +import SCons.Errors +import SCons.Util +import re +import string + +def getPharLapPath(): + """Reads the registry to find the installed path of the Phar Lap ETS + development kit. 
+ + Raises UserError if no installed version of Phar Lap can + be found.""" + + if not SCons.Util.can_read_reg: + raise SCons.Errors.InternalError, "No Windows registry module was found" + try: + k=SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, + 'SOFTWARE\\Pharlap\\ETS') + val, type = SCons.Util.RegQueryValueEx(k, 'BaseDir') + + # The following is a hack...there is (not surprisingly) + # an odd issue in the Phar Lap plug in that inserts + # a bunch of junk data after the phar lap path in the + # registry. We must trim it. + idx=val.find('\0') + if idx >= 0: + val = val[:idx] + + return os.path.normpath(val) + except SCons.Util.RegError: + raise SCons.Errors.UserError, "Cannot find Phar Lap ETS path in the registry. Is it installed properly?" + +REGEX_ETS_VER = re.compile(r'#define\s+ETS_VER\s+([0-9]+)') + +def getPharLapVersion(): + """Returns the version of the installed ETS Tool Suite as a + decimal number. This version comes from the ETS_VER #define in + the embkern.h header. For example, '#define ETS_VER 1010' (which + is what Phar Lap 10.1 defines) would cause this method to return + 1010. Phar Lap 9.1 does not have such a #define, but this method + will return 910 as a default. + + Raises UserError if no installed version of Phar Lap can + be found.""" + + include_path = os.path.join(getPharLapPath(), os.path.normpath("include/embkern.h")) + if not os.path.exists(include_path): + raise SCons.Errors.UserError, "Cannot find embkern.h in ETS include directory.\nIs Phar Lap ETS installed properly?" + mo = REGEX_ETS_VER.search(open(include_path, 'r').read()) + if mo: + return int(mo.group(1)) + # Default return for Phar Lap 9.1 + return 910 + +def addPathIfNotExists(env_dict, key, path, sep=os.pathsep): + """This function will take 'key' out of the dictionary + 'env_dict', then add the path 'path' to that key if it is not + already there. This treats the value of env_dict[key] as if it + has a similar format to the PATH variable...a list of paths + separated by tokens. The 'path' will get added to the list if it + is not already there.""" + try: + is_list = 1 + paths = env_dict[key] + if not SCons.Util.is_List(env_dict[key]): + paths = string.split(paths, sep) + is_list = 0 + if not os.path.normcase(path) in map(os.path.normcase, paths): + paths = [ path ] + paths + if is_list: + env_dict[key] = paths + else: + env_dict[key] = string.join(paths, sep) + except KeyError: + env_dict[key] = path + +def addPharLapPaths(env): + """This function adds the path to the Phar Lap binaries, includes, + and libraries, if they are not already there.""" + ph_path = getPharLapPath() + + try: + env_dict = env['ENV'] + except KeyError: + env_dict = {} + env['ENV'] = env_dict + addPathIfNotExists(env_dict, 'PATH', + os.path.join(ph_path, 'bin')) + addPathIfNotExists(env_dict, 'INCLUDE', + os.path.join(ph_path, 'include')) + addPathIfNotExists(env_dict, 'LIB', + os.path.join(ph_path, 'lib')) + addPathIfNotExists(env_dict, 'LIB', + os.path.join(ph_path, os.path.normpath('lib/vclib'))) + + env['PHARLAP_PATH'] = getPharLapPath() + env['PHARLAP_VERSION'] = str(getPharLapVersion()) + + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/RCS.py b/engine/SCons/Tool/RCS.py new file mode 100644 index 0000000..d60be1e --- /dev/null +++ b/engine/SCons/Tool/RCS.py @@ -0,0 +1,64 @@ +"""SCons.Tool.RCS.py + +Tool-specific initialization for RCS. + +There normally shouldn't be any need to import this module directly. 
+It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/RCS.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Action +import SCons.Builder +import SCons.Util + +def generate(env): + """Add a Builder factory function and construction variables for + RCS to an Environment.""" + + def RCSFactory(env=env): + """ """ + act = SCons.Action.Action('$RCS_COCOM', '$RCS_COCOMSTR') + return SCons.Builder.Builder(action = act, env = env) + + #setattr(env, 'RCS', RCSFactory) + env.RCS = RCSFactory + + env['RCS'] = 'rcs' + env['RCS_CO'] = 'co' + env['RCS_COFLAGS'] = SCons.Util.CLVar('') + env['RCS_COCOM'] = '$RCS_CO $RCS_COFLAGS $TARGET' + +def exists(env): + return env.Detect('rcs') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/SCCS.py b/engine/SCons/Tool/SCCS.py new file mode 100644 index 0000000..66cb4df --- /dev/null +++ b/engine/SCons/Tool/SCCS.py @@ -0,0 +1,64 @@ +"""SCons.Tool.SCCS.py + +Tool-specific initialization for SCCS. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/SCCS.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Action +import SCons.Builder +import SCons.Util + +def generate(env): + """Add a Builder factory function and construction variables for + SCCS to an Environment.""" + + def SCCSFactory(env=env): + """ """ + act = SCons.Action.Action('$SCCSCOM', '$SCCSCOMSTR') + return SCons.Builder.Builder(action = act, env = env) + + #setattr(env, 'SCCS', SCCSFactory) + env.SCCS = SCCSFactory + + env['SCCS'] = 'sccs' + env['SCCSFLAGS'] = SCons.Util.CLVar('') + env['SCCSGETFLAGS'] = SCons.Util.CLVar('') + env['SCCSCOM'] = '$SCCS $SCCSFLAGS get $SCCSGETFLAGS $TARGET' + +def exists(env): + return env.Detect('sccs') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/Subversion.py b/engine/SCons/Tool/Subversion.py new file mode 100644 index 0000000..57397ce --- /dev/null +++ b/engine/SCons/Tool/Subversion.py @@ -0,0 +1,71 @@ +"""SCons.Tool.Subversion.py + +Tool-specific initialization for Subversion. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/Subversion.py 4577 2009/12/27 19:43:56 scons" + +import os.path + +import SCons.Action +import SCons.Builder +import SCons.Util + +def generate(env): + """Add a Builder factory function and construction variables for + Subversion to an Environment.""" + + def SubversionFactory(repos, module='', env=env): + """ """ + # fail if repos is not an absolute path name? 
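+        # (Editorial note, illustrative sketch; not upstream code.)  The
+        # factory is intended to be handed to SourceCode(), roughly:
+        #   env.SourceCode('.', env.Subversion('file:///usr/local/repos', 'trunk'))
+        # so that a missing source file is fetched on demand with "svn cat",
+        # per the $SVNCOM command defined below.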
+ if module != '': + module = os.path.join(module, '') + act = SCons.Action.Action('$SVNCOM', '$SVNCOMSTR') + return SCons.Builder.Builder(action = act, + env = env, + SVNREPOSITORY = repos, + SVNMODULE = module) + + #setattr(env, 'Subversion', SubversionFactory) + env.Subversion = SubversionFactory + + env['SVN'] = 'svn' + env['SVNFLAGS'] = SCons.Util.CLVar('') + env['SVNCOM'] = '$SVN $SVNFLAGS cat $SVNREPOSITORY/$SVNMODULE$TARGET > $TARGET' + +def exists(env): + return env.Detect('svn') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/__init__.py b/engine/SCons/Tool/__init__.py new file mode 100644 index 0000000..5317fe8 --- /dev/null +++ b/engine/SCons/Tool/__init__.py @@ -0,0 +1,675 @@ +"""SCons.Tool + +SCons tool selection. + +This looks for modules that define a callable object that can modify +a construction environment as appropriate for a given tool (or tool +chain). + +Note that because this subsystem just *selects* a callable that can +modify a construction environment, it's possible for people to define +their own "tool specification" in an arbitrary callable function. No +one needs to use or tie in to this subsystem in order to roll their own +tool definition. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/__init__.py 4577 2009/12/27 19:43:56 scons" + +import imp +import sys + +import SCons.Builder +import SCons.Errors +import SCons.Node.FS +import SCons.Scanner +import SCons.Scanner.C +import SCons.Scanner.D +import SCons.Scanner.LaTeX +import SCons.Scanner.Prog + +DefaultToolpath=[] + +CScanner = SCons.Scanner.C.CScanner() +DScanner = SCons.Scanner.D.DScanner() +LaTeXScanner = SCons.Scanner.LaTeX.LaTeXScanner() +PDFLaTeXScanner = SCons.Scanner.LaTeX.PDFLaTeXScanner() +ProgramScanner = SCons.Scanner.Prog.ProgramScanner() +SourceFileScanner = SCons.Scanner.Base({}, name='SourceFileScanner') + +CSuffixes = [".c", ".C", ".cxx", ".cpp", ".c++", ".cc", + ".h", ".H", ".hxx", ".hpp", ".hh", + ".F", ".fpp", ".FPP", + ".m", ".mm", + ".S", ".spp", ".SPP"] + +DSuffixes = ['.d'] + +IDLSuffixes = [".idl", ".IDL"] + +LaTeXSuffixes = [".tex", ".ltx", ".latex"] + +for suffix in CSuffixes: + SourceFileScanner.add_scanner(suffix, CScanner) + +for suffix in DSuffixes: + SourceFileScanner.add_scanner(suffix, DScanner) + +# FIXME: what should be done here? Two scanners scan the same extensions, +# but look for different files, e.g., "picture.eps" vs. "picture.pdf". +# The builders for DVI and PDF explicitly reference their scanners +# I think that means this is not needed??? +for suffix in LaTeXSuffixes: + SourceFileScanner.add_scanner(suffix, LaTeXScanner) + SourceFileScanner.add_scanner(suffix, PDFLaTeXScanner) + +class Tool: + def __init__(self, name, toolpath=[], **kw): + self.name = name + self.toolpath = toolpath + DefaultToolpath + # remember these so we can merge them into the call + self.init_kw = kw + + module = self._tool_module() + self.generate = module.generate + self.exists = module.exists + if hasattr(module, 'options'): + self.options = module.options + + def _tool_module(self): + # TODO: Interchange zipimport with normal initilization for better error reporting + oldpythonpath = sys.path + sys.path = self.toolpath + sys.path + + try: + try: + file, path, desc = imp.find_module(self.name, self.toolpath) + try: + return imp.load_module(self.name, file, path, desc) + finally: + if file: + file.close() + except ImportError, e: + if str(e)!="No module named %s"%self.name: + raise SCons.Errors.EnvironmentError, e + try: + import zipimport + except ImportError: + pass + else: + for aPath in self.toolpath: + try: + importer = zipimport.zipimporter(aPath) + return importer.load_module(self.name) + except ImportError, e: + pass + finally: + sys.path = oldpythonpath + + full_name = 'SCons.Tool.' 
+ self.name + try: + return sys.modules[full_name] + except KeyError: + try: + smpath = sys.modules['SCons.Tool'].__path__ + try: + file, path, desc = imp.find_module(self.name, smpath) + module = imp.load_module(full_name, file, path, desc) + setattr(SCons.Tool, self.name, module) + if file: + file.close() + return module + except ImportError, e: + if str(e)!="No module named %s"%self.name: + raise SCons.Errors.EnvironmentError, e + try: + import zipimport + importer = zipimport.zipimporter( sys.modules['SCons.Tool'].__path__[0] ) + module = importer.load_module(full_name) + setattr(SCons.Tool, self.name, module) + return module + except ImportError, e: + m = "No tool named '%s': %s" % (self.name, e) + raise SCons.Errors.EnvironmentError, m + except ImportError, e: + m = "No tool named '%s': %s" % (self.name, e) + raise SCons.Errors.EnvironmentError, m + + def __call__(self, env, *args, **kw): + if self.init_kw is not None: + # Merge call kws into init kws; + # but don't bash self.init_kw. + if kw is not None: + call_kw = kw + kw = self.init_kw.copy() + kw.update(call_kw) + else: + kw = self.init_kw + env.Append(TOOLS = [ self.name ]) + if hasattr(self, 'options'): + import SCons.Variables + if not env.has_key('options'): + from SCons.Script import ARGUMENTS + env['options']=SCons.Variables.Variables(args=ARGUMENTS) + opts=env['options'] + + self.options(opts) + opts.Update(env) + + apply(self.generate, ( env, ) + args, kw) + + def __str__(self): + return self.name + +########################################################################## +# Create common executable program / library / object builders + +def createProgBuilder(env): + """This is a utility function that creates the Program + Builder in an Environment if it is not there already. + + If it is already there, we return the existing one. + """ + + try: + program = env['BUILDERS']['Program'] + except KeyError: + import SCons.Defaults + program = SCons.Builder.Builder(action = SCons.Defaults.LinkAction, + emitter = '$PROGEMITTER', + prefix = '$PROGPREFIX', + suffix = '$PROGSUFFIX', + src_suffix = '$OBJSUFFIX', + src_builder = 'Object', + target_scanner = ProgramScanner) + env['BUILDERS']['Program'] = program + + return program + +def createStaticLibBuilder(env): + """This is a utility function that creates the StaticLibrary + Builder in an Environment if it is not there already. + + If it is already there, we return the existing one. + """ + + try: + static_lib = env['BUILDERS']['StaticLibrary'] + except KeyError: + action_list = [ SCons.Action.Action("$ARCOM", "$ARCOMSTR") ] + if env.Detect('ranlib'): + ranlib_action = SCons.Action.Action("$RANLIBCOM", "$RANLIBCOMSTR") + action_list.append(ranlib_action) + + static_lib = SCons.Builder.Builder(action = action_list, + emitter = '$LIBEMITTER', + prefix = '$LIBPREFIX', + suffix = '$LIBSUFFIX', + src_suffix = '$OBJSUFFIX', + src_builder = 'StaticObject') + env['BUILDERS']['StaticLibrary'] = static_lib + env['BUILDERS']['Library'] = static_lib + + return static_lib + +def createSharedLibBuilder(env): + """This is a utility function that creates the SharedLibrary + Builder in an Environment if it is not there already. + + If it is already there, we return the existing one. 
+ """ + + try: + shared_lib = env['BUILDERS']['SharedLibrary'] + except KeyError: + import SCons.Defaults + action_list = [ SCons.Defaults.SharedCheck, + SCons.Defaults.ShLinkAction ] + shared_lib = SCons.Builder.Builder(action = action_list, + emitter = "$SHLIBEMITTER", + prefix = '$SHLIBPREFIX', + suffix = '$SHLIBSUFFIX', + target_scanner = ProgramScanner, + src_suffix = '$SHOBJSUFFIX', + src_builder = 'SharedObject') + env['BUILDERS']['SharedLibrary'] = shared_lib + + return shared_lib + +def createLoadableModuleBuilder(env): + """This is a utility function that creates the LoadableModule + Builder in an Environment if it is not there already. + + If it is already there, we return the existing one. + """ + + try: + ld_module = env['BUILDERS']['LoadableModule'] + except KeyError: + import SCons.Defaults + action_list = [ SCons.Defaults.SharedCheck, + SCons.Defaults.LdModuleLinkAction ] + ld_module = SCons.Builder.Builder(action = action_list, + emitter = "$LDMODULEEMITTER", + prefix = '$LDMODULEPREFIX', + suffix = '$LDMODULESUFFIX', + target_scanner = ProgramScanner, + src_suffix = '$SHOBJSUFFIX', + src_builder = 'SharedObject') + env['BUILDERS']['LoadableModule'] = ld_module + + return ld_module + +def createObjBuilders(env): + """This is a utility function that creates the StaticObject + and SharedObject Builders in an Environment if they + are not there already. + + If they are there already, we return the existing ones. + + This is a separate function because soooo many Tools + use this functionality. + + The return is a 2-tuple of (StaticObject, SharedObject) + """ + + + try: + static_obj = env['BUILDERS']['StaticObject'] + except KeyError: + static_obj = SCons.Builder.Builder(action = {}, + emitter = {}, + prefix = '$OBJPREFIX', + suffix = '$OBJSUFFIX', + src_builder = ['CFile', 'CXXFile'], + source_scanner = SourceFileScanner, + single_source = 1) + env['BUILDERS']['StaticObject'] = static_obj + env['BUILDERS']['Object'] = static_obj + + try: + shared_obj = env['BUILDERS']['SharedObject'] + except KeyError: + shared_obj = SCons.Builder.Builder(action = {}, + emitter = {}, + prefix = '$SHOBJPREFIX', + suffix = '$SHOBJSUFFIX', + src_builder = ['CFile', 'CXXFile'], + source_scanner = SourceFileScanner, + single_source = 1) + env['BUILDERS']['SharedObject'] = shared_obj + + return (static_obj, shared_obj) + +def createCFileBuilders(env): + """This is a utility function that creates the CFile/CXXFile + Builders in an Environment if they + are not there already. + + If they are there already, we return the existing ones. + + This is a separate function because soooo many Tools + use this functionality. 
+ + The return is a 2-tuple of (CFile, CXXFile) + """ + + try: + c_file = env['BUILDERS']['CFile'] + except KeyError: + c_file = SCons.Builder.Builder(action = {}, + emitter = {}, + suffix = {None:'$CFILESUFFIX'}) + env['BUILDERS']['CFile'] = c_file + + env.SetDefault(CFILESUFFIX = '.c') + + try: + cxx_file = env['BUILDERS']['CXXFile'] + except KeyError: + cxx_file = SCons.Builder.Builder(action = {}, + emitter = {}, + suffix = {None:'$CXXFILESUFFIX'}) + env['BUILDERS']['CXXFile'] = cxx_file + env.SetDefault(CXXFILESUFFIX = '.cc') + + return (c_file, cxx_file) + +########################################################################## +# Create common Java builders + +def CreateJarBuilder(env): + try: + java_jar = env['BUILDERS']['Jar'] + except KeyError: + fs = SCons.Node.FS.get_default_fs() + jar_com = SCons.Action.Action('$JARCOM', '$JARCOMSTR') + java_jar = SCons.Builder.Builder(action = jar_com, + suffix = '$JARSUFFIX', + src_suffix = '$JAVACLASSSUFIX', + src_builder = 'JavaClassFile', + source_factory = fs.Entry) + env['BUILDERS']['Jar'] = java_jar + return java_jar + +def CreateJavaHBuilder(env): + try: + java_javah = env['BUILDERS']['JavaH'] + except KeyError: + fs = SCons.Node.FS.get_default_fs() + java_javah_com = SCons.Action.Action('$JAVAHCOM', '$JAVAHCOMSTR') + java_javah = SCons.Builder.Builder(action = java_javah_com, + src_suffix = '$JAVACLASSSUFFIX', + target_factory = fs.Entry, + source_factory = fs.File, + src_builder = 'JavaClassFile') + env['BUILDERS']['JavaH'] = java_javah + return java_javah + +def CreateJavaClassFileBuilder(env): + try: + java_class_file = env['BUILDERS']['JavaClassFile'] + except KeyError: + fs = SCons.Node.FS.get_default_fs() + javac_com = SCons.Action.Action('$JAVACCOM', '$JAVACCOMSTR') + java_class_file = SCons.Builder.Builder(action = javac_com, + emitter = {}, + #suffix = '$JAVACLASSSUFFIX', + src_suffix = '$JAVASUFFIX', + src_builder = ['JavaFile'], + target_factory = fs.Entry, + source_factory = fs.File) + env['BUILDERS']['JavaClassFile'] = java_class_file + return java_class_file + +def CreateJavaClassDirBuilder(env): + try: + java_class_dir = env['BUILDERS']['JavaClassDir'] + except KeyError: + fs = SCons.Node.FS.get_default_fs() + javac_com = SCons.Action.Action('$JAVACCOM', '$JAVACCOMSTR') + java_class_dir = SCons.Builder.Builder(action = javac_com, + emitter = {}, + target_factory = fs.Dir, + source_factory = fs.Dir) + env['BUILDERS']['JavaClassDir'] = java_class_dir + return java_class_dir + +def CreateJavaFileBuilder(env): + try: + java_file = env['BUILDERS']['JavaFile'] + except KeyError: + java_file = SCons.Builder.Builder(action = {}, + emitter = {}, + suffix = {None:'$JAVASUFFIX'}) + env['BUILDERS']['JavaFile'] = java_file + env['JAVASUFFIX'] = '.java' + return java_file + +class ToolInitializerMethod: + """ + This is added to a construction environment in place of a + method(s) normally called for a Builder (env.Object, env.StaticObject, + etc.). When called, it has its associated ToolInitializer + object search the specified list of tools and apply the first + one that exists to the construction environment. It then calls + whatever builder was (presumably) added to the construction + environment in place of this particular instance. + """ + def __init__(self, name, initializer): + """ + Note: we store the tool name as __name__ so it can be used by + the class that attaches this to a construction environment. 
+ """ + self.__name__ = name + self.initializer = initializer + + def get_builder(self, env): + """ + Returns the appropriate real Builder for this method name + after having the associated ToolInitializer object apply + the appropriate Tool module. + """ + builder = getattr(env, self.__name__) + + self.initializer.apply_tools(env) + + builder = getattr(env, self.__name__) + if builder is self: + # There was no Builder added, which means no valid Tool + # for this name was found (or possibly there's a mismatch + # between the name we were called by and the Builder name + # added by the Tool module). + return None + + self.initializer.remove_methods(env) + + return builder + + def __call__(self, env, *args, **kw): + """ + """ + builder = self.get_builder(env) + if builder is None: + return [], [] + return apply(builder, args, kw) + +class ToolInitializer: + """ + A class for delayed initialization of Tools modules. + + Instances of this class associate a list of Tool modules with + a list of Builder method names that will be added by those Tool + modules. As part of instantiating this object for a particular + construction environment, we also add the appropriate + ToolInitializerMethod objects for the various Builder methods + that we want to use to delay Tool searches until necessary. + """ + def __init__(self, env, tools, names): + if not SCons.Util.is_List(tools): + tools = [tools] + if not SCons.Util.is_List(names): + names = [names] + self.env = env + self.tools = tools + self.names = names + self.methods = {} + for name in names: + method = ToolInitializerMethod(name, self) + self.methods[name] = method + env.AddMethod(method) + + def remove_methods(self, env): + """ + Removes the methods that were added by the tool initialization + so we no longer copy and re-bind them when the construction + environment gets cloned. + """ + for method in self.methods.values(): + env.RemoveMethod(method) + + def apply_tools(self, env): + """ + Searches the list of associated Tool modules for one that + exists, and applies that to the construction environment. + """ + for t in self.tools: + tool = SCons.Tool.Tool(t) + if tool.exists(env): + env.Tool(tool) + return + + # If we fall through here, there was no tool module found. + # This is where we can put an informative error message + # about the inability to find the tool. We'll start doing + # this as we cut over more pre-defined Builder+Tools to use + # the ToolInitializer class. + +def Initializers(env): + ToolInitializer(env, ['install'], ['_InternalInstall', '_InternalInstallAs']) + def Install(self, *args, **kw): + return apply(self._InternalInstall, args, kw) + def InstallAs(self, *args, **kw): + return apply(self._InternalInstallAs, args, kw) + env.AddMethod(Install) + env.AddMethod(InstallAs) + +def FindTool(tools, env): + for tool in tools: + t = Tool(tool) + if t.exists(env): + return tool + return None + +def FindAllTools(tools, env): + def ToolExists(tool, env=env): + return Tool(tool).exists(env) + return filter (ToolExists, tools) + +def tool_list(platform, env): + + other_plat_tools=[] + # XXX this logic about what tool to prefer on which platform + # should be moved into either the platform files or + # the tool files themselves. + # The search orders here are described in the man page. If you + # change these search orders, update the man page as well. 
+ if str(platform) == 'win32': + "prefer Microsoft tools on Windows" + linkers = ['mslink', 'gnulink', 'ilink', 'linkloc', 'ilink32' ] + c_compilers = ['msvc', 'mingw', 'gcc', 'intelc', 'icl', 'icc', 'cc', 'bcc32' ] + cxx_compilers = ['msvc', 'intelc', 'icc', 'g++', 'c++', 'bcc32' ] + assemblers = ['masm', 'nasm', 'gas', '386asm' ] + fortran_compilers = ['gfortran', 'g77', 'ifl', 'cvf', 'f95', 'f90', 'fortran'] + ars = ['mslib', 'ar', 'tlib'] + other_plat_tools=['msvs','midl'] + elif str(platform) == 'os2': + "prefer IBM tools on OS/2" + linkers = ['ilink', 'gnulink', ]#'mslink'] + c_compilers = ['icc', 'gcc',]# 'msvc', 'cc'] + cxx_compilers = ['icc', 'g++',]# 'msvc', 'c++'] + assemblers = ['nasm',]# 'masm', 'gas'] + fortran_compilers = ['ifl', 'g77'] + ars = ['ar',]# 'mslib'] + elif str(platform) == 'irix': + "prefer MIPSPro on IRIX" + linkers = ['sgilink', 'gnulink'] + c_compilers = ['sgicc', 'gcc', 'cc'] + cxx_compilers = ['sgic++', 'g++', 'c++'] + assemblers = ['as', 'gas'] + fortran_compilers = ['f95', 'f90', 'f77', 'g77', 'fortran'] + ars = ['sgiar'] + elif str(platform) == 'sunos': + "prefer Forte tools on SunOS" + linkers = ['sunlink', 'gnulink'] + c_compilers = ['suncc', 'gcc', 'cc'] + cxx_compilers = ['sunc++', 'g++', 'c++'] + assemblers = ['as', 'gas'] + fortran_compilers = ['sunf95', 'sunf90', 'sunf77', 'f95', 'f90', 'f77', + 'gfortran', 'g77', 'fortran'] + ars = ['sunar'] + elif str(platform) == 'hpux': + "prefer aCC tools on HP-UX" + linkers = ['hplink', 'gnulink'] + c_compilers = ['hpcc', 'gcc', 'cc'] + cxx_compilers = ['hpc++', 'g++', 'c++'] + assemblers = ['as', 'gas'] + fortran_compilers = ['f95', 'f90', 'f77', 'g77', 'fortran'] + ars = ['ar'] + elif str(platform) == 'aix': + "prefer AIX Visual Age tools on AIX" + linkers = ['aixlink', 'gnulink'] + c_compilers = ['aixcc', 'gcc', 'cc'] + cxx_compilers = ['aixc++', 'g++', 'c++'] + assemblers = ['as', 'gas'] + fortran_compilers = ['f95', 'f90', 'aixf77', 'g77', 'fortran'] + ars = ['ar'] + elif str(platform) == 'darwin': + "prefer GNU tools on Mac OS X, except for some linkers and IBM tools" + linkers = ['applelink', 'gnulink'] + c_compilers = ['gcc', 'cc'] + cxx_compilers = ['g++', 'c++'] + assemblers = ['as'] + fortran_compilers = ['gfortran', 'f95', 'f90', 'g77'] + ars = ['ar'] + else: + "prefer GNU tools on all other platforms" + linkers = ['gnulink', 'mslink', 'ilink'] + c_compilers = ['gcc', 'msvc', 'intelc', 'icc', 'cc'] + cxx_compilers = ['g++', 'msvc', 'intelc', 'icc', 'c++'] + assemblers = ['gas', 'nasm', 'masm'] + fortran_compilers = ['gfortran', 'g77', 'ifort', 'ifl', 'f95', 'f90', 'f77'] + ars = ['ar', 'mslib'] + + c_compiler = FindTool(c_compilers, env) or c_compilers[0] + + # XXX this logic about what tool provides what should somehow be + # moved into the tool files themselves. 
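+    # (Editorial note, hedged.)  Note the "or c_compilers[0]" fallback above:
+    # when nothing is detected, the platform's preferred default is still
+    # configured (e.g. 'msvc' on a Windows box without cl.exe), so the
+    # failure surfaces later, when a compile is actually attempted.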
+ if c_compiler and c_compiler == 'mingw': + # MinGW contains a linker, C compiler, C++ compiler, + # Fortran compiler, archiver and assembler: + cxx_compiler = None + linker = None + assembler = None + fortran_compiler = None + ar = None + else: + # Don't use g++ if the C compiler has built-in C++ support: + if c_compiler in ('msvc', 'intelc', 'icc'): + cxx_compiler = None + else: + cxx_compiler = FindTool(cxx_compilers, env) or cxx_compilers[0] + linker = FindTool(linkers, env) or linkers[0] + assembler = FindTool(assemblers, env) or assemblers[0] + fortran_compiler = FindTool(fortran_compilers, env) or fortran_compilers[0] + ar = FindTool(ars, env) or ars[0] + + other_tools = FindAllTools(['BitKeeper', 'CVS', + 'dmd', + 'filesystem', + 'dvipdf', 'dvips', 'gs', + 'jar', 'javac', 'javah', + 'latex', 'lex', + 'm4', #'midl', 'msvs', + 'pdflatex', 'pdftex', 'Perforce', + 'RCS', 'rmic', 'rpcgen', + 'SCCS', + # 'Subversion', + 'swig', + 'tar', 'tex', + 'yacc', 'zip', 'rpm', 'wix']+other_plat_tools, + env) + + tools = ([linker, c_compiler, cxx_compiler, + fortran_compiler, assembler, ar] + + other_tools) + + return filter(lambda x: x, tools) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/aixc++.py b/engine/SCons/Tool/aixc++.py new file mode 100644 index 0000000..bef7b99 --- /dev/null +++ b/engine/SCons/Tool/aixc++.py @@ -0,0 +1,82 @@ +"""SCons.Tool.aixc++ + +Tool-specific initialization for IBM xlC / Visual Age C++ compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/aixc++.py 4577 2009/12/27 19:43:56 scons" + +import os.path + +import SCons.Platform.aix + +cplusplus = __import__('c++', globals(), locals(), []) + +packages = ['vacpp.cmp.core', 'vacpp.cmp.batch', 'vacpp.cmp.C', 'ibmcxx.cmp'] + +def get_xlc(env): + xlc = env.get('CXX', 'xlC') + xlc_r = env.get('SHCXX', 'xlC_r') + return SCons.Platform.aix.get_xlc(env, xlc, xlc_r, packages) + +def smart_cxxflags(source, target, env, for_signature): + build_dir = env.GetBuildPath() + if build_dir: + return '-qtempinc=' + os.path.join(build_dir, 'tempinc') + return '' + +def generate(env): + """Add Builders and construction variables for xlC / Visual Age + suite to an Environment.""" + path, _cxx, _shcxx, version = get_xlc(env) + if path: + _cxx = os.path.join(path, _cxx) + _shcxx = os.path.join(path, _shcxx) + + cplusplus.generate(env) + + env['CXX'] = _cxx + env['SHCXX'] = _shcxx + env['CXXVERSION'] = version + env['SHOBJSUFFIX'] = '.pic.o' + +def exists(env): + path, _cxx, _shcxx, version = get_xlc(env) + if path and _cxx: + xlc = os.path.join(path, _cxx) + if os.path.exists(xlc): + return xlc + return None + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/aixcc.py b/engine/SCons/Tool/aixcc.py new file mode 100644 index 0000000..55d0d68 --- /dev/null +++ b/engine/SCons/Tool/aixcc.py @@ -0,0 +1,74 @@ +"""SCons.Tool.aixcc + +Tool-specific initialization for IBM xlc / Visual Age C compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/aixcc.py 4577 2009/12/27 19:43:56 scons" + +import os.path + +import SCons.Platform.aix + +import cc + +packages = ['vac.C', 'ibmcxx.cmp'] + +def get_xlc(env): + xlc = env.get('CC', 'xlc') + xlc_r = env.get('SHCC', 'xlc_r') + return SCons.Platform.aix.get_xlc(env, xlc, xlc_r, packages) + +def generate(env): + """Add Builders and construction variables for xlc / Visual Age + suite to an Environment.""" + path, _cc, _shcc, version = get_xlc(env) + if path: + _cc = os.path.join(path, _cc) + _shcc = os.path.join(path, _shcc) + + cc.generate(env) + + env['CC'] = _cc + env['SHCC'] = _shcc + env['CCVERSION'] = version + +def exists(env): + path, _cc, _shcc, version = get_xlc(env) + if path and _cc: + xlc = os.path.join(path, _cc) + if os.path.exists(xlc): + return xlc + return None + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/aixf77.py b/engine/SCons/Tool/aixf77.py new file mode 100644 index 0000000..eeb4b06 --- /dev/null +++ b/engine/SCons/Tool/aixf77.py @@ -0,0 +1,80 @@ +"""engine.SCons.Tool.aixf77 + +Tool-specific initialization for IBM Visual Age f77 Fortran compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/aixf77.py 4577 2009/12/27 19:43:56 scons" + +import os.path + +#import SCons.Platform.aix + +import f77 + +# It would be good to look for the AIX F77 package the same way we're now +# looking for the C and C++ packages. This should be as easy as supplying +# the correct package names in the following list and uncommenting the +# SCons.Platform.aix_get_xlc() call the in the function below. +packages = [] + +def get_xlf77(env): + xlf77 = env.get('F77', 'xlf77') + xlf77_r = env.get('SHF77', 'xlf77_r') + #return SCons.Platform.aix.get_xlc(env, xlf77, xlf77_r, packages) + return (None, xlf77, xlf77_r, None) + +def generate(env): + """ + Add Builders and construction variables for the Visual Age FORTRAN + compiler to an Environment. 
+ """ + path, _f77, _shf77, version = get_xlf77(env) + if path: + _f77 = os.path.join(path, _f77) + _shf77 = os.path.join(path, _shf77) + + f77.generate(env) + + env['F77'] = _f77 + env['SHF77'] = _shf77 + +def exists(env): + path, _f77, _shf77, version = get_xlf77(env) + if path and _f77: + xlf77 = os.path.join(path, _f77) + if os.path.exists(xlf77): + return xlf77 + return None + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/aixlink.py b/engine/SCons/Tool/aixlink.py new file mode 100644 index 0000000..f065978 --- /dev/null +++ b/engine/SCons/Tool/aixlink.py @@ -0,0 +1,76 @@ +"""SCons.Tool.aixlink + +Tool-specific initialization for the IBM Visual Age linker. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/aixlink.py 4577 2009/12/27 19:43:56 scons" + +import os +import os.path + +import SCons.Util + +import aixcc +import link + +cplusplus = __import__('c++', globals(), locals(), []) + +def smart_linkflags(source, target, env, for_signature): + if cplusplus.iscplusplus(source): + build_dir = env.subst('$BUILDDIR', target=target, source=source) + if build_dir: + return '-qtempinc=' + os.path.join(build_dir, 'tempinc') + return '' + +def generate(env): + """ + Add Builders and construction variables for Visual Age linker to + an Environment. + """ + link.generate(env) + + env['SMARTLINKFLAGS'] = smart_linkflags + env['LINKFLAGS'] = SCons.Util.CLVar('$SMARTLINKFLAGS') + env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -qmkshrobj -qsuppress=1501-218') + env['SHLIBSUFFIX'] = '.a' + +def exists(env): + path, _cc, _shcc, version = aixcc.get_xlc(env) + if path and _cc: + xlc = os.path.join(path, _cc) + if os.path.exists(xlc): + return xlc + return None + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/applelink.py b/engine/SCons/Tool/applelink.py new file mode 100644 index 0000000..6605017 --- /dev/null +++ b/engine/SCons/Tool/applelink.py @@ -0,0 +1,71 @@ +"""SCons.Tool.applelink + +Tool-specific initialization for the Apple gnu-like linker. 
+ +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/applelink.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Util + +# Even though the Mac is based on the GNU toolchain, it doesn't understand +# the -rpath option, so we use the "link" tool instead of "gnulink". +import link + +def generate(env): + """Add Builders and construction variables for applelink to an + Environment.""" + link.generate(env) + + env['FRAMEWORKPATHPREFIX'] = '-F' + env['_FRAMEWORKPATH'] = '${_concat(FRAMEWORKPATHPREFIX, FRAMEWORKPATH, "", __env__)}' + env['_FRAMEWORKS'] = '${_concat("-framework ", FRAMEWORKS, "", __env__)}' + env['LINKCOM'] = env['LINKCOM'] + ' $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS' + env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -dynamiclib') + env['SHLINKCOM'] = env['SHLINKCOM'] + ' $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS' + + # override the default for loadable modules, which are different + # on OS X than dynamic shared libs. echoing what XCode does for + # pre/suffixes: + env['LDMODULEPREFIX'] = '' + env['LDMODULESUFFIX'] = '' + env['LDMODULEFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -bundle') + env['LDMODULECOM'] = '$LDMODULE -o ${TARGET} $LDMODULEFLAGS $SOURCES $_LIBDIRFLAGS $_LIBFLAGS $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS' + + + +def exists(env): + return env['PLATFORM'] == 'darwin' + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/ar.py b/engine/SCons/Tool/ar.py new file mode 100644 index 0000000..855985b --- /dev/null +++ b/engine/SCons/Tool/ar.py @@ -0,0 +1,63 @@ +"""SCons.Tool.ar + +Tool-specific initialization for ar (library archive). + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/ar.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Defaults +import SCons.Tool +import SCons.Util + + +def generate(env): + """Add Builders and construction variables for ar to an Environment.""" + SCons.Tool.createStaticLibBuilder(env) + + env['AR'] = 'ar' + env['ARFLAGS'] = SCons.Util.CLVar('rc') + env['ARCOM'] = '$AR $ARFLAGS $TARGET $SOURCES' + env['LIBPREFIX'] = 'lib' + env['LIBSUFFIX'] = '.a' + + if env.Detect('ranlib'): + env['RANLIB'] = 'ranlib' + env['RANLIBFLAGS'] = SCons.Util.CLVar('') + env['RANLIBCOM'] = '$RANLIB $RANLIBFLAGS $TARGET' + +def exists(env): + return env.Detect('ar') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/as.py b/engine/SCons/Tool/as.py new file mode 100644 index 0000000..0fe8d2b --- /dev/null +++ b/engine/SCons/Tool/as.py @@ -0,0 +1,78 @@ +"""SCons.Tool.as + +Tool-specific initialization for as, the generic Posix assembler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/as.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Defaults +import SCons.Tool +import SCons.Util + +assemblers = ['as'] + +ASSuffixes = ['.s', '.asm', '.ASM'] +ASPPSuffixes = ['.spp', '.SPP', '.sx'] +if SCons.Util.case_sensitive_suffixes('.s', '.S'): + ASPPSuffixes.extend(['.S']) +else: + ASSuffixes.extend(['.S']) + +def generate(env): + """Add Builders and construction variables for as to an Environment.""" + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + + for suffix in ASSuffixes: + static_obj.add_action(suffix, SCons.Defaults.ASAction) + shared_obj.add_action(suffix, SCons.Defaults.ASAction) + static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) + shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) + + for suffix in ASPPSuffixes: + static_obj.add_action(suffix, SCons.Defaults.ASPPAction) + shared_obj.add_action(suffix, SCons.Defaults.ASPPAction) + static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) + shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) + + env['AS'] = env.Detect(assemblers) or 'as' + env['ASFLAGS'] = SCons.Util.CLVar('') + env['ASCOM'] = '$AS $ASFLAGS -o $TARGET $SOURCES' + env['ASPPFLAGS'] = '$ASFLAGS' + env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o $TARGET $SOURCES' + +def exists(env): + return env.Detect(assemblers) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/bcc32.py b/engine/SCons/Tool/bcc32.py new file mode 100644 index 0000000..bb02ced --- /dev/null +++ b/engine/SCons/Tool/bcc32.py @@ -0,0 +1,82 @@ +"""SCons.Tool.bcc32 + +XXX + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/bcc32.py 4577 2009/12/27 19:43:56 scons" + +import os +import os.path +import string + +import SCons.Defaults +import SCons.Tool +import SCons.Util + +def findIt(program, env): + # First search in the SCons path and then the OS path: + borwin = env.WhereIs(program) or SCons.Util.WhereIs(program) + if borwin: + dir = os.path.dirname(borwin) + env.PrependENVPath('PATH', dir) + return borwin + +def generate(env): + findIt('bcc32', env) + """Add Builders and construction variables for bcc to an + Environment.""" + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + for suffix in ['.c', '.cpp']: + static_obj.add_action(suffix, SCons.Defaults.CAction) + shared_obj.add_action(suffix, SCons.Defaults.ShCAction) + static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) + shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) + + env['CC'] = 'bcc32' + env['CCFLAGS'] = SCons.Util.CLVar('') + env['CFLAGS'] = SCons.Util.CLVar('') + env['CCCOM'] = '$CC -q $CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o$TARGET $SOURCES' + env['SHCC'] = '$CC' + env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS') + env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS') + env['SHCCCOM'] = '$SHCC -WD $SHCFLAGS $SHCCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o$TARGET $SOURCES' + env['CPPDEFPREFIX'] = '-D' + env['CPPDEFSUFFIX'] = '' + env['INCPREFIX'] = '-I' + env['INCSUFFIX'] = '' + env['SHOBJSUFFIX'] = '.dll' + env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0 + env['CFILESUFFIX'] = '.cpp' + +def exists(env): + return findIt('bcc32', env) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/c++.py b/engine/SCons/Tool/c++.py new file mode 100644 index 0000000..ad7c46b --- /dev/null +++ b/engine/SCons/Tool/c++.py @@ -0,0 +1,99 @@ +"""SCons.Tool.c++ + +Tool-specific initialization for generic Posix C++ compilers. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/c++.py 4577 2009/12/27 19:43:56 scons" + +import os.path + +import SCons.Tool +import SCons.Defaults +import SCons.Util + +compilers = ['CC', 'c++'] + +CXXSuffixes = ['.cpp', '.cc', '.cxx', '.c++', '.C++', '.mm'] +if SCons.Util.case_sensitive_suffixes('.c', '.C'): + CXXSuffixes.append('.C') + +def iscplusplus(source): + if not source: + # Source might be None for unusual cases like SConf. + return 0 + for s in source: + if s.sources: + ext = os.path.splitext(str(s.sources[0]))[1] + if ext in CXXSuffixes: + return 1 + return 0 + +def generate(env): + """ + Add Builders and construction variables for Visual Age C++ compilers + to an Environment. + """ + import SCons.Tool + import SCons.Tool.cc + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + + for suffix in CXXSuffixes: + static_obj.add_action(suffix, SCons.Defaults.CXXAction) + shared_obj.add_action(suffix, SCons.Defaults.ShCXXAction) + static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) + shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) + + SCons.Tool.cc.add_common_cc_variables(env) + + env['CXX'] = 'c++' + env['CXXFLAGS'] = SCons.Util.CLVar('') + env['CXXCOM'] = '$CXX -o $TARGET -c $CXXFLAGS $CCFLAGS $_CCCOMCOM $SOURCES' + env['SHCXX'] = '$CXX' + env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS') + env['SHCXXCOM'] = '$SHCXX -o $TARGET -c $SHCXXFLAGS $SHCCFLAGS $_CCCOMCOM $SOURCES' + + env['CPPDEFPREFIX'] = '-D' + env['CPPDEFSUFFIX'] = '' + env['INCPREFIX'] = '-I' + env['INCSUFFIX'] = '' + env['SHOBJSUFFIX'] = '.os' + env['OBJSUFFIX'] = '.o' + env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0 + + env['CXXFILESUFFIX'] = '.cc' + +def exists(env): + return env.Detect(compilers) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/cc.py b/engine/SCons/Tool/cc.py new file mode 100644 index 0000000..b8ecf9f --- /dev/null +++ b/engine/SCons/Tool/cc.py @@ -0,0 +1,114 @@ +"""SCons.Tool.cc + +Tool-specific initialization for generic Posix C compilers. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
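# Illustrative SConstruct sketch for the generic c++ tool module above --
# an editorial example, not part of this patch. File names and flags are
# hypothetical.
env = Environment(tools=['cc', 'c++', 'link'])
env.Append(CXXFLAGS=['-O2'])                      # appears in both $CXXCOM and $SHCXXCOM
env.Program('viewer', ['main.cpp', 'scene.cxx'])  # both suffixes are in CXXSuffixes
env.SharedObject('plugin.cc')                     # -> plugin.os, per the $SHOBJSUFFIX set above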
+# + +__revision__ = "src/engine/SCons/Tool/cc.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Tool +import SCons.Defaults +import SCons.Util + +CSuffixes = ['.c', '.m'] +if not SCons.Util.case_sensitive_suffixes('.c', '.C'): + CSuffixes.append('.C') + +def add_common_cc_variables(env): + """ + Add underlying common "C compiler" variables that + are used by multiple tools (specifically, c++). + """ + if not env.has_key('_CCCOMCOM'): + env['_CCCOMCOM'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS' + # It's a hack to test for darwin here, but the alternative + # of creating an applecc.py to contain this seems overkill. + # Maybe someday the Apple platform will require more setup and + # this logic will be moved. + env['FRAMEWORKS'] = SCons.Util.CLVar('') + env['FRAMEWORKPATH'] = SCons.Util.CLVar('') + if env['PLATFORM'] == 'darwin': + env['_CCCOMCOM'] = env['_CCCOMCOM'] + ' $_FRAMEWORKPATH' + + if not env.has_key('CCFLAGS'): + env['CCFLAGS'] = SCons.Util.CLVar('') + + if not env.has_key('SHCCFLAGS'): + env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS') + +def generate(env): + """ + Add Builders and construction variables for C compilers to an Environment. + """ + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + + for suffix in CSuffixes: + static_obj.add_action(suffix, SCons.Defaults.CAction) + shared_obj.add_action(suffix, SCons.Defaults.ShCAction) + static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) + shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) +#<<<<<<< .working +# +# env['_CCCOMCOM'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS' +# # It's a hack to test for darwin here, but the alternative of creating +# # an applecc.py to contain this seems overkill. Maybe someday the Apple +# # platform will require more setup and this logic will be moved. +# env['FRAMEWORKS'] = SCons.Util.CLVar('') +# env['FRAMEWORKPATH'] = SCons.Util.CLVar('') +# if env['PLATFORM'] == 'darwin': +# env['_CCCOMCOM'] = env['_CCCOMCOM'] + ' $_FRAMEWORKPATH' +#======= +#>>>>>>> .merge-right.r1907 + + add_common_cc_variables(env) + + env['CC'] = 'cc' + env['CFLAGS'] = SCons.Util.CLVar('') + env['CCCOM'] = '$CC -o $TARGET -c $CFLAGS $CCFLAGS $_CCCOMCOM $SOURCES' + env['SHCC'] = '$CC' + env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS') + env['SHCCCOM'] = '$SHCC -o $TARGET -c $SHCFLAGS $SHCCFLAGS $_CCCOMCOM $SOURCES' + + env['CPPDEFPREFIX'] = '-D' + env['CPPDEFSUFFIX'] = '' + env['INCPREFIX'] = '-I' + env['INCSUFFIX'] = '' + env['SHOBJSUFFIX'] = '.os' + env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0 + + env['CFILESUFFIX'] = '.c' + +def exists(env): + return env.Detect('cc') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/cvf.py b/engine/SCons/Tool/cvf.py new file mode 100644 index 0000000..7855096 --- /dev/null +++ b/engine/SCons/Tool/cvf.py @@ -0,0 +1,58 @@ +"""engine.SCons.Tool.cvf + +Tool-specific initialization for the Compaq Visual Fortran compiler. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/cvf.py 4577 2009/12/27 19:43:56 scons" + +import fortran + +compilers = ['f90'] + +def generate(env): + """Add Builders and construction variables for compaq visual fortran to an Environment.""" + + fortran.generate(env) + + env['FORTRAN'] = 'f90' + env['FORTRANCOM'] = '$FORTRAN $FORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' + env['FORTRANPPCOM'] = '$FORTRAN $FORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' + env['SHFORTRANCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' + env['SHFORTRANPPCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' + env['OBJSUFFIX'] = '.obj' + env['FORTRANMODDIR'] = '${TARGET.dir}' + env['FORTRANMODDIRPREFIX'] = '/module:' + env['FORTRANMODDIRSUFFIX'] = '' + +def exists(env): + return env.Detect(compilers) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/default.py b/engine/SCons/Tool/default.py new file mode 100644 index 0000000..d5befef --- /dev/null +++ b/engine/SCons/Tool/default.py @@ -0,0 +1,50 @@ +"""SCons.Tool.default + +Initialization with a default tool list. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/default.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Tool + +def generate(env): + """Add default tools.""" + for t in SCons.Tool.tool_list(env['PLATFORM'], env): + SCons.Tool.Tool(t)(env) + +def exists(env): + return 1 + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/dmd.py b/engine/SCons/Tool/dmd.py new file mode 100644 index 0000000..3d8c257 --- /dev/null +++ b/engine/SCons/Tool/dmd.py @@ -0,0 +1,224 @@ +"""SCons.Tool.dmd + +Tool-specific initialization for the Digital Mars D compiler. +(http://digitalmars.com/d) + +Coded by Andy Friesen (andy@ikagames.com) +15 November 2003 + +There are a number of problems with this script at this point in time. +The one that irritates me the most is the Windows linker setup. The D +linker doesn't have a way to add lib paths on the commandline, as far +as I can see. You have to specify paths relative to the SConscript or +use absolute paths. To hack around it, add '#/blah'. This will link +blah.lib from the directory where SConstruct resides. + +Compiler variables: + DC - The name of the D compiler to use. Defaults to dmd or gdmd, + whichever is found. + DPATH - List of paths to search for import modules. + DVERSIONS - List of version tags to enable when compiling. + DDEBUG - List of debug tags to enable when compiling. + +Linker related variables: + LIBS - List of library files to link in. + DLINK - Name of the linker to use. Defaults to dmd or gdmd. + DLINKFLAGS - List of linker flags. + +Lib tool variables: + DLIB - Name of the lib tool to use. Defaults to lib. + DLIBFLAGS - List of flags to pass to the lib tool. + LIBS - Same as for the linker. (libraries to pull into the .lib) +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
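# Illustrative SConstruct sketch for the dmd tool variables documented
# above -- an editorial example, not part of this patch. Paths, version
# tags and file names are made up.
env = Environment(tools=['default', 'dmd'])
env.Append(DPATH=['#/imports'],      # import search path -> $_DINCFLAGS
           DVERSIONS=['Unicode'],    # -version=Unicode
           DDEBUG=['logging'])       # -debug=logging
# On Windows, the '#/blah' workaround described above would be spelled:
#   env.Append(LIBS=['#/mylib'])
env.Program('app', ['app.d', 'util.d'])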
+# + +__revision__ = "src/engine/SCons/Tool/dmd.py 4577 2009/12/27 19:43:56 scons" + +import os +import string + +import SCons.Action +import SCons.Builder +import SCons.Defaults +import SCons.Scanner.D +import SCons.Tool + +# Adapted from c++.py +def isD(source): + if not source: + return 0 + + for s in source: + if s.sources: + ext = os.path.splitext(str(s.sources[0]))[1] + if ext == '.d': + return 1 + return 0 + +smart_link = {} + +smart_lib = {} + +def generate(env): + global smart_link + global smart_lib + + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + + DAction = SCons.Action.Action('$DCOM', '$DCOMSTR') + + static_obj.add_action('.d', DAction) + shared_obj.add_action('.d', DAction) + static_obj.add_emitter('.d', SCons.Defaults.StaticObjectEmitter) + shared_obj.add_emitter('.d', SCons.Defaults.SharedObjectEmitter) + + dc = env.Detect(['dmd', 'gdmd']) + env['DC'] = dc + env['DCOM'] = '$DC $_DINCFLAGS $_DVERFLAGS $_DDEBUGFLAGS $_DFLAGS -c -of$TARGET $SOURCES' + env['_DINCFLAGS'] = '$( ${_concat(DINCPREFIX, DPATH, DINCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)' + env['_DVERFLAGS'] = '$( ${_concat(DVERPREFIX, DVERSIONS, DVERSUFFIX, __env__)} $)' + env['_DDEBUGFLAGS'] = '$( ${_concat(DDEBUGPREFIX, DDEBUG, DDEBUGSUFFIX, __env__)} $)' + env['_DFLAGS'] = '$( ${_concat(DFLAGPREFIX, DFLAGS, DFLAGSUFFIX, __env__)} $)' + + env['DPATH'] = ['#/'] + env['DFLAGS'] = [] + env['DVERSIONS'] = [] + env['DDEBUG'] = [] + + if dc: + # Add the path to the standard library. + # This is merely for the convenience of the dependency scanner. + dmd_path = env.WhereIs(dc) + if dmd_path: + x = string.rindex(dmd_path, dc) + phobosDir = dmd_path[:x] + '/../src/phobos' + if os.path.isdir(phobosDir): + env.Append(DPATH = [phobosDir]) + + env['DINCPREFIX'] = '-I' + env['DINCSUFFIX'] = '' + env['DVERPREFIX'] = '-version=' + env['DVERSUFFIX'] = '' + env['DDEBUGPREFIX'] = '-debug=' + env['DDEBUGSUFFIX'] = '' + env['DFLAGPREFIX'] = '-' + env['DFLAGSUFFIX'] = '' + env['DFILESUFFIX'] = '.d' + + # Need to use the Digital Mars linker/lib on windows. + # *nix can just use GNU link. + if env['PLATFORM'] == 'win32': + env['DLINK'] = '$DC' + env['DLINKCOM'] = '$DLINK -of$TARGET $SOURCES $DFLAGS $DLINKFLAGS $_DLINKLIBFLAGS' + env['DLIB'] = 'lib' + env['DLIBCOM'] = '$DLIB $_DLIBFLAGS -c $TARGET $SOURCES $_DLINKLIBFLAGS' + + env['_DLINKLIBFLAGS'] = '$( ${_concat(DLIBLINKPREFIX, LIBS, DLIBLINKSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)' + env['_DLIBFLAGS'] = '$( ${_concat(DLIBFLAGPREFIX, DLIBFLAGS, DLIBFLAGSUFFIX, __env__)} $)' + env['DLINKFLAGS'] = [] + env['DLIBLINKPREFIX'] = '' + env['DLIBLINKSUFFIX'] = '.lib' + env['DLIBFLAGPREFIX'] = '-' + env['DLIBFLAGSUFFIX'] = '' + env['DLINKFLAGPREFIX'] = '-' + env['DLINKFLAGSUFFIX'] = '' + + SCons.Tool.createStaticLibBuilder(env) + + # Basically, we hijack the link and ar builders with our own. + # these builders check for the presence of D source, and swap out + # the system's defaults for the Digital Mars tools. If there's no D + # source, then we silently return the previous settings. + linkcom = env.get('LINKCOM') + try: + env['SMART_LINKCOM'] = smart_link[linkcom] + except KeyError: + def _smartLink(source, target, env, for_signature, + defaultLinker=linkcom): + if isD(source): + # XXX I'm not sure how to add a $DLINKCOMSTR variable + # so that it works with this _smartLink() logic, + # and I don't have a D compiler/linker to try it out, + # so we'll leave it alone for now. 
+                return '$DLINKCOM'
+            else:
+                return defaultLinker
+        env['SMART_LINKCOM'] = smart_link[linkcom] = _smartLink
+
+        arcom = env.get('ARCOM')
+        try:
+            env['SMART_ARCOM'] = smart_lib[arcom]
+        except KeyError:
+            def _smartLib(source, target, env, for_signature,
+                          defaultLib=arcom):
+                if isD(source):
+                    # XXX I'm not sure how to add a $DLIBCOMSTR variable
+                    # so that it works with this _smartLib() logic, and
+                    # I don't have a D compiler/archiver to try it out,
+                    # so we'll leave it alone for now.
+                    return '$DLIBCOM'
+                else:
+                    return defaultLib
+            env['SMART_ARCOM'] = smart_lib[arcom] = _smartLib
+
+        # It is worth noting that the final space in these strings is
+        # absolutely pivotal.  SCons sees these as actions and not generators
+        # if it is not there. (very bad)
+        env['ARCOM'] = '$SMART_ARCOM '
+        env['LINKCOM'] = '$SMART_LINKCOM '
+    else: # assuming linux
+        linkcom = env.get('LINKCOM')
+        try:
+            env['SMART_LINKCOM'] = smart_link[linkcom]
+        except KeyError:
+            def _smartLink(source, target, env, for_signature,
+                           defaultLinker=linkcom, dc=dc):
+                if isD(source):
+                    try:
+                        libs = env['LIBS']
+                    except KeyError:
+                        libs = []
+                    if 'phobos' not in libs and 'gphobos' not in libs:
+                        if dc == 'dmd':
+                            env.Append(LIBS = ['phobos'])
+                        elif dc == 'gdmd':
+                            env.Append(LIBS = ['gphobos'])
+                    if 'pthread' not in libs:
+                        env.Append(LIBS = ['pthread'])
+                    if 'm' not in libs:
+                        env.Append(LIBS = ['m'])
+                return defaultLinker
+            env['SMART_LINKCOM'] = smart_link[linkcom] = _smartLink
+
+        env['LINKCOM'] = '$SMART_LINKCOM '
+
+def exists(env):
+    return env.Detect(['dmd', 'gdmd'])
+
+# Local Variables:
+# tab-width:4
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/dvi.py b/engine/SCons/Tool/dvi.py new file mode 100644 index 0000000..c423e38 --- /dev/null +++ b/engine/SCons/Tool/dvi.py @@ -0,0 +1,64 @@ +"""SCons.Tool.dvi
+
+Common DVI Builder definition for various other Tool modules that use it.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
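# Illustrative sketch of the LINKCOM "hijack" set up in dmd.py above -- an
# editorial example, not part of this patch. On a POSIX platform, isD()
# detects the D source and $SMART_LINKCOM appends phobos/pthread/m to LIBS.
env = Environment(tools=['default', 'dmd'])
env.Program('mixed', ['glue.c', 'core.d'])   # no explicit D runtime libraries needed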
+# + +__revision__ = "src/engine/SCons/Tool/dvi.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Builder +import SCons.Tool + +DVIBuilder = None + +def generate(env): + try: + env['BUILDERS']['DVI'] + except KeyError: + global DVIBuilder + + if DVIBuilder is None: + # The suffix is hard-coded to '.dvi', not configurable via a + # construction variable like $DVISUFFIX, because the output + # file name is hard-coded within TeX. + DVIBuilder = SCons.Builder.Builder(action = {}, + source_scanner = SCons.Tool.LaTeXScanner, + suffix = '.dvi', + emitter = {}, + source_ext_match = None) + + env['BUILDERS']['DVI'] = DVIBuilder + +def exists(env): + # This only puts a skeleton Builder in place, so if someone + # references this Tool directly, it's always "available." + return 1 + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/dvipdf.py b/engine/SCons/Tool/dvipdf.py new file mode 100644 index 0000000..1ed9c73 --- /dev/null +++ b/engine/SCons/Tool/dvipdf.py @@ -0,0 +1,125 @@ +"""SCons.Tool.dvipdf + +Tool-specific initialization for dvipdf. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
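# Illustrative sketch for the DVI builder skeleton above -- an editorial
# example, not part of this patch. The actual '.tex' -> '.dvi' action is
# attached by the tex/latex tool modules, so this assumes TeX is installed
# and picked up by the default tools.
env = Environment()
env.DVI('report.tex')   # -> report.dvi; the suffix is hard-coded, as noted above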
+# + +__revision__ = "src/engine/SCons/Tool/dvipdf.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Action +import SCons.Defaults +import SCons.Tool.pdf +import SCons.Tool.tex +import SCons.Util + +_null = SCons.Scanner.LaTeX._null + +def DviPdfPsFunction(XXXDviAction, target = None, source= None, env=None): + """A builder for DVI files that sets the TEXPICTS environment + variable before running dvi2ps or dvipdf.""" + + try: + abspath = source[0].attributes.path + except AttributeError : + abspath = '' + + saved_env = SCons.Scanner.LaTeX.modify_env_var(env, 'TEXPICTS', abspath) + + result = XXXDviAction(target, source, env) + + if saved_env is _null: + try: + del env['ENV']['TEXPICTS'] + except KeyError: + pass # was never set + else: + env['ENV']['TEXPICTS'] = saved_env + + return result + +def DviPdfFunction(target = None, source= None, env=None): + result = DviPdfPsFunction(PDFAction,target,source,env) + return result + +def DviPdfStrFunction(target = None, source= None, env=None): + """A strfunction for dvipdf that returns the appropriate + command string for the no_exec options.""" + if env.GetOption("no_exec"): + result = env.subst('$DVIPDFCOM',0,target,source) + else: + result = '' + return result + +PDFAction = None +DVIPDFAction = None + +def PDFEmitter(target, source, env): + """Strips any .aux or .log files from the input source list. + These are created by the TeX Builder that in all likelihood was + used to generate the .dvi file we're using as input, and we only + care about the .dvi file. + """ + def strip_suffixes(n): + return not SCons.Util.splitext(str(n))[1] in ['.aux', '.log'] + source = filter(strip_suffixes, source) + return (target, source) + +def generate(env): + """Add Builders and construction variables for dvipdf to an Environment.""" + global PDFAction + if PDFAction is None: + PDFAction = SCons.Action.Action('$DVIPDFCOM', '$DVIPDFCOMSTR') + + global DVIPDFAction + if DVIPDFAction is None: + DVIPDFAction = SCons.Action.Action(DviPdfFunction, strfunction = DviPdfStrFunction) + + import pdf + pdf.generate(env) + + bld = env['BUILDERS']['PDF'] + bld.add_action('.dvi', DVIPDFAction) + bld.add_emitter('.dvi', PDFEmitter) + + env['DVIPDF'] = 'dvipdf' + env['DVIPDFFLAGS'] = SCons.Util.CLVar('') + env['DVIPDFCOM'] = 'cd ${TARGET.dir} && $DVIPDF $DVIPDFFLAGS ${SOURCE.file} ${TARGET.file}' + + # Deprecated synonym. + env['PDFCOM'] = ['$DVIPDFCOM'] + +def exists(env): + return env.Detect('dvipdf') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/dvips.py b/engine/SCons/Tool/dvips.py new file mode 100644 index 0000000..e1116e0 --- /dev/null +++ b/engine/SCons/Tool/dvips.py @@ -0,0 +1,94 @@ +"""SCons.Tool.dvips + +Tool-specific initialization for dvips. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/dvips.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Action +import SCons.Builder +import SCons.Tool.dvipdf +import SCons.Util + +def DviPsFunction(target = None, source= None, env=None): + result = SCons.Tool.dvipdf.DviPdfPsFunction(PSAction,target,source,env) + return result + +def DviPsStrFunction(target = None, source= None, env=None): + """A strfunction for dvipdf that returns the appropriate + command string for the no_exec options.""" + if env.GetOption("no_exec"): + result = env.subst('$PSCOM',0,target,source) + else: + result = '' + return result + +PSAction = None +DVIPSAction = None +PSBuilder = None + +def generate(env): + """Add Builders and construction variables for dvips to an Environment.""" + global PSAction + if PSAction is None: + PSAction = SCons.Action.Action('$PSCOM', '$PSCOMSTR') + + global DVIPSAction + if DVIPSAction is None: + DVIPSAction = SCons.Action.Action(DviPsFunction, strfunction = DviPsStrFunction) + + global PSBuilder + if PSBuilder is None: + PSBuilder = SCons.Builder.Builder(action = PSAction, + prefix = '$PSPREFIX', + suffix = '$PSSUFFIX', + src_suffix = '.dvi', + src_builder = 'DVI', + single_source=True) + + env['BUILDERS']['PostScript'] = PSBuilder + + env['DVIPS'] = 'dvips' + env['DVIPSFLAGS'] = SCons.Util.CLVar('') + # I'm not quite sure I got the directories and filenames right for variant_dir + # We need to be in the correct directory for the sake of latex \includegraphics eps included files. + env['PSCOM'] = 'cd ${TARGET.dir} && $DVIPS $DVIPSFLAGS -o ${TARGET.file} ${SOURCE.file}' + env['PSPREFIX'] = '' + env['PSSUFFIX'] = '.ps' + +def exists(env): + return env.Detect('dvips') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/f77.py b/engine/SCons/Tool/f77.py new file mode 100644 index 0000000..7b5607a --- /dev/null +++ b/engine/SCons/Tool/f77.py @@ -0,0 +1,62 @@ +"""engine.SCons.Tool.f77 + +Tool-specific initialization for the generic Posix f77 Fortran compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/f77.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Defaults +import SCons.Scanner.Fortran +import SCons.Tool +import SCons.Util +from SCons.Tool.FortranCommon import add_all_to_env, add_f77_to_env + +compilers = ['f77'] + +def generate(env): + add_all_to_env(env) + add_f77_to_env(env) + + fcomp = env.Detect(compilers) or 'f77' + env['F77'] = fcomp + env['SHF77'] = fcomp + + env['FORTRAN'] = fcomp + env['SHFORTRAN'] = fcomp + +def exists(env): + return env.Detect(compilers) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/f90.py b/engine/SCons/Tool/f90.py new file mode 100644 index 0000000..e9c2cec --- /dev/null +++ b/engine/SCons/Tool/f90.py @@ -0,0 +1,62 @@ +"""engine.SCons.Tool.f90 + +Tool-specific initialization for the generic Posix f90 Fortran compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
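# Illustrative SConstruct sketch for the f77 tool module above -- an
# editorial example, not part of this patch. File names are hypothetical.
env = Environment(tools=['default', 'f77'])
env.Append(F77FLAGS=['-O2'])   # dialect flags defined by add_f77_to_env(); $FORTRANFLAGS also applies
env.Program('solver', ['solver.f', 'io.f'])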
+# + +__revision__ = "src/engine/SCons/Tool/f90.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Defaults +import SCons.Scanner.Fortran +import SCons.Tool +import SCons.Util +from SCons.Tool.FortranCommon import add_all_to_env, add_f90_to_env + +compilers = ['f90'] + +def generate(env): + add_all_to_env(env) + add_f90_to_env(env) + + fc = env.Detect(compilers) or 'f90' + env['F90'] = fc + env['SHF90'] = fc + + env['FORTRAN'] = fc + env['SHFORTRAN'] = fc + +def exists(env): + return env.Detect(compilers) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/f95.py b/engine/SCons/Tool/f95.py new file mode 100644 index 0000000..2d78cea --- /dev/null +++ b/engine/SCons/Tool/f95.py @@ -0,0 +1,63 @@ +"""engine.SCons.Tool.f95 + +Tool-specific initialization for the generic Posix f95 Fortran compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/f95.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Defaults +import SCons.Tool +import SCons.Util +import fortran +from SCons.Tool.FortranCommon import add_all_to_env, add_f95_to_env + +compilers = ['f95'] + +def generate(env): + add_all_to_env(env) + add_f95_to_env(env) + + fcomp = env.Detect(compilers) or 'f95' + env['F95'] = fcomp + env['SHF95'] = fcomp + + env['FORTRAN'] = fcomp + env['SHFORTRAN'] = fcomp + + +def exists(env): + return env.Detect(compilers) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/filesystem.py b/engine/SCons/Tool/filesystem.py new file mode 100644 index 0000000..263e9ff --- /dev/null +++ b/engine/SCons/Tool/filesystem.py @@ -0,0 +1,98 @@ +"""SCons.Tool.filesystem + +Tool-specific initialization for the filesystem tools. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/filesystem.py 4577 2009/12/27 19:43:56 scons" + +import SCons +from SCons.Tool.install import copyFunc + +copyToBuilder, copyAsBuilder = None, None + +def copyto_emitter(target, source, env): + """ changes the path of the source to be under the target (which + are assumed to be directories. + """ + n_target = [] + + for t in target: + n_target = n_target + map( lambda s, t=t: t.File( str( s ) ), source ) + + return (n_target, source) + +def copy_action_func(target, source, env): + assert( len(target) == len(source) ), "\ntarget: %s\nsource: %s" %(map(str, target),map(str, source)) + + for t, s in zip(target, source): + if copyFunc(t.get_path(), s.get_path(), env): + return 1 + + return 0 + +def copy_action_str(target, source, env): + return env.subst_target_source(env['COPYSTR'], 0, target, source) + +copy_action = SCons.Action.Action( copy_action_func, copy_action_str ) + +def generate(env): + try: + env['BUILDERS']['CopyTo'] + env['BUILDERS']['CopyAs'] + except KeyError, e: + global copyToBuilder + if copyToBuilder is None: + copyToBuilder = SCons.Builder.Builder( + action = copy_action, + target_factory = env.fs.Dir, + source_factory = env.fs.Entry, + multi = 1, + emitter = [ copyto_emitter, ] ) + + global copyAsBuilder + if copyAsBuilder is None: + copyAsBuilder = SCons.Builder.Builder( + action = copy_action, + target_factory = env.fs.Entry, + source_factory = env.fs.Entry ) + + env['BUILDERS']['CopyTo'] = copyToBuilder + env['BUILDERS']['CopyAs'] = copyAsBuilder + + env['COPYSTR'] = 'Copy file(s): "$SOURCES" to "$TARGETS"' + +def exists(env): + return 1 + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/fortran.py b/engine/SCons/Tool/fortran.py new file mode 100644 index 0000000..6ff348f --- /dev/null +++ b/engine/SCons/Tool/fortran.py @@ -0,0 +1,63 @@ +"""SCons.Tool.fortran + +Tool-specific initialization for a generic Posix f77/f90 Fortran compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/fortran.py 4577 2009/12/27 19:43:56 scons" + +import re +import string + +import SCons.Action +import SCons.Defaults +import SCons.Scanner.Fortran +import SCons.Tool +import SCons.Util +from SCons.Tool.FortranCommon import add_all_to_env, add_fortran_to_env + +compilers = ['f95', 'f90', 'f77'] + +def generate(env): + add_all_to_env(env) + add_fortran_to_env(env) + + fc = env.Detect(compilers) or 'f77' + env['SHFORTRAN'] = fc + env['FORTRAN'] = fc + +def exists(env): + return env.Detect(compilers) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/g++.py b/engine/SCons/Tool/g++.py new file mode 100644 index 0000000..bdde6be --- /dev/null +++ b/engine/SCons/Tool/g++.py @@ -0,0 +1,90 @@ +"""SCons.Tool.g++ + +Tool-specific initialization for g++. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
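# Illustrative SConstruct sketch for the generic fortran tool module above --
# an editorial example, not part of this patch. $FORTRAN becomes the first
# of f95/f90/f77 found on the PATH, falling back to 'f77'.
env = Environment(tools=['default', 'fortran'])
print env['FORTRAN']      # e.g. 'f95' if that compiler was detected
env.Object('fields.f90')  # suffix/dialect handling comes from FortranCommon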
+# + +__revision__ = "src/engine/SCons/Tool/g++.py 4577 2009/12/27 19:43:56 scons" + +import os.path +import re +import subprocess + +import SCons.Tool +import SCons.Util + +cplusplus = __import__('c++', globals(), locals(), []) + +compilers = ['g++'] + +def generate(env): + """Add Builders and construction variables for g++ to an Environment.""" + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + + cplusplus.generate(env) + + env['CXX'] = env.Detect(compilers) + + # platform specific settings + if env['PLATFORM'] == 'aix': + env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS -mminimal-toc') + env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1 + env['SHOBJSUFFIX'] = '$OBJSUFFIX' + elif env['PLATFORM'] == 'hpux': + env['SHOBJSUFFIX'] = '.pic.o' + elif env['PLATFORM'] == 'sunos': + env['SHOBJSUFFIX'] = '.pic.o' + # determine compiler version + if env['CXX']: + #pipe = SCons.Action._subproc(env, [env['CXX'], '-dumpversion'], + pipe = SCons.Action._subproc(env, [env['CXX'], '--version'], + stdin = 'devnull', + stderr = 'devnull', + stdout = subprocess.PIPE) + if pipe.wait() != 0: return + # -dumpversion was added in GCC 3.0. As long as we're supporting + # GCC versions older than that, we should use --version and a + # regular expression. + #line = pipe.stdout.read().strip() + #if line: + # env['CXXVERSION'] = line + line = pipe.stdout.readline() + match = re.search(r'[0-9]+(\.[0-9]+)+', line) + if match: + env['CXXVERSION'] = match.group(0) + +def exists(env): + return env.Detect(compilers) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/g77.py b/engine/SCons/Tool/g77.py new file mode 100644 index 0000000..79d610e --- /dev/null +++ b/engine/SCons/Tool/g77.py @@ -0,0 +1,73 @@ +"""engine.SCons.Tool.g77 + +Tool-specific initialization for g77. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
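# Illustrative sketch for the g++ tool module above -- an editorial example,
# not part of this patch. After generate() runs, $CXXVERSION holds the
# version scraped from 'g++ --version' (when detection succeeds).
env = Environment(tools=['default', 'g++'])
print env.get('CXXVERSION', 'unknown')        # e.g. '4.4.1'
env.SharedLibrary('gadget', ['gadget.cpp'])   # -> libgadget.so on a typical Linux setup (an assumption)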
+# + +__revision__ = "src/engine/SCons/Tool/g77.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Util +from SCons.Tool.FortranCommon import add_all_to_env, add_f77_to_env + +compilers = ['g77', 'f77'] + +def generate(env): + """Add Builders and construction variables for g77 to an Environment.""" + add_all_to_env(env) + add_f77_to_env(env) + + fcomp = env.Detect(compilers) or 'g77' + if env['PLATFORM'] in ['cygwin', 'win32']: + env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS') + env['SHF77FLAGS'] = SCons.Util.CLVar('$F77FLAGS') + else: + env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS -fPIC') + env['SHF77FLAGS'] = SCons.Util.CLVar('$F77FLAGS -fPIC') + + env['FORTRAN'] = fcomp + env['SHFORTRAN'] = '$FORTRAN' + + env['F77'] = fcomp + env['SHF77'] = '$F77' + + env['INCFORTRANPREFIX'] = "-I" + env['INCFORTRANSUFFIX'] = "" + + env['INCF77PREFIX'] = "-I" + env['INCF77SUFFIX'] = "" + +def exists(env): + return env.Detect(compilers) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/gas.py b/engine/SCons/Tool/gas.py new file mode 100644 index 0000000..94280d6 --- /dev/null +++ b/engine/SCons/Tool/gas.py @@ -0,0 +1,53 @@ +"""SCons.Tool.gas + +Tool-specific initialization for as, the Gnu assembler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/gas.py 4577 2009/12/27 19:43:56 scons" + +as_module = __import__('as', globals(), locals(), []) + +assemblers = ['as', 'gas'] + +def generate(env): + """Add Builders and construction variables for as to an Environment.""" + as_module.generate(env) + + env['AS'] = env.Detect(assemblers) or 'as' + +def exists(env): + return env.Detect(assemblers) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/gcc.py b/engine/SCons/Tool/gcc.py new file mode 100644 index 0000000..0690194 --- /dev/null +++ b/engine/SCons/Tool/gcc.py @@ -0,0 +1,80 @@ +"""SCons.Tool.gcc + +Tool-specific initialization for gcc. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/gcc.py 4577 2009/12/27 19:43:56 scons" + +import cc +import os +import re +import subprocess + +import SCons.Util + +compilers = ['gcc', 'cc'] + +def generate(env): + """Add Builders and construction variables for gcc to an Environment.""" + cc.generate(env) + + env['CC'] = env.Detect(compilers) or 'gcc' + if env['PLATFORM'] in ['cygwin', 'win32']: + env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS') + else: + env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS -fPIC') + # determine compiler version + if env['CC']: + #pipe = SCons.Action._subproc(env, [env['CC'], '-dumpversion'], + pipe = SCons.Action._subproc(env, [env['CC'], '--version'], + stdin = 'devnull', + stderr = 'devnull', + stdout = subprocess.PIPE) + if pipe.wait() != 0: return + # -dumpversion was added in GCC 3.0. As long as we're supporting + # GCC versions older than that, we should use --version and a + # regular expression. + #line = pipe.stdout.read().strip() + #if line: + # env['CCVERSION'] = line + line = pipe.stdout.readline() + match = re.search(r'[0-9]+(\.[0-9]+)+', line) + if match: + env['CCVERSION'] = match.group(0) + +def exists(env): + return env.Detect(compilers) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/gfortran.py b/engine/SCons/Tool/gfortran.py new file mode 100644 index 0000000..ddbdd69 --- /dev/null +++ b/engine/SCons/Tool/gfortran.py @@ -0,0 +1,64 @@ +"""SCons.Tool.gfortran + +Tool-specific initialization for gfortran, the GNU Fortran 95/Fortran +2003 compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/gfortran.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Util + +import fortran + +def generate(env): + """Add Builders and construction variables for gfortran to an + Environment.""" + fortran.generate(env) + + for dialect in ['F77', 'F90', 'FORTRAN', 'F95']: + env['%s' % dialect] = 'gfortran' + env['SH%s' % dialect] = '$%s' % dialect + if env['PLATFORM'] in ['cygwin', 'win32']: + env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS' % dialect) + else: + env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS -fPIC' % dialect) + + env['INC%sPREFIX' % dialect] = "-I" + env['INC%sSUFFIX' % dialect] = "" + +def exists(env): + return env.Detect('gfortran') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/gnulink.py b/engine/SCons/Tool/gnulink.py new file mode 100644 index 0000000..d983f43 --- /dev/null +++ b/engine/SCons/Tool/gnulink.py @@ -0,0 +1,63 @@ +"""SCons.Tool.gnulink + +Tool-specific initialization for the gnu linker. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/gnulink.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Util + +import link + +linkers = ['g++', 'gcc'] + +def generate(env): + """Add Builders and construction variables for gnulink to an Environment.""" + link.generate(env) + + if env['PLATFORM'] == 'hpux': + env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared -fPIC') + + # __RPATH is set to $_RPATH in the platform specification if that + # platform supports it. + env.Append(LINKFLAGS=['$__RPATH']) + env['RPATHPREFIX'] = '-Wl,-rpath=' + env['RPATHSUFFIX'] = '' + env['_RPATH'] = '${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}' + +def exists(env): + return env.Detect(linkers) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/gs.py b/engine/SCons/Tool/gs.py new file mode 100644 index 0000000..96982c0 --- /dev/null +++ b/engine/SCons/Tool/gs.py @@ -0,0 +1,81 @@ +"""SCons.Tool.gs + +Tool-specific initialization for Ghostscript. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/gs.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Action +import SCons.Platform +import SCons.Util + +# Ghostscript goes by different names on different platforms... 
+platform = SCons.Platform.platform_default() + +if platform == 'os2': + gs = 'gsos2' +elif platform == 'win32': + gs = 'gswin32c' +else: + gs = 'gs' + +GhostscriptAction = None + +def generate(env): + """Add Builders and construction variables for Ghostscript to an + Environment.""" + + global GhostscriptAction + if GhostscriptAction is None: + GhostscriptAction = SCons.Action.Action('$GSCOM', '$GSCOMSTR') + + import pdf + pdf.generate(env) + + bld = env['BUILDERS']['PDF'] + bld.add_action('.ps', GhostscriptAction) + + env['GS'] = gs + env['GSFLAGS'] = SCons.Util.CLVar('-dNOPAUSE -dBATCH -sDEVICE=pdfwrite') + env['GSCOM'] = '$GS $GSFLAGS -sOutputFile=$TARGET $SOURCES' + + +def exists(env): + if env.has_key('PS2PDF'): + return env.Detect(env['PS2PDF']) + else: + return env.Detect(gs) or SCons.Util.WhereIs(gs) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/hpc++.py b/engine/SCons/Tool/hpc++.py new file mode 100644 index 0000000..2eb671b --- /dev/null +++ b/engine/SCons/Tool/hpc++.py @@ -0,0 +1,85 @@ +"""SCons.Tool.hpc++ + +Tool-specific initialization for c++ on HP/UX. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/hpc++.py 4577 2009/12/27 19:43:56 scons" + +import os.path +import string + +import SCons.Util + +cplusplus = __import__('c++', globals(), locals(), []) + +acc = None + +# search for the acc compiler and linker front end + +try: + dirs = os.listdir('/opt') +except (IOError, OSError): + # Not being able to read the directory because it doesn't exist + # (IOError) or isn't readable (OSError) is okay. 
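+    # Fall back to an empty list: acc then stays None, generate() skips
+    # the aCC-specific setup and exists() reports the compiler as absent.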
+ dirs = [] + +for dir in dirs: + cc = '/opt/' + dir + '/bin/aCC' + if os.path.exists(cc): + acc = cc + break + + +def generate(env): + """Add Builders and construction variables for g++ to an Environment.""" + cplusplus.generate(env) + + if acc: + env['CXX'] = acc or 'aCC' + env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS +Z') + # determine version of aCC + line = os.popen(acc + ' -V 2>&1').readline().rstrip() + if string.find(line, 'aCC: HP ANSI C++') == 0: + env['CXXVERSION'] = string.split(line)[-1] + + if env['PLATFORM'] == 'cygwin': + env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS') + else: + env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS +Z') + +def exists(env): + return acc + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/hpcc.py b/engine/SCons/Tool/hpcc.py new file mode 100644 index 0000000..1d31dc4 --- /dev/null +++ b/engine/SCons/Tool/hpcc.py @@ -0,0 +1,53 @@ +"""SCons.Tool.hpcc + +Tool-specific initialization for HP aCC and cc. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/hpcc.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Util + +import cc + +def generate(env): + """Add Builders and construction variables for aCC & cc to an Environment.""" + cc.generate(env) + + env['CXX'] = 'aCC' + env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS +Z') + +def exists(env): + return env.Detect('aCC') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/hplink.py b/engine/SCons/Tool/hplink.py new file mode 100644 index 0000000..18070d6 --- /dev/null +++ b/engine/SCons/Tool/hplink.py @@ -0,0 +1,77 @@ +"""SCons.Tool.hplink + +Tool-specific initialization for the HP linker. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/hplink.py 4577 2009/12/27 19:43:56 scons" + +import os +import os.path + +import SCons.Util + +import link + +ccLinker = None + +# search for the acc compiler and linker front end + +try: + dirs = os.listdir('/opt') +except (IOError, OSError): + # Not being able to read the directory because it doesn't exist + # (IOError) or isn't readable (OSError) is okay. + dirs = [] + +for dir in dirs: + linker = '/opt/' + dir + '/bin/aCC' + if os.path.exists(linker): + ccLinker = linker + break + +def generate(env): + """ + Add Builders and construction variables for Visual Age linker to + an Environment. + """ + link.generate(env) + + env['LINKFLAGS'] = SCons.Util.CLVar('-Wl,+s -Wl,+vnocompatwarnings') + env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -b') + env['SHLIBSUFFIX'] = '.sl' + +def exists(env): + return ccLinker + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/icc.py b/engine/SCons/Tool/icc.py new file mode 100644 index 0000000..916f7c5 --- /dev/null +++ b/engine/SCons/Tool/icc.py @@ -0,0 +1,59 @@ +"""engine.SCons.Tool.icc + +Tool-specific initialization for the OS/2 icc compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/icc.py 4577 2009/12/27 19:43:56 scons" + +import cc + +def generate(env): + """Add Builders and construction variables for the OS/2 to an Environment.""" + cc.generate(env) + + env['CC'] = 'icc' + env['CCCOM'] = '$CC $CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Fo$TARGET' + env['CXXCOM'] = '$CXX $CXXFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Fo$TARGET' + env['CPPDEFPREFIX'] = '/D' + env['CPPDEFSUFFIX'] = '' + env['INCPREFIX'] = '/I' + env['INCSUFFIX'] = '' + env['CFILESUFFIX'] = '.c' + env['CXXFILESUFFIX'] = '.cc' + +def exists(env): + return env.Detect('icc') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/icl.py b/engine/SCons/Tool/icl.py new file mode 100644 index 0000000..3adee33 --- /dev/null +++ b/engine/SCons/Tool/icl.py @@ -0,0 +1,52 @@ +"""engine.SCons.Tool.icl + +Tool-specific initialization for the Intel C/C++ compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/icl.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Tool.intelc + +# This has been completely superceded by intelc.py, which can +# handle both Windows and Linux versions. + +def generate(*args, **kw): + """Add Builders and construction variables for icl to an Environment.""" + return apply(SCons.Tool.intelc.generate, args, kw) + +def exists(*args, **kw): + return apply(SCons.Tool.intelc.exists, args, kw) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/ifl.py b/engine/SCons/Tool/ifl.py new file mode 100644 index 0000000..85eda07 --- /dev/null +++ b/engine/SCons/Tool/ifl.py @@ -0,0 +1,72 @@ +"""SCons.Tool.ifl + +Tool-specific initialization for the Intel Fortran compiler. + +There normally shouldn't be any need to import this module directly. 
+It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/ifl.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Defaults +from SCons.Scanner.Fortran import FortranScan +from FortranCommon import add_all_to_env + +def generate(env): + """Add Builders and construction variables for ifl to an Environment.""" + fscan = FortranScan("FORTRANPATH") + SCons.Tool.SourceFileScanner.add_scanner('.i', fscan) + SCons.Tool.SourceFileScanner.add_scanner('.i90', fscan) + + if not env.has_key('FORTRANFILESUFFIXES'): + env['FORTRANFILESUFFIXES'] = ['.i'] + else: + env['FORTRANFILESUFFIXES'].append('.i') + + if not env.has_key('F90FILESUFFIXES'): + env['F90FILESUFFIXES'] = ['.i90'] + else: + env['F90FILESUFFIXES'].append('.i90') + + add_all_to_env(env) + + env['FORTRAN'] = 'ifl' + env['SHFORTRAN'] = '$FORTRAN' + env['FORTRANCOM'] = '$FORTRAN $FORTRANFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET' + env['FORTRANPPCOM'] = '$FORTRAN $FORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET' + env['SHFORTRANCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET' + env['SHFORTRANPPCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET' + +def exists(env): + return env.Detect('ifl') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/ifort.py b/engine/SCons/Tool/ifort.py new file mode 100644 index 0000000..168f287 --- /dev/null +++ b/engine/SCons/Tool/ifort.py @@ -0,0 +1,90 @@ +"""SCons.Tool.ifort + +Tool-specific initialization for newer versions of the Intel Fortran Compiler +for Linux/Windows (and possibly Mac OS X). + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/ifort.py 4577 2009/12/27 19:43:56 scons" + +import string + +import SCons.Defaults +from SCons.Scanner.Fortran import FortranScan +from FortranCommon import add_all_to_env + +def generate(env): + """Add Builders and construction variables for ifort to an Environment.""" + # ifort supports Fortran 90 and Fortran 95 + # Additionally, ifort recognizes more file extensions. + fscan = FortranScan("FORTRANPATH") + SCons.Tool.SourceFileScanner.add_scanner('.i', fscan) + SCons.Tool.SourceFileScanner.add_scanner('.i90', fscan) + + if not env.has_key('FORTRANFILESUFFIXES'): + env['FORTRANFILESUFFIXES'] = ['.i'] + else: + env['FORTRANFILESUFFIXES'].append('.i') + + if not env.has_key('F90FILESUFFIXES'): + env['F90FILESUFFIXES'] = ['.i90'] + else: + env['F90FILESUFFIXES'].append('.i90') + + add_all_to_env(env) + + fc = 'ifort' + + for dialect in ['F77', 'F90', 'FORTRAN', 'F95']: + env['%s' % dialect] = fc + env['SH%s' % dialect] = '$%s' % dialect + if env['PLATFORM'] == 'posix': + env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS -fPIC' % dialect) + + if env['PLATFORM'] == 'win32': + # On Windows, the ifort compiler specifies the object on the + # command line with -object:, not -o. Massage the necessary + # command-line construction variables. + for dialect in ['F77', 'F90', 'FORTRAN', 'F95']: + for var in ['%sCOM' % dialect, '%sPPCOM' % dialect, + 'SH%sCOM' % dialect, 'SH%sPPCOM' % dialect]: + env[var] = string.replace(env[var], '-o $TARGET', '-object:$TARGET') + env['FORTRANMODDIRPREFIX'] = "/module:" + else: + env['FORTRANMODDIRPREFIX'] = "-module " + +def exists(env): + return env.Detect('ifort') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/ilink.py b/engine/SCons/Tool/ilink.py new file mode 100644 index 0000000..274f688 --- /dev/null +++ b/engine/SCons/Tool/ilink.py @@ -0,0 +1,59 @@ +"""SCons.Tool.ilink + +Tool-specific initialization for the OS/2 ilink linker. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/ilink.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Defaults +import SCons.Tool +import SCons.Util + +def generate(env): + """Add Builders and construction variables for ilink to an Environment.""" + SCons.Tool.createProgBuilder(env) + + env['LINK'] = 'ilink' + env['LINKFLAGS'] = SCons.Util.CLVar('') + env['LINKCOM'] = '$LINK $LINKFLAGS /O:$TARGET $SOURCES $_LIBDIRFLAGS $_LIBFLAGS' + env['LIBDIRPREFIX']='/LIBPATH:' + env['LIBDIRSUFFIX']='' + env['LIBLINKPREFIX']='' + env['LIBLINKSUFFIX']='$LIBSUFFIX' + +def exists(env): + return env.Detect('ilink') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/ilink32.py b/engine/SCons/Tool/ilink32.py new file mode 100644 index 0000000..7a9e34b --- /dev/null +++ b/engine/SCons/Tool/ilink32.py @@ -0,0 +1,60 @@ +"""SCons.Tool.ilink32 + +XXX + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/ilink32.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Tool +import SCons.Tool.bcc32 +import SCons.Util + +def generate(env): + """Add Builders and construction variables for Borland ilink to an + Environment.""" + SCons.Tool.createSharedLibBuilder(env) + SCons.Tool.createProgBuilder(env) + + env['LINK'] = '$CC' + env['LINKFLAGS'] = SCons.Util.CLVar('') + env['LINKCOM'] = '$LINK -q $LINKFLAGS -e$TARGET $SOURCES $LIBS' + env['LIBDIRPREFIX']='' + env['LIBDIRSUFFIX']='' + env['LIBLINKPREFIX']='' + env['LIBLINKSUFFIX']='$LIBSUFFIX' + + +def exists(env): + # Uses bcc32 to do linking as it generally knows where the standard + # LIBS are and set up the linking correctly + return SCons.Tool.bcc32.findIt('bcc32', env) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/install.py b/engine/SCons/Tool/install.py new file mode 100644 index 0000000..f724219 --- /dev/null +++ b/engine/SCons/Tool/install.py @@ -0,0 +1,229 @@ +"""SCons.Tool.install + +Tool-specific initialization for the install tool. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/install.py 4577 2009/12/27 19:43:56 scons" + +import os +import shutil +import stat + +import SCons.Action +from SCons.Util import make_path_relative + +# +# We keep track of *all* installed files. +_INSTALLED_FILES = [] +_UNIQUE_INSTALLED_FILES = None + +# +# Functions doing the actual work of the Install Builder. 
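+# copyFunc() performs the actual copy (recursing for directories),
+# installFunc() drives it through the INSTALL construction variable for
+# each target/source pair, and stringFunc() renders the progress message
+# (either INSTALLSTR or the built-in 'Install file/directory: ...' form).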
+# +def copyFunc(dest, source, env): + """Install a source file or directory into a destination by copying, + (including copying permission/mode bits).""" + + if os.path.isdir(source): + if os.path.exists(dest): + if not os.path.isdir(dest): + raise SCons.Errors.UserError, "cannot overwrite non-directory `%s' with a directory `%s'" % (str(dest), str(source)) + else: + parent = os.path.split(dest)[0] + if not os.path.exists(parent): + os.makedirs(parent) + shutil.copytree(source, dest) + else: + shutil.copy2(source, dest) + st = os.stat(source) + os.chmod(dest, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) + + return 0 + +def installFunc(target, source, env): + """Install a source file into a target using the function specified + as the INSTALL construction variable.""" + try: + install = env['INSTALL'] + except KeyError: + raise SCons.Errors.UserError('Missing INSTALL construction variable.') + + assert len(target)==len(source), \ + "Installing source %s into target %s: target and source lists must have same length."%(map(str, source), map(str, target)) + for t,s in zip(target,source): + if install(t.get_path(),s.get_path(),env): + return 1 + + return 0 + +def stringFunc(target, source, env): + installstr = env.get('INSTALLSTR') + if installstr: + return env.subst_target_source(installstr, 0, target, source) + target = str(target[0]) + source = str(source[0]) + if os.path.isdir(source): + type = 'directory' + else: + type = 'file' + return 'Install %s: "%s" as "%s"' % (type, source, target) + +# +# Emitter functions +# +def add_targets_to_INSTALLED_FILES(target, source, env): + """ an emitter that adds all target files to the list stored in the + _INSTALLED_FILES global variable. This way all installed files of one + scons call will be collected. + """ + global _INSTALLED_FILES, _UNIQUE_INSTALLED_FILES + _INSTALLED_FILES.extend(target) + _UNIQUE_INSTALLED_FILES = None + return (target, source) + +class DESTDIR_factory: + """ a node factory, where all files will be relative to the dir supplied + in the constructor. + """ + def __init__(self, env, dir): + self.env = env + self.dir = env.arg2nodes( dir, env.fs.Dir )[0] + + def Entry(self, name): + name = make_path_relative(name) + return self.dir.Entry(name) + + def Dir(self, name): + name = make_path_relative(name) + return self.dir.Dir(name) + +# +# The Builder Definition +# +install_action = SCons.Action.Action(installFunc, stringFunc) +installas_action = SCons.Action.Action(installFunc, stringFunc) + +BaseInstallBuilder = None + +def InstallBuilderWrapper(env, target=None, source=None, dir=None, **kw): + if target and dir: + import SCons.Errors + raise SCons.Errors.UserError, "Both target and dir defined for Install(), only one may be defined." + if not dir: + dir=target + + import SCons.Script + install_sandbox = SCons.Script.GetOption('install_sandbox') + if install_sandbox: + target_factory = DESTDIR_factory(env, install_sandbox) + else: + target_factory = env.fs + + try: + dnodes = env.arg2nodes(dir, target_factory.Dir) + except TypeError: + raise SCons.Errors.UserError, "Target `%s' of Install() is a file, but should be a directory. Perhaps you have the Install() arguments backwards?" % str(dir) + sources = env.arg2nodes(source, env.fs.Entry) + tgt = [] + for dnode in dnodes: + for src in sources: + # Prepend './' so the lookup doesn't interpret an initial + # '#' on the file name portion as meaning the Node should + # be relative to the top-level SConstruct directory. 
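+            # For example, a source node named '#data' is looked up as
+            # './#data' beneath dnode instead of being re-anchored at the
+            # top-level SConstruct directory.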
+ target = env.fs.Entry('.'+os.sep+src.name, dnode) + #tgt.extend(BaseInstallBuilder(env, target, src, **kw)) + tgt.extend(apply(BaseInstallBuilder, (env, target, src), kw)) + return tgt + +def InstallAsBuilderWrapper(env, target=None, source=None, **kw): + result = [] + for src, tgt in map(lambda x, y: (x, y), source, target): + #result.extend(BaseInstallBuilder(env, tgt, src, **kw)) + result.extend(apply(BaseInstallBuilder, (env, tgt, src), kw)) + return result + +added = None + +def generate(env): + + from SCons.Script import AddOption, GetOption + global added + if not added: + added = 1 + AddOption('--install-sandbox', + dest='install_sandbox', + type="string", + action="store", + help='A directory under which all installed files will be placed.') + + global BaseInstallBuilder + if BaseInstallBuilder is None: + install_sandbox = GetOption('install_sandbox') + if install_sandbox: + target_factory = DESTDIR_factory(env, install_sandbox) + else: + target_factory = env.fs + + BaseInstallBuilder = SCons.Builder.Builder( + action = install_action, + target_factory = target_factory.Entry, + source_factory = env.fs.Entry, + multi = 1, + emitter = [ add_targets_to_INSTALLED_FILES, ], + name = 'InstallBuilder') + + env['BUILDERS']['_InternalInstall'] = InstallBuilderWrapper + env['BUILDERS']['_InternalInstallAs'] = InstallAsBuilderWrapper + + # We'd like to initialize this doing something like the following, + # but there isn't yet support for a ${SOURCE.type} expansion that + # will print "file" or "directory" depending on what's being + # installed. For now we punt by not initializing it, and letting + # the stringFunc() that we put in the action fall back to the + # hand-crafted default string if it's not set. + # + #try: + # env['INSTALLSTR'] + #except KeyError: + # env['INSTALLSTR'] = 'Install ${SOURCE.type}: "$SOURCES" as "$TARGETS"' + + try: + env['INSTALL'] + except KeyError: + env['INSTALL'] = copyFunc + +def exists(env): + return 1 + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/intelc.py b/engine/SCons/Tool/intelc.py new file mode 100644 index 0000000..7ae3ccd --- /dev/null +++ b/engine/SCons/Tool/intelc.py @@ -0,0 +1,490 @@ +"""SCons.Tool.icl + +Tool-specific initialization for the Intel C/C++ compiler. +Supports Linux and Windows compilers, v7 and up. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/intelc.py 4577 2009/12/27 19:43:56 scons" + +import math, sys, os.path, glob, string, re + +is_windows = sys.platform == 'win32' +is_win64 = is_windows and (os.environ['PROCESSOR_ARCHITECTURE'] == 'AMD64' or + (os.environ.has_key('PROCESSOR_ARCHITEW6432') and + os.environ['PROCESSOR_ARCHITEW6432'] == 'AMD64')) +is_linux = sys.platform == 'linux2' +is_mac = sys.platform == 'darwin' + +if is_windows: + import SCons.Tool.msvc +elif is_linux: + import SCons.Tool.gcc +elif is_mac: + import SCons.Tool.gcc +import SCons.Util +import SCons.Warnings + +# Exceptions for this tool +class IntelCError(SCons.Errors.InternalError): + pass +class MissingRegistryError(IntelCError): # missing registry entry + pass +class MissingDirError(IntelCError): # dir not found + pass +class NoRegistryModuleError(IntelCError): # can't read registry at all + pass + +def uniquify(s): + """Return a sequence containing only one copy of each unique element from input sequence s. + Does not preserve order. + Input sequence must be hashable (i.e. must be usable as a dictionary key).""" + u = {} + for x in s: + u[x] = 1 + return u.keys() + +def linux_ver_normalize(vstr): + """Normalize a Linux compiler version number. + Intel changed from "80" to "9.0" in 2005, so we assume if the number + is greater than 60 it's an old-style number and otherwise new-style. + Always returns an old-style float like 80 or 90 for compatibility with Windows. + Shades of Y2K!""" + # Check for version number like 9.1.026: return 91.026 + m = re.match(r'([0-9]+)\.([0-9]+)\.([0-9]+)', vstr) + if m: + vmaj,vmin,build = m.groups() + return float(vmaj) * 10 + float(vmin) + float(build) / 1000.; + else: + f = float(vstr) + if is_windows: + return f + else: + if f < 60: return f * 10.0 + else: return f + +def check_abi(abi): + """Check for valid ABI (application binary interface) name, + and map into canonical one""" + if not abi: + return None + abi = abi.lower() + # valid_abis maps input name to canonical name + if is_windows: + valid_abis = {'ia32' : 'ia32', + 'x86' : 'ia32', + 'ia64' : 'ia64', + 'em64t' : 'em64t', + 'amd64' : 'em64t'} + if is_linux: + valid_abis = {'ia32' : 'ia32', + 'x86' : 'ia32', + 'x86_64' : 'x86_64', + 'em64t' : 'x86_64', + 'amd64' : 'x86_64'} + if is_mac: + valid_abis = {'ia32' : 'ia32', + 'x86' : 'ia32', + 'x86_64' : 'x86_64', + 'em64t' : 'x86_64'} + try: + abi = valid_abis[abi] + except KeyError: + raise SCons.Errors.UserError, \ + "Intel compiler: Invalid ABI %s, valid values are %s"% \ + (abi, valid_abis.keys()) + return abi + +def vercmp(a, b): + """Compare strings as floats, + but Intel changed Linux naming convention at 9.0""" + return cmp(linux_ver_normalize(b), linux_ver_normalize(a)) + +def get_version_from_list(v, vlist): + """See if we can match v (string) in vlist (list of strings) + Linux has to match in a fuzzy way.""" + if is_windows: + # Simple case, just find it in the list + if v in vlist: return v + else: return None + else: + # Fuzzy match: normalize version number first, but still return + # original non-normalized form. 
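+        # For example, a requested '90' and an installed '9.0' both
+        # normalize to 90.0, so they are treated as the same version here.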
+ fuzz = 0.001 + for vi in vlist: + if math.fabs(linux_ver_normalize(vi) - linux_ver_normalize(v)) < fuzz: + return vi + # Not found + return None + +def get_intel_registry_value(valuename, version=None, abi=None): + """ + Return a value from the Intel compiler registry tree. (Windows only) + """ + # Open the key: + if is_win64: + K = 'Software\\Wow6432Node\\Intel\\Compilers\\C++\\' + version + '\\'+abi.upper() + else: + K = 'Software\\Intel\\Compilers\\C++\\' + version + '\\'+abi.upper() + try: + k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, K) + except SCons.Util.RegError: + raise MissingRegistryError, \ + "%s was not found in the registry, for Intel compiler version %s, abi='%s'"%(K, version,abi) + + # Get the value: + try: + v = SCons.Util.RegQueryValueEx(k, valuename)[0] + return v # or v.encode('iso-8859-1', 'replace') to remove unicode? + except SCons.Util.RegError: + raise MissingRegistryError, \ + "%s\\%s was not found in the registry."%(K, valuename) + + +def get_all_compiler_versions(): + """Returns a sorted list of strings, like "70" or "80" or "9.0" + with most recent compiler version first. + """ + versions=[] + if is_windows: + if is_win64: + keyname = 'Software\\WoW6432Node\\Intel\\Compilers\\C++' + else: + keyname = 'Software\\Intel\\Compilers\\C++' + try: + k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, + keyname) + except WindowsError: + return [] + i = 0 + versions = [] + try: + while i < 100: + subkey = SCons.Util.RegEnumKey(k, i) # raises EnvironmentError + # Check that this refers to an existing dir. + # This is not 100% perfect but should catch common + # installation issues like when the compiler was installed + # and then the install directory deleted or moved (rather + # than uninstalling properly), so the registry values + # are still there. + ok = False + for try_abi in ('IA32', 'IA32e', 'IA64', 'EM64T'): + try: + d = get_intel_registry_value('ProductDir', subkey, try_abi) + except MissingRegistryError: + continue # not found in reg, keep going + if os.path.exists(d): ok = True + if ok: + versions.append(subkey) + else: + try: + # Registry points to nonexistent dir. Ignore this + # version. + value = get_intel_registry_value('ProductDir', subkey, 'IA32') + except MissingRegistryError, e: + + # Registry key is left dangling (potentially + # after uninstalling). + + print \ + "scons: *** Ignoring the registry key for the Intel compiler version %s.\n" \ + "scons: *** It seems that the compiler was uninstalled and that the registry\n" \ + "scons: *** was not cleaned up properly.\n" % subkey + else: + print "scons: *** Ignoring "+str(value) + + i = i + 1 + except EnvironmentError: + # no more subkeys + pass + elif is_linux: + for d in glob.glob('/opt/intel_cc_*'): + # Typical dir here is /opt/intel_cc_80. + m = re.search(r'cc_(.*)$', d) + if m: + versions.append(m.group(1)) + for d in glob.glob('/opt/intel/cc*/*'): + # Typical dir here is /opt/intel/cc/9.0 for IA32, + # /opt/intel/cce/9.0 for EMT64 (AMD64) + m = re.search(r'([0-9.]+)$', d) + if m: + versions.append(m.group(1)) + elif is_mac: + for d in glob.glob('/opt/intel/cc*/*'): + # Typical dir here is /opt/intel/cc/9.0 for IA32, + # /opt/intel/cce/9.0 for EMT64 (AMD64) + m = re.search(r'([0-9.]+)$', d) + if m: + versions.append(m.group(1)) + versions = uniquify(versions) # remove dups + versions.sort(vercmp) + return versions + +def get_intel_compiler_top(version, abi): + """ + Return the main path to the top-level dir of the Intel compiler, + using the given version. 
+ The compiler will be in <top>/bin/icl.exe (icc on linux), + the include dir is <top>/include, etc. + """ + + if is_windows: + if not SCons.Util.can_read_reg: + raise NoRegistryModuleError, "No Windows registry module was found" + top = get_intel_registry_value('ProductDir', version, abi) + # pre-11, icl was in Bin. 11 and later, it's in Bin/<abi> apparently. + if not os.path.exists(os.path.join(top, "Bin", "icl.exe")) \ + and not os.path.exists(os.path.join(top, "Bin", abi, "icl.exe")): + raise MissingDirError, \ + "Can't find Intel compiler in %s"%(top) + elif is_mac or is_linux: + # first dir is new (>=9.0) style, second is old (8.0) style. + dirs=('/opt/intel/cc/%s', '/opt/intel_cc_%s') + if abi == 'x86_64': + dirs=('/opt/intel/cce/%s',) # 'e' stands for 'em64t', aka x86_64 aka amd64 + top=None + for d in dirs: + if os.path.exists(os.path.join(d%version, "bin", "icc")): + top = d%version + break + if not top: + raise MissingDirError, \ + "Can't find version %s Intel compiler in %s (abi='%s')"%(version,top, abi) + return top + + +def generate(env, version=None, abi=None, topdir=None, verbose=0): + """Add Builders and construction variables for Intel C/C++ compiler + to an Environment. + args: + version: (string) compiler version to use, like "80" + abi: (string) 'win32' or whatever Itanium version wants + topdir: (string) compiler top dir, like + "c:\Program Files\Intel\Compiler70" + If topdir is used, version and abi are ignored. + verbose: (int) if >0, prints compiler version used. + """ + if not (is_mac or is_linux or is_windows): + # can't handle this platform + return + + if is_windows: + SCons.Tool.msvc.generate(env) + elif is_linux: + SCons.Tool.gcc.generate(env) + elif is_mac: + SCons.Tool.gcc.generate(env) + + # if version is unspecified, use latest + vlist = get_all_compiler_versions() + if not version: + if vlist: + version = vlist[0] + else: + # User may have specified '90' but we need to get actual dirname '9.0'. + # get_version_from_list does that mapping. + v = get_version_from_list(version, vlist) + if not v: + raise SCons.Errors.UserError, \ + "Invalid Intel compiler version %s: "%version + \ + "installed versions are %s"%(', '.join(vlist)) + version = v + + # if abi is unspecified, use ia32 + # alternatives are ia64 for Itanium, or amd64 or em64t or x86_64 (all synonyms here) + abi = check_abi(abi) + if abi is None: + if is_mac or is_linux: + # Check if we are on 64-bit linux, default to 64 then. + uname_m = os.uname()[4] + if uname_m == 'x86_64': + abi = 'x86_64' + else: + abi = 'ia32' + else: + if is_win64: + abi = 'em64t' + else: + abi = 'ia32' + + if version and not topdir: + try: + topdir = get_intel_compiler_top(version, abi) + except (SCons.Util.RegError, IntelCError): + topdir = None + + if not topdir: + # Normally this is an error, but it might not be if the compiler is + # on $PATH and the user is importing their env. 
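+        # The two warnings below distinguish the cases: if icc/icl is not
+        # detectable on the PATH either, report that no Intel compiler was
+        # found; otherwise only report that its top directory is unknown.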
+ class ICLTopDirWarning(SCons.Warnings.Warning): + pass + if (is_mac or is_linux) and not env.Detect('icc') or \ + is_windows and not env.Detect('icl'): + + SCons.Warnings.enableWarningClass(ICLTopDirWarning) + SCons.Warnings.warn(ICLTopDirWarning, + "Failed to find Intel compiler for version='%s', abi='%s'"% + (str(version), str(abi))) + else: + # should be cleaned up to say what this other version is + # since in this case we have some other Intel compiler installed + SCons.Warnings.enableWarningClass(ICLTopDirWarning) + SCons.Warnings.warn(ICLTopDirWarning, + "Can't find Intel compiler top dir for version='%s', abi='%s'"% + (str(version), str(abi))) + + if topdir: + if verbose: + print "Intel C compiler: using version %s (%g), abi %s, in '%s'"%\ + (repr(version), linux_ver_normalize(version),abi,topdir) + if is_linux: + # Show the actual compiler version by running the compiler. + os.system('%s/bin/icc --version'%topdir) + if is_mac: + # Show the actual compiler version by running the compiler. + os.system('%s/bin/icc --version'%topdir) + + env['INTEL_C_COMPILER_TOP'] = topdir + if is_linux: + paths={'INCLUDE' : 'include', + 'LIB' : 'lib', + 'PATH' : 'bin', + 'LD_LIBRARY_PATH' : 'lib'} + for p in paths.keys(): + env.PrependENVPath(p, os.path.join(topdir, paths[p])) + if is_mac: + paths={'INCLUDE' : 'include', + 'LIB' : 'lib', + 'PATH' : 'bin', + 'LD_LIBRARY_PATH' : 'lib'} + for p in paths.keys(): + env.PrependENVPath(p, os.path.join(topdir, paths[p])) + if is_windows: + # env key reg valname default subdir of top + paths=(('INCLUDE', 'IncludeDir', 'Include'), + ('LIB' , 'LibDir', 'Lib'), + ('PATH' , 'BinDir', 'Bin')) + # We are supposed to ignore version if topdir is set, so set + # it to the emptry string if it's not already set. + if version is None: + version = '' + # Each path has a registry entry, use that or default to subdir + for p in paths: + try: + path=get_intel_registry_value(p[1], version, abi) + # These paths may have $(ICInstallDir) + # which needs to be substituted with the topdir. + path=path.replace('$(ICInstallDir)', topdir + os.sep) + except IntelCError: + # Couldn't get it from registry: use default subdir of topdir + env.PrependENVPath(p[0], os.path.join(topdir, p[2])) + else: + env.PrependENVPath(p[0], string.split(path, os.pathsep)) + # print "ICL %s: %s, final=%s"%(p[0], path, str(env['ENV'][p[0]])) + + if is_windows: + env['CC'] = 'icl' + env['CXX'] = 'icl' + env['LINK'] = 'xilink' + else: + env['CC'] = 'icc' + env['CXX'] = 'icpc' + # Don't reset LINK here; + # use smart_link which should already be here from link.py. + #env['LINK'] = '$CC' + env['AR'] = 'xiar' + env['LD'] = 'xild' # not used by default + + # This is not the exact (detailed) compiler version, + # just the major version as determined above or specified + # by the user. It is a float like 80 or 90, in normalized form for Linux + # (i.e. even for Linux 9.0 compiler, still returns 90 rather than 9.0) + if version: + env['INTEL_C_COMPILER_VERSION']=linux_ver_normalize(version) + + if is_windows: + # Look for license file dir + # in system environment, registry, and default location. 
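+        # Precedence: the INTEL_LICENSE_FILE environment variable first,
+        # then the 'w_cpp' value under HKLM\Software\Intel\Licenses, and
+        # finally the default install path (warning if even that is missing).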
+ envlicdir = os.environ.get("INTEL_LICENSE_FILE", '') + K = ('SOFTWARE\Intel\Licenses') + try: + k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, K) + reglicdir = SCons.Util.RegQueryValueEx(k, "w_cpp")[0] + except (AttributeError, SCons.Util.RegError): + reglicdir = "" + defaultlicdir = r'C:\Program Files\Common Files\Intel\Licenses' + + licdir = None + for ld in [envlicdir, reglicdir]: + # If the string contains an '@', then assume it's a network + # license (port@system) and good by definition. + if ld and (string.find(ld, '@') != -1 or os.path.exists(ld)): + licdir = ld + break + if not licdir: + licdir = defaultlicdir + if not os.path.exists(licdir): + class ICLLicenseDirWarning(SCons.Warnings.Warning): + pass + SCons.Warnings.enableWarningClass(ICLLicenseDirWarning) + SCons.Warnings.warn(ICLLicenseDirWarning, + "Intel license dir was not found." + " Tried using the INTEL_LICENSE_FILE environment variable (%s), the registry (%s) and the default path (%s)." + " Using the default path as a last resort." + % (envlicdir, reglicdir, defaultlicdir)) + env['ENV']['INTEL_LICENSE_FILE'] = licdir + +def exists(env): + if not (is_mac or is_linux or is_windows): + # can't handle this platform + return 0 + + try: + versions = get_all_compiler_versions() + except (SCons.Util.RegError, IntelCError): + versions = None + detected = versions is not None and len(versions) > 0 + if not detected: + # try env.Detect, maybe that will work + if is_windows: + return env.Detect('icl') + elif is_linux: + return env.Detect('icc') + elif is_mac: + return env.Detect('icc') + return detected + +# end of file + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/ipkg.py b/engine/SCons/Tool/ipkg.py new file mode 100644 index 0000000..51da464 --- /dev/null +++ b/engine/SCons/Tool/ipkg.py @@ -0,0 +1,71 @@ +"""SCons.Tool.ipkg + +Tool-specific initialization for ipkg. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +The ipkg tool calls the ipkg-build. Its only argument should be the +packages fake_root. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/ipkg.py 4577 2009/12/27 19:43:56 scons" + +import os +import string + +import SCons.Builder + +def generate(env): + """Add Builders and construction variables for ipkg to an Environment.""" + try: + bld = env['BUILDERS']['Ipkg'] + except KeyError: + bld = SCons.Builder.Builder( action = '$IPKGCOM', + suffix = '$IPKGSUFFIX', + source_scanner = None, + target_scanner = None) + env['BUILDERS']['Ipkg'] = bld + + env['IPKG'] = 'ipkg-build' + env['IPKGCOM'] = '$IPKG $IPKGFLAGS ${SOURCE}' + # TODO(1.5) + #env['IPKGUSER'] = os.popen('id -un').read().strip() + #env['IPKGGROUP'] = os.popen('id -gn').read().strip() + env['IPKGUSER'] = string.strip(os.popen('id -un').read()) + env['IPKGGROUP'] = string.strip(os.popen('id -gn').read()) + env['IPKGFLAGS'] = SCons.Util.CLVar('-o $IPKGUSER -g $IPKGGROUP') + env['IPKGSUFFIX'] = '.ipk' + +def exists(env): + return env.Detect('ipkg-build') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/jar.py b/engine/SCons/Tool/jar.py new file mode 100644 index 0000000..47d10af --- /dev/null +++ b/engine/SCons/Tool/jar.py @@ -0,0 +1,110 @@ +"""SCons.Tool.jar + +Tool-specific initialization for jar. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/jar.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Subst +import SCons.Util + +def jarSources(target, source, env, for_signature): + """Only include sources that are not a manifest file.""" + try: + env['JARCHDIR'] + except KeyError: + jarchdir_set = False + else: + jarchdir_set = True + jarchdir = env.subst('$JARCHDIR', target=target, source=source) + if jarchdir: + jarchdir = env.fs.Dir(jarchdir) + result = [] + for src in source: + contents = src.get_text_contents() + if contents[:16] != "Manifest-Version": + if jarchdir_set: + _chdir = jarchdir + else: + try: + _chdir = src.attributes.java_classdir + except AttributeError: + _chdir = None + if _chdir: + # If we are changing the dir with -C, then sources should + # be relative to that directory. 
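+                # The resulting argument sequence is '-C', the class
+                # directory, then the source path relative to it, which is
+                # how jar expects -C entries to be passed.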
+ src = SCons.Subst.Literal(src.get_path(_chdir)) + result.append('-C') + result.append(_chdir) + result.append(src) + return result + +def jarManifest(target, source, env, for_signature): + """Look in sources for a manifest file, if any.""" + for src in source: + contents = src.get_text_contents() + if contents[:16] == "Manifest-Version": + return src + return '' + +def jarFlags(target, source, env, for_signature): + """If we have a manifest, make sure that the 'm' + flag is specified.""" + jarflags = env.subst('$JARFLAGS', target=target, source=source) + for src in source: + contents = src.get_text_contents() + if contents[:16] == "Manifest-Version": + if not 'm' in jarflags: + return jarflags + 'm' + break + return jarflags + +def generate(env): + """Add Builders and construction variables for jar to an Environment.""" + SCons.Tool.CreateJarBuilder(env) + + env['JAR'] = 'jar' + env['JARFLAGS'] = SCons.Util.CLVar('cf') + env['_JARFLAGS'] = jarFlags + env['_JARMANIFEST'] = jarManifest + env['_JARSOURCES'] = jarSources + env['_JARCOM'] = '$JAR $_JARFLAGS $TARGET $_JARMANIFEST $_JARSOURCES' + env['JARCOM'] = "${TEMPFILE('$_JARCOM')}" + env['JARSUFFIX'] = '.jar' + +def exists(env): + return env.Detect('jar') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/javac.py b/engine/SCons/Tool/javac.py new file mode 100644 index 0000000..7e8186f --- /dev/null +++ b/engine/SCons/Tool/javac.py @@ -0,0 +1,234 @@ +"""SCons.Tool.javac + +Tool-specific initialization for javac. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/javac.py 4577 2009/12/27 19:43:56 scons" + +import os +import os.path +import string + +import SCons.Action +import SCons.Builder +from SCons.Node.FS import _my_normcase +from SCons.Tool.JavaCommon import parse_java_file +import SCons.Util + +def classname(path): + """Turn a string (path name) into a Java class name.""" + return string.replace(os.path.normpath(path), os.sep, '.') + +def emit_java_classes(target, source, env): + """Create and return lists of source java files + and their corresponding target class files. 
+ """ + java_suffix = env.get('JAVASUFFIX', '.java') + class_suffix = env.get('JAVACLASSSUFFIX', '.class') + + target[0].must_be_same(SCons.Node.FS.Dir) + classdir = target[0] + + s = source[0].rentry().disambiguate() + if isinstance(s, SCons.Node.FS.File): + sourcedir = s.dir.rdir() + elif isinstance(s, SCons.Node.FS.Dir): + sourcedir = s.rdir() + else: + raise SCons.Errors.UserError("Java source must be File or Dir, not '%s'" % s.__class__) + + slist = [] + js = _my_normcase(java_suffix) + find_java = lambda n, js=js, ljs=len(js): _my_normcase(n[-ljs:]) == js + for entry in source: + entry = entry.rentry().disambiguate() + if isinstance(entry, SCons.Node.FS.File): + slist.append(entry) + elif isinstance(entry, SCons.Node.FS.Dir): + result = SCons.Util.OrderedDict() + def visit(arg, dirname, names, fj=find_java, dirnode=entry.rdir()): + java_files = filter(fj, names) + # The on-disk entries come back in arbitrary order. Sort + # them so our target and source lists are determinate. + java_files.sort() + mydir = dirnode.Dir(dirname) + java_paths = map(lambda f, d=mydir: d.File(f), java_files) + for jp in java_paths: + arg[jp] = True + + os.path.walk(entry.rdir().get_abspath(), visit, result) + entry.walk(visit, result) + + slist.extend(result.keys()) + else: + raise SCons.Errors.UserError("Java source must be File or Dir, not '%s'" % entry.__class__) + + version = env.get('JAVAVERSION', '1.4') + full_tlist = [] + for f in slist: + tlist = [] + source_file_based = True + pkg_dir = None + if not f.is_derived(): + pkg_dir, classes = parse_java_file(f.rfile().get_abspath(), version) + if classes: + source_file_based = False + if pkg_dir: + d = target[0].Dir(pkg_dir) + p = pkg_dir + os.sep + else: + d = target[0] + p = '' + for c in classes: + t = d.File(c + class_suffix) + t.attributes.java_classdir = classdir + t.attributes.java_sourcedir = sourcedir + t.attributes.java_classname = classname(p + c) + tlist.append(t) + + if source_file_based: + base = f.name[:-len(java_suffix)] + if pkg_dir: + t = target[0].Dir(pkg_dir).File(base + class_suffix) + else: + t = target[0].File(base + class_suffix) + t.attributes.java_classdir = classdir + t.attributes.java_sourcedir = f.dir + t.attributes.java_classname = classname(base) + tlist.append(t) + + for t in tlist: + t.set_specific_source([f]) + + full_tlist.extend(tlist) + + return full_tlist, slist + +JavaAction = SCons.Action.Action('$JAVACCOM', '$JAVACCOMSTR') + +JavaBuilder = SCons.Builder.Builder(action = JavaAction, + emitter = emit_java_classes, + target_factory = SCons.Node.FS.Entry, + source_factory = SCons.Node.FS.Entry) + +class pathopt: + """ + Callable object for generating javac-style path options from + a construction variable (e.g. -classpath, -sourcepath). + """ + def __init__(self, opt, var, default=None): + self.opt = opt + self.var = var + self.default = default + + def __call__(self, target, source, env, for_signature): + path = env[self.var] + if path and not SCons.Util.is_List(path): + path = [path] + if self.default: + path = path + [ env[self.default] ] + if path: + return [self.opt, string.join(path, os.pathsep)] + #return self.opt + " " + string.join(path, os.pathsep) + else: + return [] + #return "" + +def Java(env, target, source, *args, **kw): + """ + A pseudo-Builder wrapper around the separate JavaClass{File,Dir} + Builders. 
+ """ + if not SCons.Util.is_List(target): + target = [target] + if not SCons.Util.is_List(source): + source = [source] + + # Pad the target list with repetitions of the last element in the + # list so we have a target for every source element. + target = target + ([target[-1]] * (len(source) - len(target))) + + java_suffix = env.subst('$JAVASUFFIX') + result = [] + + for t, s in zip(target, source): + if isinstance(s, SCons.Node.FS.Base): + if isinstance(s, SCons.Node.FS.File): + b = env.JavaClassFile + else: + b = env.JavaClassDir + else: + if os.path.isfile(s): + b = env.JavaClassFile + elif os.path.isdir(s): + b = env.JavaClassDir + elif s[-len(java_suffix):] == java_suffix: + b = env.JavaClassFile + else: + b = env.JavaClassDir + result.extend(apply(b, (t, s) + args, kw)) + + return result + +def generate(env): + """Add Builders and construction variables for javac to an Environment.""" + java_file = SCons.Tool.CreateJavaFileBuilder(env) + java_class = SCons.Tool.CreateJavaClassFileBuilder(env) + java_class_dir = SCons.Tool.CreateJavaClassDirBuilder(env) + java_class.add_emitter(None, emit_java_classes) + java_class.add_emitter(env.subst('$JAVASUFFIX'), emit_java_classes) + java_class_dir.emitter = emit_java_classes + + env.AddMethod(Java) + + env['JAVAC'] = 'javac' + env['JAVACFLAGS'] = SCons.Util.CLVar('') + env['JAVABOOTCLASSPATH'] = [] + env['JAVACLASSPATH'] = [] + env['JAVASOURCEPATH'] = [] + env['_javapathopt'] = pathopt + env['_JAVABOOTCLASSPATH'] = '${_javapathopt("-bootclasspath", "JAVABOOTCLASSPATH")} ' + env['_JAVACLASSPATH'] = '${_javapathopt("-classpath", "JAVACLASSPATH")} ' + env['_JAVASOURCEPATH'] = '${_javapathopt("-sourcepath", "JAVASOURCEPATH", "_JAVASOURCEPATHDEFAULT")} ' + env['_JAVASOURCEPATHDEFAULT'] = '${TARGET.attributes.java_sourcedir}' + env['_JAVACCOM'] = '$JAVAC $JAVACFLAGS $_JAVABOOTCLASSPATH $_JAVACLASSPATH -d ${TARGET.attributes.java_classdir} $_JAVASOURCEPATH $SOURCES' + env['JAVACCOM'] = "${TEMPFILE('$_JAVACCOM')}" + env['JAVACLASSSUFFIX'] = '.class' + env['JAVASUFFIX'] = '.java' + +def exists(env): + return 1 + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/javah.py b/engine/SCons/Tool/javah.py new file mode 100644 index 0000000..fad91b1 --- /dev/null +++ b/engine/SCons/Tool/javah.py @@ -0,0 +1,138 @@ +"""SCons.Tool.javah + +Tool-specific initialization for javah. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
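The jar and javac modules above register the Jar builder, the Java() pseudo-builder and their construction variables. As a rough usage sketch only, with hypothetical paths and assuming a JDK (javac, jar) is on PATH, an SConstruct might combine them like this:

    # Hypothetical SConstruct sketch.
    env = Environment(tools=['javac', 'jar'])

    # Java() is the pseudo-builder added by javac.py's generate(); per source
    # it picks JavaClassFile or JavaClassDir depending on whether the source
    # is a file or a directory.
    classes = env.Java(target='build/classes', source='src')

    # JARFLAGS defaults to 'cf'; jarFlags() above appends 'm' automatically
    # when a manifest file is found among the sources.
    env.Jar(target='build/app.jar', source='build/classes')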
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/javah.py 4577 2009/12/27 19:43:56 scons" + +import os.path +import string + +import SCons.Action +import SCons.Builder +import SCons.Node.FS +import SCons.Tool.javac +import SCons.Util + +def emit_java_headers(target, source, env): + """Create and return lists of Java stub header files that will + be created from a set of class files. + """ + class_suffix = env.get('JAVACLASSSUFFIX', '.class') + classdir = env.get('JAVACLASSDIR') + + if not classdir: + try: + s = source[0] + except IndexError: + classdir = '.' + else: + try: + classdir = s.attributes.java_classdir + except AttributeError: + classdir = '.' + classdir = env.Dir(classdir).rdir() + + if str(classdir) == '.': + c_ = None + else: + c_ = str(classdir) + os.sep + + slist = [] + for src in source: + try: + classname = src.attributes.java_classname + except AttributeError: + classname = str(src) + if c_ and classname[:len(c_)] == c_: + classname = classname[len(c_):] + if class_suffix and classname[-len(class_suffix):] == class_suffix: + classname = classname[:-len(class_suffix)] + classname = SCons.Tool.javac.classname(classname) + s = src.rfile() + s.attributes.java_classname = classname + slist.append(s) + + s = source[0].rfile() + if not hasattr(s.attributes, 'java_classdir'): + s.attributes.java_classdir = classdir + + if target[0].__class__ is SCons.Node.FS.File: + tlist = target + else: + if not isinstance(target[0], SCons.Node.FS.Dir): + target[0].__class__ = SCons.Node.FS.Dir + target[0]._morph() + tlist = [] + for s in source: + fname = string.replace(s.attributes.java_classname, '.', '_') + '.h' + t = target[0].File(fname) + t.attributes.java_lookupdir = target[0] + tlist.append(t) + + return tlist, source + +def JavaHOutFlagGenerator(target, source, env, for_signature): + try: + t = target[0] + except (AttributeError, IndexError, TypeError): + t = target + try: + return '-d ' + str(t.attributes.java_lookupdir) + except AttributeError: + return '-o ' + str(t) + +def getJavaHClassPath(env,target, source, for_signature): + path = "${SOURCE.attributes.java_classdir}" + if env.has_key('JAVACLASSPATH') and env['JAVACLASSPATH']: + path = SCons.Util.AppendPath(path, env['JAVACLASSPATH']) + return "-classpath %s" % (path) + +def generate(env): + """Add Builders and construction variables for javah to an Environment.""" + java_javah = SCons.Tool.CreateJavaHBuilder(env) + java_javah.emitter = emit_java_headers + + env['_JAVAHOUTFLAG'] = JavaHOutFlagGenerator + env['JAVAH'] = 'javah' + env['JAVAHFLAGS'] = SCons.Util.CLVar('') + env['_JAVAHCLASSPATH'] = getJavaHClassPath + env['JAVAHCOM'] = '$JAVAH $JAVAHFLAGS $_JAVAHOUTFLAG $_JAVAHCLASSPATH ${SOURCES.attributes.java_classname}' + env['JAVACLASSSUFFIX'] = '.class' + +def exists(env): + return env.Detect('javah') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/latex.py b/engine/SCons/Tool/latex.py new file mode 100644 index 0000000..2e6a83e --- /dev/null +++ b/engine/SCons/Tool/latex.py @@ -0,0 +1,79 @@ +"""SCons.Tool.latex + +Tool-specific initialization for LaTeX. 
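The javah module just above adds the JavaH builder; its emitter turns each class name into a C header file name by replacing dots with underscores. A hedged sketch, with hypothetical paths and assuming javah from a JDK is available:

    # Hypothetical SConstruct sketch.
    env = Environment(tools=['javac', 'javah'])
    classes = env.Java(target='build/classes', source='src')

    # emit_java_headers() maps a class such as com.example.Foo to
    # build/jni/com_example_Foo.h under the target directory.
    env.JavaH(target='build/jni', source=classes)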
+Generates .dvi files from .latex or .ltx files + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/latex.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Action +import SCons.Defaults +import SCons.Scanner.LaTeX +import SCons.Util +import SCons.Tool +import SCons.Tool.tex + +def LaTeXAuxFunction(target = None, source= None, env=None): + result = SCons.Tool.tex.InternalLaTeXAuxAction( SCons.Tool.tex.LaTeXAction, target, source, env ) + if result != 0: + print env['LATEX']," returned an error, check the log file" + return result + +LaTeXAuxAction = SCons.Action.Action(LaTeXAuxFunction, + strfunction=SCons.Tool.tex.TeXLaTeXStrFunction) + +def generate(env): + """Add Builders and construction variables for LaTeX to an Environment.""" + + env.AppendUnique(LATEXSUFFIXES=SCons.Tool.LaTeXSuffixes) + + import dvi + dvi.generate(env) + + import pdf + pdf.generate(env) + + bld = env['BUILDERS']['DVI'] + bld.add_action('.ltx', LaTeXAuxAction) + bld.add_action('.latex', LaTeXAuxAction) + bld.add_emitter('.ltx', SCons.Tool.tex.tex_eps_emitter) + bld.add_emitter('.latex', SCons.Tool.tex.tex_eps_emitter) + + SCons.Tool.tex.generate_common(env) + +def exists(env): + return env.Detect('latex') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/lex.py b/engine/SCons/Tool/lex.py new file mode 100644 index 0000000..35e28e3 --- /dev/null +++ b/engine/SCons/Tool/lex.py @@ -0,0 +1,99 @@ +"""SCons.Tool.lex + +Tool-specific initialization for lex. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/lex.py 4577 2009/12/27 19:43:56 scons" + +import os.path + +import string + +import SCons.Action +import SCons.Tool +import SCons.Util + +LexAction = SCons.Action.Action("$LEXCOM", "$LEXCOMSTR") + +def lexEmitter(target, source, env): + sourceBase, sourceExt = os.path.splitext(SCons.Util.to_String(source[0])) + + if sourceExt == ".lm": # If using Objective-C + target = [sourceBase + ".m"] # the extension is ".m". + + # This emitter essentially tries to add to the target all extra + # files generated by flex. + + # Different options that are used to trigger the creation of extra files. + fileGenOptions = ["--header-file=", "--tables-file="] + + lexflags = env.subst("$LEXFLAGS", target=target, source=source) + for option in SCons.Util.CLVar(lexflags): + for fileGenOption in fileGenOptions: + l = len(fileGenOption) + if option[:l] == fileGenOption: + # A file generating option is present, so add the + # file name to the target list. + fileName = string.strip(option[l:]) + target.append(fileName) + return (target, source) + +def generate(env): + """Add Builders and construction variables for lex to an Environment.""" + c_file, cxx_file = SCons.Tool.createCFileBuilders(env) + + # C + c_file.add_action(".l", LexAction) + c_file.add_emitter(".l", lexEmitter) + + c_file.add_action(".lex", LexAction) + c_file.add_emitter(".lex", lexEmitter) + + # Objective-C + cxx_file.add_action(".lm", LexAction) + cxx_file.add_emitter(".lm", lexEmitter) + + # C++ + cxx_file.add_action(".ll", LexAction) + cxx_file.add_emitter(".ll", lexEmitter) + + env["LEX"] = env.Detect("flex") or "lex" + env["LEXFLAGS"] = SCons.Util.CLVar("") + env["LEXCOM"] = "$LEX $LEXFLAGS -t $SOURCES > $TARGET" + +def exists(env): + return env.Detect(["flex", "lex"]) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/link.py b/engine/SCons/Tool/link.py new file mode 100644 index 0000000..55f7584 --- /dev/null +++ b/engine/SCons/Tool/link.py @@ -0,0 +1,121 @@ +"""SCons.Tool.link + +Tool-specific initialization for the generic Posix linker. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/link.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Defaults +import SCons.Tool +import SCons.Util +import SCons.Warnings + +from SCons.Tool.FortranCommon import isfortran + +cplusplus = __import__('c++', globals(), locals(), []) + +issued_mixed_link_warning = False + +def smart_link(source, target, env, for_signature): + has_cplusplus = cplusplus.iscplusplus(source) + has_fortran = isfortran(env, source) + if has_cplusplus and has_fortran: + global issued_mixed_link_warning + if not issued_mixed_link_warning: + msg = "Using $CXX to link Fortran and C++ code together.\n\t" + \ + "This may generate a buggy executable if the '%s'\n\t" + \ + "compiler does not know how to deal with Fortran runtimes." + SCons.Warnings.warn(SCons.Warnings.FortranCxxMixWarning, + msg % env.subst('$CXX')) + issued_mixed_link_warning = True + return '$CXX' + elif has_fortran: + return '$FORTRAN' + elif has_cplusplus: + return '$CXX' + return '$CC' + +def shlib_emitter(target, source, env): + for tgt in target: + tgt.attributes.shared = 1 + return (target, source) + +def generate(env): + """Add Builders and construction variables for gnulink to an Environment.""" + SCons.Tool.createSharedLibBuilder(env) + SCons.Tool.createProgBuilder(env) + + env['SHLINK'] = '$LINK' + env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared') + env['SHLINKCOM'] = '$SHLINK -o $TARGET $SHLINKFLAGS $SOURCES $_LIBDIRFLAGS $_LIBFLAGS' + # don't set up the emitter, cause AppendUnique will generate a list + # starting with None :-( + env.Append(SHLIBEMITTER = [shlib_emitter]) + env['SMARTLINK'] = smart_link + env['LINK'] = "$SMARTLINK" + env['LINKFLAGS'] = SCons.Util.CLVar('') + env['LINKCOM'] = '$LINK -o $TARGET $LINKFLAGS $SOURCES $_LIBDIRFLAGS $_LIBFLAGS' + env['LIBDIRPREFIX']='-L' + env['LIBDIRSUFFIX']='' + env['_LIBFLAGS']='${_stripixes(LIBLINKPREFIX, LIBS, LIBLINKSUFFIX, LIBPREFIXES, LIBSUFFIXES, __env__)}' + env['LIBLINKPREFIX']='-l' + env['LIBLINKSUFFIX']='' + + if env['PLATFORM'] == 'hpux': + env['SHLIBSUFFIX'] = '.sl' + elif env['PLATFORM'] == 'aix': + env['SHLIBSUFFIX'] = '.a' + + # For most platforms, a loadable module is the same as a shared + # library. Platforms which are different can override these, but + # setting them the same means that LoadableModule works everywhere. 
+ SCons.Tool.createLoadableModuleBuilder(env) + env['LDMODULE'] = '$SHLINK' + # don't set up the emitter, cause AppendUnique will generate a list + # starting with None :-( + env.Append(LDMODULEEMITTER='$SHLIBEMITTER') + env['LDMODULEPREFIX'] = '$SHLIBPREFIX' + env['LDMODULESUFFIX'] = '$SHLIBSUFFIX' + env['LDMODULEFLAGS'] = '$SHLINKFLAGS' + env['LDMODULECOM'] = '$LDMODULE -o $TARGET $LDMODULEFLAGS $SOURCES $_LIBDIRFLAGS $_LIBFLAGS' + + + +def exists(env): + # This module isn't really a Tool on its own, it's common logic for + # other linkers. + return None + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/linkloc.py b/engine/SCons/Tool/linkloc.py new file mode 100644 index 0000000..9776c73 --- /dev/null +++ b/engine/SCons/Tool/linkloc.py @@ -0,0 +1,112 @@ +"""SCons.Tool.linkloc + +Tool specification for the LinkLoc linker for the Phar Lap ETS embedded +operating system. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/linkloc.py 4577 2009/12/27 19:43:56 scons" + +import os.path +import re + +import SCons.Action +import SCons.Defaults +import SCons.Errors +import SCons.Tool +import SCons.Util + +from SCons.Tool.MSCommon import msvs_exists, merge_default_version +from SCons.Tool.PharLapCommon import addPharLapPaths + +_re_linker_command = re.compile(r'(\s)@\s*([^\s]+)') + +def repl_linker_command(m): + # Replaces any linker command file directives (e.g. "@foo.lnk") with + # the actual contents of the file. + try: + f=open(m.group(2), "r") + return m.group(1) + f.read() + except IOError: + # the linker should return an error if it can't + # find the linker command file so we will remain quiet. 
+ # However, we will replace the @ with a # so we will not continue + # to find it with recursive substitution + return m.group(1) + '#' + m.group(2) + +class LinklocGenerator: + def __init__(self, cmdline): + self.cmdline = cmdline + + def __call__(self, env, target, source, for_signature): + if for_signature: + # Expand the contents of any linker command files recursively + subs = 1 + strsub = env.subst(self.cmdline, target=target, source=source) + while subs: + strsub, subs = _re_linker_command.subn(repl_linker_command, strsub) + return strsub + else: + return "${TEMPFILE('" + self.cmdline + "')}" + +def generate(env): + """Add Builders and construction variables for ar to an Environment.""" + SCons.Tool.createSharedLibBuilder(env) + SCons.Tool.createProgBuilder(env) + + env['SUBST_CMD_FILE'] = LinklocGenerator + env['SHLINK'] = '$LINK' + env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS') + env['SHLINKCOM'] = '${SUBST_CMD_FILE("$SHLINK $SHLINKFLAGS $_LIBDIRFLAGS $_LIBFLAGS -dll $TARGET $SOURCES")}' + env['SHLIBEMITTER']= None + env['LINK'] = "linkloc" + env['LINKFLAGS'] = SCons.Util.CLVar('') + env['LINKCOM'] = '${SUBST_CMD_FILE("$LINK $LINKFLAGS $_LIBDIRFLAGS $_LIBFLAGS -exe $TARGET $SOURCES")}' + env['LIBDIRPREFIX']='-libpath ' + env['LIBDIRSUFFIX']='' + env['LIBLINKPREFIX']='-lib ' + env['LIBLINKSUFFIX']='$LIBSUFFIX' + + # Set-up ms tools paths for default version + merge_default_version(env) + + addPharLapPaths(env) + +def exists(env): + if msvs_exists(): + return env.Detect('linkloc') + else: + return 0 + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/m4.py b/engine/SCons/Tool/m4.py new file mode 100644 index 0000000..3c35f21 --- /dev/null +++ b/engine/SCons/Tool/m4.py @@ -0,0 +1,63 @@ +"""SCons.Tool.m4 + +Tool-specific initialization for m4. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
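repl_linker_command() and LinklocGenerator above expand '@file' command-file references recursively when building the signature string. A standalone sketch of the same substitution, with a hypothetical command line and file name:

    # Standalone Python sketch of the '@command-file' expansion.
    import re

    _re_linker_command = re.compile(r'(\s)@\s*([^\s]+)')

    def repl(m):
        try:
            with open(m.group(2), 'r') as f:
                return m.group(1) + f.read()
        except IOError:
            # As in linkloc.py: neutralize the reference so recursive
            # substitution terminates when the file cannot be read.
            return m.group(1) + '#' + m.group(2)

    cmd = 'linkloc -exe app.exe main.obj @link.lnk'
    expanded, count = _re_linker_command.subn(repl, cmd)
    print(expanded)   # contents of link.lnk spliced in, or '#link.lnk'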
+# + +__revision__ = "src/engine/SCons/Tool/m4.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Action +import SCons.Builder +import SCons.Util + +def generate(env): + """Add Builders and construction variables for m4 to an Environment.""" + M4Action = SCons.Action.Action('$M4COM', '$M4COMSTR') + bld = SCons.Builder.Builder(action = M4Action, src_suffix = '.m4') + + env['BUILDERS']['M4'] = bld + + # .m4 files might include other files, and it would be pretty hard + # to write a scanner for it, so let's just cd to the dir of the m4 + # file and run from there. + # The src_suffix setup is like so: file.c.m4 -> file.c, + # file.cpp.m4 -> file.cpp etc. + env['M4'] = 'm4' + env['M4FLAGS'] = SCons.Util.CLVar('-E') + env['M4COM'] = 'cd ${SOURCE.rsrcdir} && $M4 $M4FLAGS < ${SOURCE.file} > ${TARGET.abspath}' + +def exists(env): + return env.Detect('m4') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/masm.py b/engine/SCons/Tool/masm.py new file mode 100644 index 0000000..81e3bf0 --- /dev/null +++ b/engine/SCons/Tool/masm.py @@ -0,0 +1,77 @@ +"""SCons.Tool.masm + +Tool-specific initialization for the Microsoft Assembler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
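The m4 module above registers an M4 builder whose command changes into the source directory so relative include() paths resolve. A usage sketch with hypothetical file names, assuming GNU m4 is installed:

    # Hypothetical SConstruct sketch.
    env = Environment(tools=['m4'])

    # Following the src_suffix convention noted above: foo.c.m4 -> foo.c.
    env.M4(target='foo.c', source='foo.c.m4')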
+# + +__revision__ = "src/engine/SCons/Tool/masm.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Defaults +import SCons.Tool +import SCons.Util + +ASSuffixes = ['.s', '.asm', '.ASM'] +ASPPSuffixes = ['.spp', '.SPP', '.sx'] +if SCons.Util.case_sensitive_suffixes('.s', '.S'): + ASPPSuffixes.extend(['.S']) +else: + ASSuffixes.extend(['.S']) + +def generate(env): + """Add Builders and construction variables for masm to an Environment.""" + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + + for suffix in ASSuffixes: + static_obj.add_action(suffix, SCons.Defaults.ASAction) + shared_obj.add_action(suffix, SCons.Defaults.ASAction) + static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) + shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) + + for suffix in ASPPSuffixes: + static_obj.add_action(suffix, SCons.Defaults.ASPPAction) + shared_obj.add_action(suffix, SCons.Defaults.ASPPAction) + static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) + shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) + + env['AS'] = 'ml' + env['ASFLAGS'] = SCons.Util.CLVar('/nologo') + env['ASPPFLAGS'] = '$ASFLAGS' + env['ASCOM'] = '$AS $ASFLAGS /c /Fo$TARGET $SOURCES' + env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c /Fo$TARGET $SOURCES' + env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1 + +def exists(env): + return env.Detect('ml') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/midl.py b/engine/SCons/Tool/midl.py new file mode 100644 index 0000000..4f6547c --- /dev/null +++ b/engine/SCons/Tool/midl.py @@ -0,0 +1,90 @@ +"""SCons.Tool.midl + +Tool-specific initialization for midl (Microsoft IDL compiler). + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
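The masm module above plugs the Microsoft assembler into the standard static and shared object builders. A usage sketch with hypothetical file names, assuming ml.exe and the MSVC toolchain are on PATH:

    # Hypothetical SConstruct sketch (Windows host).
    env = Environment(tools=['msvc', 'mslink', 'masm'])

    # .asm sources are assembled via $ASCOM ('ml /nologo /c /Fo...'),
    # while .c sources go through the normal cl builders.
    objs = env.Object(['main.c', 'math.asm'])
    env.Program('app', objs)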
+# + +__revision__ = "src/engine/SCons/Tool/midl.py 4577 2009/12/27 19:43:56 scons" + +import string + +import SCons.Action +import SCons.Builder +import SCons.Defaults +import SCons.Scanner.IDL +import SCons.Util + +from MSCommon import msvc_exists + +def midl_emitter(target, source, env): + """Produces a list of outputs from the MIDL compiler""" + base, ext = SCons.Util.splitext(str(target[0])) + tlb = target[0] + incl = base + '.h' + interface = base + '_i.c' + t = [tlb, incl, interface] + + midlcom = env['MIDLCOM'] + + if string.find(midlcom, '/proxy') != -1: + proxy = base + '_p.c' + t.append(proxy) + if string.find(midlcom, '/dlldata') != -1: + dlldata = base + '_data.c' + t.append(dlldata) + + return (t,source) + +idl_scanner = SCons.Scanner.IDL.IDLScan() + +midl_action = SCons.Action.Action('$MIDLCOM', '$MIDLCOMSTR') + +midl_builder = SCons.Builder.Builder(action = midl_action, + src_suffix = '.idl', + suffix='.tlb', + emitter = midl_emitter, + source_scanner = idl_scanner) + +def generate(env): + """Add Builders and construction variables for midl to an Environment.""" + + env['MIDL'] = 'MIDL.EXE' + env['MIDLFLAGS'] = SCons.Util.CLVar('/nologo') + env['MIDLCOM'] = '$MIDL $MIDLFLAGS /tlb ${TARGETS[0]} /h ${TARGETS[1]} /iid ${TARGETS[2]} /proxy ${TARGETS[3]} /dlldata ${TARGETS[4]} $SOURCE 2> NUL' + env['BUILDERS']['TypeLibrary'] = midl_builder + +def exists(env): + return msvc_exists() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/mingw.py b/engine/SCons/Tool/mingw.py new file mode 100644 index 0000000..f341d42 --- /dev/null +++ b/engine/SCons/Tool/mingw.py @@ -0,0 +1,159 @@ +"""SCons.Tool.gcc + +Tool-specific initialization for MinGW (http://www.mingw.org/) + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
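The midl module above registers a TypeLibrary builder; with the default $MIDLCOM (which passes /proxy and /dlldata), midl_emitter() expands a single .idl source into five targets. A sketch with a hypothetical IDL file, assuming MIDL.EXE from a Windows SDK:

    # Hypothetical SConstruct sketch (Windows host).
    env = Environment(tools=['midl'])

    # Targets become foo.tlb, foo.h, foo_i.c, foo_p.c and foo_data.c.
    env.TypeLibrary('foo.tlb', 'foo.idl')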
+# + +__revision__ = "src/engine/SCons/Tool/mingw.py 4577 2009/12/27 19:43:56 scons" + +import os +import os.path +import string + +import SCons.Action +import SCons.Builder +import SCons.Defaults +import SCons.Tool +import SCons.Util + +# This is what we search for to find mingw: +key_program = 'mingw32-gcc' + +def find(env): + # First search in the SCons path and then the OS path: + return env.WhereIs(key_program) or SCons.Util.WhereIs(key_program) + +def shlib_generator(target, source, env, for_signature): + cmd = SCons.Util.CLVar(['$SHLINK', '$SHLINKFLAGS']) + + dll = env.FindIxes(target, 'SHLIBPREFIX', 'SHLIBSUFFIX') + if dll: cmd.extend(['-o', dll]) + + cmd.extend(['$SOURCES', '$_LIBDIRFLAGS', '$_LIBFLAGS']) + + implib = env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX') + if implib: cmd.append('-Wl,--out-implib,'+implib.get_string(for_signature)) + + def_target = env.FindIxes(target, 'WINDOWSDEFPREFIX', 'WINDOWSDEFSUFFIX') + insert_def = env.subst("$WINDOWS_INSERT_DEF") + if not insert_def in ['', '0', 0] and def_target: \ + cmd.append('-Wl,--output-def,'+def_target.get_string(for_signature)) + + return [cmd] + +def shlib_emitter(target, source, env): + dll = env.FindIxes(target, 'SHLIBPREFIX', 'SHLIBSUFFIX') + no_import_lib = env.get('no_import_lib', 0) + + if not dll: + raise SCons.Errors.UserError, "A shared library should have exactly one target with the suffix: %s" % env.subst("$SHLIBSUFFIX") + + if not no_import_lib and \ + not env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX'): + + # Append an import library to the list of targets. + target.append(env.ReplaceIxes(dll, + 'SHLIBPREFIX', 'SHLIBSUFFIX', + 'LIBPREFIX', 'LIBSUFFIX')) + + # Append a def file target if there isn't already a def file target + # or a def file source. There is no option to disable def file + # target emitting, because I can't figure out why someone would ever + # want to turn it off. + def_source = env.FindIxes(source, 'WINDOWSDEFPREFIX', 'WINDOWSDEFSUFFIX') + def_target = env.FindIxes(target, 'WINDOWSDEFPREFIX', 'WINDOWSDEFSUFFIX') + if not def_source and not def_target: + target.append(env.ReplaceIxes(dll, + 'SHLIBPREFIX', 'SHLIBSUFFIX', + 'WINDOWSDEFPREFIX', 'WINDOWSDEFSUFFIX')) + + return (target, source) + + +shlib_action = SCons.Action.Action(shlib_generator, generator=1) + +res_action = SCons.Action.Action('$RCCOM', '$RCCOMSTR') + +res_builder = SCons.Builder.Builder(action=res_action, suffix='.o', + source_scanner=SCons.Tool.SourceFileScanner) +SCons.Tool.SourceFileScanner.add_scanner('.rc', SCons.Defaults.CScan) + +def generate(env): + mingw = find(env) + if mingw: + dir = os.path.dirname(mingw) + env.PrependENVPath('PATH', dir ) + + + # Most of mingw is the same as gcc and friends... + gnu_tools = ['gcc', 'g++', 'gnulink', 'ar', 'gas', 'm4'] + for tool in gnu_tools: + SCons.Tool.Tool(tool)(env) + + #... 
but a few things differ: + env['CC'] = 'gcc' + env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS') + env['CXX'] = 'g++' + env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS') + env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared') + env['SHLINKCOM'] = shlib_action + env['LDMODULECOM'] = shlib_action + env.Append(SHLIBEMITTER = [shlib_emitter]) + env['AS'] = 'as' + + env['WIN32DEFPREFIX'] = '' + env['WIN32DEFSUFFIX'] = '.def' + env['WINDOWSDEFPREFIX'] = '${WIN32DEFPREFIX}' + env['WINDOWSDEFSUFFIX'] = '${WIN32DEFSUFFIX}' + + env['SHOBJSUFFIX'] = '.o' + env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1 + + env['RC'] = 'windres' + env['RCFLAGS'] = SCons.Util.CLVar('') + env['RCINCFLAGS'] = '$( ${_concat(RCINCPREFIX, CPPPATH, RCINCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)' + env['RCINCPREFIX'] = '--include-dir ' + env['RCINCSUFFIX'] = '' + env['RCCOM'] = '$RC $_CPPDEFFLAGS $RCINCFLAGS ${RCINCPREFIX} ${SOURCE.dir} $RCFLAGS -i $SOURCE -o $TARGET' + env['BUILDERS']['RES'] = res_builder + + # Some setting from the platform also have to be overridden: + env['OBJSUFFIX'] = '.o' + env['LIBPREFIX'] = 'lib' + env['LIBSUFFIX'] = '.a' + +def exists(env): + return find(env) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/mslib.py b/engine/SCons/Tool/mslib.py new file mode 100644 index 0000000..14b8f3c --- /dev/null +++ b/engine/SCons/Tool/mslib.py @@ -0,0 +1,64 @@ +"""SCons.Tool.mslib + +Tool-specific initialization for lib (MicroSoft library archiver). + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
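The mingw generate() above layers Windows specifics (import libraries, .def files, windres resources) on top of the regular GNU tools. A usage sketch with hypothetical names, assuming mingw32-gcc is installed:

    # Hypothetical SConstruct sketch (MinGW on Windows).
    env = Environment(tools=['mingw'])

    # shlib_emitter() normally adds libmydll.a (import library) and
    # mydll.def alongside mydll.dll.
    env.SharedLibrary('mydll', ['dll.c'])

    # .rc files are compiled by windres through the RES builder above.
    res = env.RES('app.rc')
    env.Program('app.exe', ['main.c', res])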
+# + +__revision__ = "src/engine/SCons/Tool/mslib.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Defaults +import SCons.Tool +import SCons.Tool.msvs +import SCons.Tool.msvc +import SCons.Util + +from MSCommon import msvc_exists, msvc_setup_env_once + +def generate(env): + """Add Builders and construction variables for lib to an Environment.""" + SCons.Tool.createStaticLibBuilder(env) + + # Set-up ms tools paths + msvc_setup_env_once(env) + + env['AR'] = 'lib' + env['ARFLAGS'] = SCons.Util.CLVar('/nologo') + env['ARCOM'] = "${TEMPFILE('$AR $ARFLAGS /OUT:$TARGET $SOURCES')}" + env['LIBPREFIX'] = '' + env['LIBSUFFIX'] = '.lib' + +def exists(env): + return msvc_exists() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/mslink.py b/engine/SCons/Tool/mslink.py new file mode 100644 index 0000000..cda5ca7 --- /dev/null +++ b/engine/SCons/Tool/mslink.py @@ -0,0 +1,266 @@ +"""SCons.Tool.mslink + +Tool-specific initialization for the Microsoft linker. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
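The mslib module above points the static library archiver at lib.exe through a TEMPFILE-wrapped $ARCOM. A sketch with hypothetical names, assuming an MSVC installation:

    # Hypothetical SConstruct sketch (Windows host).
    env = Environment(tools=['msvc', 'mslib'])
    objs = env.Object(['a.c', 'b.c'])

    # Produces util.lib: LIBPREFIX is '' and LIBSUFFIX is '.lib'.
    env.StaticLibrary('util', objs)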
+# + +__revision__ = "src/engine/SCons/Tool/mslink.py 4577 2009/12/27 19:43:56 scons" + +import os.path + +import SCons.Action +import SCons.Defaults +import SCons.Errors +import SCons.Platform.win32 +import SCons.Tool +import SCons.Tool.msvc +import SCons.Tool.msvs +import SCons.Util + +from MSCommon import msvc_setup_env_once, msvc_exists + +def pdbGenerator(env, target, source, for_signature): + try: + return ['/PDB:%s' % target[0].attributes.pdb, '/DEBUG'] + except (AttributeError, IndexError): + return None + +def _dllTargets(target, source, env, for_signature, paramtp): + listCmd = [] + dll = env.FindIxes(target, '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp) + if dll: listCmd.append("/out:%s"%dll.get_string(for_signature)) + + implib = env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX') + if implib: listCmd.append("/implib:%s"%implib.get_string(for_signature)) + + return listCmd + +def _dllSources(target, source, env, for_signature, paramtp): + listCmd = [] + + deffile = env.FindIxes(source, "WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX") + for src in source: + # Check explicitly for a non-None deffile so that the __cmp__ + # method of the base SCons.Util.Proxy class used for some Node + # proxies doesn't try to use a non-existent __dict__ attribute. + if deffile and src == deffile: + # Treat this source as a .def file. + listCmd.append("/def:%s" % src.get_string(for_signature)) + else: + # Just treat it as a generic source file. + listCmd.append(src) + return listCmd + +def windowsShlinkTargets(target, source, env, for_signature): + return _dllTargets(target, source, env, for_signature, 'SHLIB') + +def windowsShlinkSources(target, source, env, for_signature): + return _dllSources(target, source, env, for_signature, 'SHLIB') + +def _windowsLdmodTargets(target, source, env, for_signature): + """Get targets for loadable modules.""" + return _dllTargets(target, source, env, for_signature, 'LDMODULE') + +def _windowsLdmodSources(target, source, env, for_signature): + """Get sources for loadable modules.""" + return _dllSources(target, source, env, for_signature, 'LDMODULE') + +def _dllEmitter(target, source, env, paramtp): + """Common implementation of dll emitter.""" + SCons.Tool.msvc.validate_vars(env) + + extratargets = [] + extrasources = [] + + dll = env.FindIxes(target, '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp) + no_import_lib = env.get('no_import_lib', 0) + + if not dll: + raise SCons.Errors.UserError, 'A shared library should have exactly one target with the suffix: %s' % env.subst('$%sSUFFIX' % paramtp) + + insert_def = env.subst("$WINDOWS_INSERT_DEF") + if not insert_def in ['', '0', 0] and \ + not env.FindIxes(source, "WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX"): + + # append a def file to the list of sources + extrasources.append( + env.ReplaceIxes(dll, + '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp, + "WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX")) + + version_num, suite = SCons.Tool.msvs.msvs_parse_version(env.get('MSVS_VERSION', '6.0')) + if version_num >= 8.0 and env.get('WINDOWS_INSERT_MANIFEST', 0): + # MSVC 8 automatically generates .manifest files that must be installed + extratargets.append( + env.ReplaceIxes(dll, + '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp, + "WINDOWSSHLIBMANIFESTPREFIX", "WINDOWSSHLIBMANIFESTSUFFIX")) + + if env.has_key('PDB') and env['PDB']: + pdb = env.arg2nodes('$PDB', target=target, source=source)[0] + extratargets.append(pdb) + target[0].attributes.pdb = pdb + + if not no_import_lib and \ + not env.FindIxes(target, "LIBPREFIX", "LIBSUFFIX"): + # Append an import 
library to the list of targets. + extratargets.append( + env.ReplaceIxes(dll, + '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp, + "LIBPREFIX", "LIBSUFFIX")) + # and .exp file is created if there are exports from a DLL + extratargets.append( + env.ReplaceIxes(dll, + '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp, + "WINDOWSEXPPREFIX", "WINDOWSEXPSUFFIX")) + + return (target+extratargets, source+extrasources) + +def windowsLibEmitter(target, source, env): + return _dllEmitter(target, source, env, 'SHLIB') + +def ldmodEmitter(target, source, env): + """Emitter for loadable modules. + + Loadable modules are identical to shared libraries on Windows, but building + them is subject to different parameters (LDMODULE*). + """ + return _dllEmitter(target, source, env, 'LDMODULE') + +def prog_emitter(target, source, env): + SCons.Tool.msvc.validate_vars(env) + + extratargets = [] + + exe = env.FindIxes(target, "PROGPREFIX", "PROGSUFFIX") + if not exe: + raise SCons.Errors.UserError, "An executable should have exactly one target with the suffix: %s" % env.subst("$PROGSUFFIX") + + version_num, suite = SCons.Tool.msvs.msvs_parse_version(env.get('MSVS_VERSION', '6.0')) + if version_num >= 8.0 and env.get('WINDOWS_INSERT_MANIFEST', 0): + # MSVC 8 automatically generates .manifest files that have to be installed + extratargets.append( + env.ReplaceIxes(exe, + "PROGPREFIX", "PROGSUFFIX", + "WINDOWSPROGMANIFESTPREFIX", "WINDOWSPROGMANIFESTSUFFIX")) + + if env.has_key('PDB') and env['PDB']: + pdb = env.arg2nodes('$PDB', target=target, source=source)[0] + extratargets.append(pdb) + target[0].attributes.pdb = pdb + + return (target+extratargets,source) + +def RegServerFunc(target, source, env): + if env.has_key('register') and env['register']: + ret = regServerAction([target[0]], [source[0]], env) + if ret: + raise SCons.Errors.UserError, "Unable to register %s" % target[0] + else: + print "Registered %s sucessfully" % target[0] + return ret + return 0 + +regServerAction = SCons.Action.Action("$REGSVRCOM", "$REGSVRCOMSTR") +regServerCheck = SCons.Action.Action(RegServerFunc, None) +shlibLinkAction = SCons.Action.Action('${TEMPFILE("$SHLINK $SHLINKFLAGS $_SHLINK_TARGETS $_LIBDIRFLAGS $_LIBFLAGS $_PDB $_SHLINK_SOURCES")}') +compositeShLinkAction = shlibLinkAction + regServerCheck +ldmodLinkAction = SCons.Action.Action('${TEMPFILE("$LDMODULE $LDMODULEFLAGS $_LDMODULE_TARGETS $_LIBDIRFLAGS $_LIBFLAGS $_PDB $_LDMODULE_SOURCES")}') +compositeLdmodAction = ldmodLinkAction + regServerCheck + +def generate(env): + """Add Builders and construction variables for ar to an Environment.""" + SCons.Tool.createSharedLibBuilder(env) + SCons.Tool.createProgBuilder(env) + + env['SHLINK'] = '$LINK' + env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS /dll') + env['_SHLINK_TARGETS'] = windowsShlinkTargets + env['_SHLINK_SOURCES'] = windowsShlinkSources + env['SHLINKCOM'] = compositeShLinkAction + env.Append(SHLIBEMITTER = [windowsLibEmitter]) + env['LINK'] = 'link' + env['LINKFLAGS'] = SCons.Util.CLVar('/nologo') + env['_PDB'] = pdbGenerator + env['LINKCOM'] = '${TEMPFILE("$LINK $LINKFLAGS /OUT:$TARGET.windows $_LIBDIRFLAGS $_LIBFLAGS $_PDB $SOURCES.windows")}' + env.Append(PROGEMITTER = [prog_emitter]) + env['LIBDIRPREFIX']='/LIBPATH:' + env['LIBDIRSUFFIX']='' + env['LIBLINKPREFIX']='' + env['LIBLINKSUFFIX']='$LIBSUFFIX' + + env['WIN32DEFPREFIX'] = '' + env['WIN32DEFSUFFIX'] = '.def' + env['WIN32_INSERT_DEF'] = 0 + env['WINDOWSDEFPREFIX'] = '${WIN32DEFPREFIX}' + env['WINDOWSDEFSUFFIX'] = '${WIN32DEFSUFFIX}' + env['WINDOWS_INSERT_DEF'] = 
'${WIN32_INSERT_DEF}' + + env['WIN32EXPPREFIX'] = '' + env['WIN32EXPSUFFIX'] = '.exp' + env['WINDOWSEXPPREFIX'] = '${WIN32EXPPREFIX}' + env['WINDOWSEXPSUFFIX'] = '${WIN32EXPSUFFIX}' + + env['WINDOWSSHLIBMANIFESTPREFIX'] = '' + env['WINDOWSSHLIBMANIFESTSUFFIX'] = '${SHLIBSUFFIX}.manifest' + env['WINDOWSPROGMANIFESTPREFIX'] = '' + env['WINDOWSPROGMANIFESTSUFFIX'] = '${PROGSUFFIX}.manifest' + + env['REGSVRACTION'] = regServerCheck + env['REGSVR'] = os.path.join(SCons.Platform.win32.get_system_root(),'System32','regsvr32') + env['REGSVRFLAGS'] = '/s ' + env['REGSVRCOM'] = '$REGSVR $REGSVRFLAGS ${TARGET.windows}' + + # Set-up ms tools paths + msvc_setup_env_once(env) + + + # Loadable modules are on Windows the same as shared libraries, but they + # are subject to different build parameters (LDMODULE* variables). + # Therefore LDMODULE* variables correspond as much as possible to + # SHLINK*/SHLIB* ones. + SCons.Tool.createLoadableModuleBuilder(env) + env['LDMODULE'] = '$SHLINK' + env['LDMODULEPREFIX'] = '$SHLIBPREFIX' + env['LDMODULESUFFIX'] = '$SHLIBSUFFIX' + env['LDMODULEFLAGS'] = '$SHLINKFLAGS' + env['_LDMODULE_TARGETS'] = _windowsLdmodTargets + env['_LDMODULE_SOURCES'] = _windowsLdmodSources + env['LDMODULEEMITTER'] = [ldmodEmitter] + env['LDMODULECOM'] = compositeLdmodAction + +def exists(env): + return msvc_exists() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/mssdk.py b/engine/SCons/Tool/mssdk.py new file mode 100644 index 0000000..7165877 --- /dev/null +++ b/engine/SCons/Tool/mssdk.py @@ -0,0 +1,50 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/mssdk.py 4577 2009/12/27 19:43:56 scons" + +"""engine.SCons.Tool.mssdk + +Tool-specific initialization for Microsoft SDKs, both Platform +SDKs and Windows SDKs. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+""" + +from MSCommon import mssdk_exists, \ + mssdk_setup_env + +def generate(env): + """Add construction variables for an MS SDK to an Environment.""" + mssdk_setup_env(env) + +def exists(env): + return mssdk_exists() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/msvc.py b/engine/SCons/Tool/msvc.py new file mode 100644 index 0000000..424d099 --- /dev/null +++ b/engine/SCons/Tool/msvc.py @@ -0,0 +1,257 @@ +"""engine.SCons.Tool.msvc + +Tool-specific initialization for Microsoft Visual C/C++. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/msvc.py 4577 2009/12/27 19:43:56 scons" + +import os.path +import re +import string +import sys + +import SCons.Action +import SCons.Builder +import SCons.Errors +import SCons.Platform.win32 +import SCons.Tool +import SCons.Tool.msvs +import SCons.Util +import SCons.Warnings +import SCons.Scanner.RC + +from MSCommon import msvc_exists, msvc_setup_env_once + +CSuffixes = ['.c', '.C'] +CXXSuffixes = ['.cc', '.cpp', '.cxx', '.c++', '.C++'] + +def validate_vars(env): + """Validate the PCH and PCHSTOP construction variables.""" + if env.has_key('PCH') and env['PCH']: + if not env.has_key('PCHSTOP'): + raise SCons.Errors.UserError, "The PCHSTOP construction must be defined if PCH is defined." 
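validate_vars() requires that $PCHSTOP accompany $PCH and, as the following check enforces, that it be a string. For context, a typical precompiled-header setup that satisfies these checks, using hypothetical stdafx names and the PCH builder registered further down in this module:

    # Hypothetical SConstruct sketch (MSVC precompiled headers).
    env = Environment(tools=['msvc', 'mslink'])

    # The PCH builder returns [stdafx.pch, stdafx.obj] via its emitter.
    pch = env.PCH('stdafx.cpp')
    env['PCH'] = pch[0]
    env['PCHSTOP'] = 'stdafx.h'      # must be defined and must be a string

    # main.cpp is assumed to #include "stdafx.h"; the companion object
    # produced with the .pch is linked in as well.
    env.Program('app', ['main.cpp', pch[1]])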
+ if not SCons.Util.is_String(env['PCHSTOP']): + raise SCons.Errors.UserError, "The PCHSTOP construction variable must be a string: %r"%env['PCHSTOP'] + +def pch_emitter(target, source, env): + """Adds the object file target.""" + + validate_vars(env) + + pch = None + obj = None + + for t in target: + if SCons.Util.splitext(str(t))[1] == '.pch': + pch = t + if SCons.Util.splitext(str(t))[1] == '.obj': + obj = t + + if not obj: + obj = SCons.Util.splitext(str(pch))[0]+'.obj' + + target = [pch, obj] # pch must be first, and obj second for the PCHCOM to work + + return (target, source) + +def object_emitter(target, source, env, parent_emitter): + """Sets up the PCH dependencies for an object file.""" + + validate_vars(env) + + parent_emitter(target, source, env) + + if env.has_key('PCH') and env['PCH']: + env.Depends(target, env['PCH']) + + return (target, source) + +def static_object_emitter(target, source, env): + return object_emitter(target, source, env, + SCons.Defaults.StaticObjectEmitter) + +def shared_object_emitter(target, source, env): + return object_emitter(target, source, env, + SCons.Defaults.SharedObjectEmitter) + +pch_action = SCons.Action.Action('$PCHCOM', '$PCHCOMSTR') +pch_builder = SCons.Builder.Builder(action=pch_action, suffix='.pch', + emitter=pch_emitter, + source_scanner=SCons.Tool.SourceFileScanner) + + +# Logic to build .rc files into .res files (resource files) +res_scanner = SCons.Scanner.RC.RCScan() +res_action = SCons.Action.Action('$RCCOM', '$RCCOMSTR') +res_builder = SCons.Builder.Builder(action=res_action, + src_suffix='.rc', + suffix='.res', + src_builder=[], + source_scanner=res_scanner) + +def msvc_batch_key(action, env, target, source): + """ + Returns a key to identify unique batches of sources for compilation. + + If batching is enabled (via the $MSVC_BATCH setting), then all + target+source pairs that use the same action, defined by the same + environment, and have the same target and source directories, will + be batched. + + Returning None specifies that the specified target+source should not + be batched with other compilations. + """ + b = env.subst('$MSVC_BATCH') + if b in (None, '', '0'): + # We're not using batching; return no key. + return None + t = target[0] + s = source[0] + if os.path.splitext(t.name)[0] != os.path.splitext(s.name)[0]: + # The base names are different, so this *must* be compiled + # separately; return no key. + return None + return (id(action), id(env), t.dir, s.dir) + +def msvc_output_flag(target, source, env, for_signature): + """ + Returns the correct /Fo flag for batching. + + If batching is disabled or there's only one source file, then we + return an /Fo string that specifies the target explicitly. Otherwise, + we return an /Fo string that just specifies the first target's + directory (where the Visual C/C++ compiler will put the .obj files). + """ + b = env.subst('$MSVC_BATCH') + if b in (None, '', '0') or len(source) == 1: + return '/Fo$TARGET' + else: + # The Visual C/C++ compiler requires a \ at the end of the /Fo + # option to indicate an output directory. We use os.sep here so + # that the test(s) for this can be run on non-Windows systems + # without having a hard-coded backslash mess up command-line + # argument parsing. 
+ return '/Fo${TARGET.dir}' + os.sep + +CAction = SCons.Action.Action("$CCCOM", "$CCCOMSTR", + batch_key=msvc_batch_key, + targets='$CHANGED_TARGETS') +ShCAction = SCons.Action.Action("$SHCCCOM", "$SHCCCOMSTR", + batch_key=msvc_batch_key, + targets='$CHANGED_TARGETS') +CXXAction = SCons.Action.Action("$CXXCOM", "$CXXCOMSTR", + batch_key=msvc_batch_key, + targets='$CHANGED_TARGETS') +ShCXXAction = SCons.Action.Action("$SHCXXCOM", "$SHCXXCOMSTR", + batch_key=msvc_batch_key, + targets='$CHANGED_TARGETS') + +def generate(env): + """Add Builders and construction variables for MSVC++ to an Environment.""" + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + + # TODO(batch): shouldn't reach in to cmdgen this way; necessary + # for now to bypass the checks in Builder.DictCmdGenerator.__call__() + # and allow .cc and .cpp to be compiled in the same command line. + static_obj.cmdgen.source_ext_match = False + shared_obj.cmdgen.source_ext_match = False + + for suffix in CSuffixes: + static_obj.add_action(suffix, CAction) + shared_obj.add_action(suffix, ShCAction) + static_obj.add_emitter(suffix, static_object_emitter) + shared_obj.add_emitter(suffix, shared_object_emitter) + + for suffix in CXXSuffixes: + static_obj.add_action(suffix, CXXAction) + shared_obj.add_action(suffix, ShCXXAction) + static_obj.add_emitter(suffix, static_object_emitter) + shared_obj.add_emitter(suffix, shared_object_emitter) + + env['CCPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Z7") or ""}']) + env['CCPCHFLAGS'] = SCons.Util.CLVar(['${(PCH and "/Yu%s /Fp%s"%(PCHSTOP or "",File(PCH))) or ""}']) + env['_MSVC_OUTPUT_FLAG'] = msvc_output_flag + env['_CCCOMCOM'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS $CCPCHFLAGS $CCPDBFLAGS' + env['CC'] = 'cl' + env['CCFLAGS'] = SCons.Util.CLVar('/nologo') + env['CFLAGS'] = SCons.Util.CLVar('') + env['CCCOM'] = '$CC $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $CFLAGS $CCFLAGS $_CCCOMCOM' + env['SHCC'] = '$CC' + env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS') + env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS') + env['SHCCCOM'] = '$SHCC $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $SHCFLAGS $SHCCFLAGS $_CCCOMCOM' + env['CXX'] = '$CC' + env['CXXFLAGS'] = SCons.Util.CLVar('$( /TP $)') + env['CXXCOM'] = '$CXX $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $CXXFLAGS $CCFLAGS $_CCCOMCOM' + env['SHCXX'] = '$CXX' + env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS') + env['SHCXXCOM'] = '$SHCXX $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $SHCXXFLAGS $SHCCFLAGS $_CCCOMCOM' + env['CPPDEFPREFIX'] = '/D' + env['CPPDEFSUFFIX'] = '' + env['INCPREFIX'] = '/I' + env['INCSUFFIX'] = '' +# env.Append(OBJEMITTER = [static_object_emitter]) +# env.Append(SHOBJEMITTER = [shared_object_emitter]) + env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1 + + env['RC'] = 'rc' + env['RCFLAGS'] = SCons.Util.CLVar('') + env['RCSUFFIXES']=['.rc','.rc2'] + env['RCCOM'] = '$RC $_CPPDEFFLAGS $_CPPINCFLAGS $RCFLAGS /fo$TARGET $SOURCES' + env['BUILDERS']['RES'] = res_builder + env['OBJPREFIX'] = '' + env['OBJSUFFIX'] = '.obj' + env['SHOBJPREFIX'] = '$OBJPREFIX' + env['SHOBJSUFFIX'] = '$OBJSUFFIX' + + # Set-up ms tools paths + msvc_setup_env_once(env) + + env['CFILESUFFIX'] = '.c' + env['CXXFILESUFFIX'] = '.cc' + + env['PCHPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Yd") or ""}']) + env['PCHCOM'] = '$CXX /Fo${TARGETS[1]} $CXXFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Yc$PCHSTOP /Fp${TARGETS[0]} $CCPDBFLAGS $PCHPDBFLAGS' + env['BUILDERS']['PCH'] = pch_builder + + if not env.has_key('ENV'): + env['ENV'] = {} + if not 
env['ENV'].has_key('SystemRoot'): # required for dlls in the winsxs folders
+ env['ENV']['SystemRoot'] = SCons.Platform.win32.get_system_root()
+
+def exists(env):
+ return msvc_exists()
+
+# Local Variables:
+# tab-width:4
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/msvs.py b/engine/SCons/Tool/msvs.py new file mode 100644 index 0000000..c042d68 --- /dev/null +++ b/engine/SCons/Tool/msvs.py @@ -0,0 +1,1439 @@ +"""SCons.Tool.msvs
+
+Tool-specific initialization for Microsoft Visual Studio project files.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/msvs.py 4577 2009/12/27 19:43:56 scons"
+
+import base64
+import hashlib
+import ntpath
+import os
+import pickle
+import re
+import string
+import sys
+
+import SCons.Builder
+import SCons.Node.FS
+import SCons.Platform.win32
+import SCons.Script.SConscript
+import SCons.Util
+import SCons.Warnings
+
+from MSCommon import msvc_exists, msvc_setup_env_once
+from SCons.Defaults import processDefines
+
+##############################################################################
+# Below here are the classes and functions for generation of
+# DSP/DSW/SLN/VCPROJ files.
+##############################################################################
+
+def _hexdigest(s):
+ """Return a string as a string of hex characters.
+ """
+ # NOTE: This routine is a method in the Python 2.0 interface
+ # of the native md5 module, but we want SCons to operate all
+ # the way back to at least Python 1.5.2, which doesn't have it.
+ h = string.hexdigits
+ r = ''
+ for c in s:
+ i = ord(c)
+ r = r + h[(i >> 4) & 0xF] + h[i & 0xF]
+ return r
+
+def xmlify(s):
+ s = string.replace(s, "&", "&amp;") # do this first
+ s = string.replace(s, "'", "&apos;")
+ s = string.replace(s, '"', "&quot;")
+ return s
+
+external_makefile_guid = '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}'
+
+def _generateGUID(slnfile, name):
+ """This generates a dummy GUID for the sln file to use. It is
+ based on the MD5 signatures of the sln filename plus the name of
+ the project.
It basically just needs to be unique, and not + change with each invocation.""" + m = hashlib.md5() + # Normalize the slnfile path to a Windows path (\ separators) so + # the generated file has a consistent GUID even if we generate + # it on a non-Windows platform. + m.update(ntpath.normpath(str(slnfile)) + str(name)) + # TODO(1.5) + #solution = m.hexdigest().upper() + solution = string.upper(_hexdigest(m.digest())) + # convert most of the signature to GUID form (discard the rest) + solution = "{" + solution[:8] + "-" + solution[8:12] + "-" + solution[12:16] + "-" + solution[16:20] + "-" + solution[20:32] + "}" + return solution + +version_re = re.compile(r'(\d+\.\d+)(.*)') + +def msvs_parse_version(s): + """ + Split a Visual Studio version, which may in fact be something like + '7.0Exp', into is version number (returned as a float) and trailing + "suite" portion. + """ + num, suite = version_re.match(s).groups() + return float(num), suite + +# This is how we re-invoke SCons from inside MSVS Project files. +# The problem is that we might have been invoked as either scons.bat +# or scons.py. If we were invoked directly as scons.py, then we could +# use sys.argv[0] to find the SCons "executable," but that doesn't work +# if we were invoked as scons.bat, which uses "python -c" to execute +# things and ends up with "-c" as sys.argv[0]. Consequently, we have +# the MSVS Project file invoke SCons the same way that scons.bat does, +# which works regardless of how we were invoked. +def getExecScriptMain(env, xml=None): + scons_home = env.get('SCONS_HOME') + if not scons_home and os.environ.has_key('SCONS_LIB_DIR'): + scons_home = os.environ['SCONS_LIB_DIR'] + if scons_home: + exec_script_main = "from os.path import join; import sys; sys.path = [ r'%s' ] + sys.path; import SCons.Script; SCons.Script.main()" % scons_home + else: + version = SCons.__version__ + exec_script_main = "from os.path import join; import sys; sys.path = [ join(sys.prefix, 'Lib', 'site-packages', 'scons-%(version)s'), join(sys.prefix, 'scons-%(version)s'), join(sys.prefix, 'Lib', 'site-packages', 'scons'), join(sys.prefix, 'scons') ] + sys.path; import SCons.Script; SCons.Script.main()" % locals() + if xml: + exec_script_main = xmlify(exec_script_main) + return exec_script_main + +# The string for the Python executable we tell the Project file to use +# is either sys.executable or, if an external PYTHON_ROOT environment +# variable exists, $(PYTHON)ROOT\\python.exe (generalized a little to +# pluck the actual executable name from sys.executable). +try: + python_root = os.environ['PYTHON_ROOT'] +except KeyError: + python_executable = sys.executable +else: + python_executable = os.path.join('$$(PYTHON_ROOT)', + os.path.split(sys.executable)[1]) + +class Config: + pass + +def splitFully(path): + dir, base = os.path.split(path) + if dir and dir != '' and dir != path: + return splitFully(dir)+[base] + if base == '': + return [] + return [base] + +def makeHierarchy(sources): + '''Break a list of files into a hierarchy; for each value, if it is a string, + then it is a file. If it is a dictionary, it is a folder. 
The string is + the original path of the file.''' + + hierarchy = {} + for file in sources: + path = splitFully(file) + if len(path): + dict = hierarchy + for part in path[:-1]: + if not dict.has_key(part): + dict[part] = {} + dict = dict[part] + dict[path[-1]] = file + #else: + # print 'Warning: failed to decompose path for '+str(file) + return hierarchy + +class _DSPGenerator: + """ Base class for DSP generators """ + + srcargs = [ + 'srcs', + 'incs', + 'localincs', + 'resources', + 'misc'] + + def __init__(self, dspfile, source, env): + self.dspfile = str(dspfile) + try: + get_abspath = dspfile.get_abspath + except AttributeError: + self.dspabs = os.path.abspath(dspfile) + else: + self.dspabs = get_abspath() + + if not env.has_key('variant'): + raise SCons.Errors.InternalError, \ + "You must specify a 'variant' argument (i.e. 'Debug' or " +\ + "'Release') to create an MSVSProject." + elif SCons.Util.is_String(env['variant']): + variants = [env['variant']] + elif SCons.Util.is_List(env['variant']): + variants = env['variant'] + + if not env.has_key('buildtarget') or env['buildtarget'] == None: + buildtarget = [''] + elif SCons.Util.is_String(env['buildtarget']): + buildtarget = [env['buildtarget']] + elif SCons.Util.is_List(env['buildtarget']): + if len(env['buildtarget']) != len(variants): + raise SCons.Errors.InternalError, \ + "Sizes of 'buildtarget' and 'variant' lists must be the same." + buildtarget = [] + for bt in env['buildtarget']: + if SCons.Util.is_String(bt): + buildtarget.append(bt) + else: + buildtarget.append(bt.get_abspath()) + else: + buildtarget = [env['buildtarget'].get_abspath()] + if len(buildtarget) == 1: + bt = buildtarget[0] + buildtarget = [] + for _ in variants: + buildtarget.append(bt) + + if not env.has_key('outdir') or env['outdir'] == None: + outdir = [''] + elif SCons.Util.is_String(env['outdir']): + outdir = [env['outdir']] + elif SCons.Util.is_List(env['outdir']): + if len(env['outdir']) != len(variants): + raise SCons.Errors.InternalError, \ + "Sizes of 'outdir' and 'variant' lists must be the same." + outdir = [] + for s in env['outdir']: + if SCons.Util.is_String(s): + outdir.append(s) + else: + outdir.append(s.get_abspath()) + else: + outdir = [env['outdir'].get_abspath()] + if len(outdir) == 1: + s = outdir[0] + outdir = [] + for v in variants: + outdir.append(s) + + if not env.has_key('runfile') or env['runfile'] == None: + runfile = buildtarget[-1:] + elif SCons.Util.is_String(env['runfile']): + runfile = [env['runfile']] + elif SCons.Util.is_List(env['runfile']): + if len(env['runfile']) != len(variants): + raise SCons.Errors.InternalError, \ + "Sizes of 'runfile' and 'variant' lists must be the same." 
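+ # Note (hypothetical values, shown only to illustrate the pattern):
+ # 'buildtarget', 'outdir' and 'runfile' are all normalized the same
+ # way, so a call such as
+ #   env.MSVSProject(target='foo' + env['MSVSPROJECTSUFFIX'],
+ #                   srcs=['foo.c'], buildtarget='foo.exe',
+ #                   variant=['Debug', 'Release'])
+ # ends up with buildtarget padded to ['foo.exe', 'foo.exe'], one
+ # entry per variant.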
+ runfile = [] + for s in env['runfile']: + if SCons.Util.is_String(s): + runfile.append(s) + else: + runfile.append(s.get_abspath()) + else: + runfile = [env['runfile'].get_abspath()] + if len(runfile) == 1: + s = runfile[0] + runfile = [] + for v in variants: + runfile.append(s) + + self.sconscript = env['MSVSSCONSCRIPT'] + + cmdargs = env.get('cmdargs', '') + + self.env = env + + if self.env.has_key('name'): + self.name = self.env['name'] + else: + self.name = os.path.basename(SCons.Util.splitext(self.dspfile)[0]) + self.name = self.env.subst(self.name) + + sourcenames = [ + 'Source Files', + 'Header Files', + 'Local Headers', + 'Resource Files', + 'Other Files'] + + self.sources = {} + for n in sourcenames: + self.sources[n] = [] + + self.configs = {} + + self.nokeep = 0 + if env.has_key('nokeep') and env['variant'] != 0: + self.nokeep = 1 + + if self.nokeep == 0 and os.path.exists(self.dspabs): + self.Parse() + + for t in zip(sourcenames,self.srcargs): + if self.env.has_key(t[1]): + if SCons.Util.is_List(self.env[t[1]]): + for i in self.env[t[1]]: + if not i in self.sources[t[0]]: + self.sources[t[0]].append(i) + else: + if not self.env[t[1]] in self.sources[t[0]]: + self.sources[t[0]].append(self.env[t[1]]) + + for n in sourcenames: + # TODO(1.5): + #self.sources[n].sort(lambda a, b: cmp(a.lower(), b.lower())) + self.sources[n].sort(lambda a, b: cmp(string.lower(a), string.lower(b))) + + def AddConfig(self, variant, buildtarget, outdir, runfile, cmdargs, dspfile=dspfile): + config = Config() + config.buildtarget = buildtarget + config.outdir = outdir + config.cmdargs = cmdargs + config.runfile = runfile + + match = re.match('(.*)\|(.*)', variant) + if match: + config.variant = match.group(1) + config.platform = match.group(2) + else: + config.variant = variant + config.platform = 'Win32' + + self.configs[variant] = config + print "Adding '" + self.name + ' - ' + config.variant + '|' + config.platform + "' to '" + str(dspfile) + "'" + + for i in range(len(variants)): + AddConfig(self, variants[i], buildtarget[i], outdir[i], runfile[i], cmdargs) + + self.platforms = [] + for key in self.configs.keys(): + platform = self.configs[key].platform + if not platform in self.platforms: + self.platforms.append(platform) + + def Build(self): + pass + +V6DSPHeader = """\ +# Microsoft Developer Studio Project File - Name="%(name)s" - Package Owner=<4> +# Microsoft Developer Studio Generated Build File, Format Version 6.00 +# ** DO NOT EDIT ** + +# TARGTYPE "Win32 (x86) External Target" 0x0106 + +CFG=%(name)s - Win32 %(confkey)s +!MESSAGE This is not a valid makefile. To build this project using NMAKE, +!MESSAGE use the Export Makefile command and run +!MESSAGE +!MESSAGE NMAKE /f "%(name)s.mak". +!MESSAGE +!MESSAGE You can specify a configuration when running NMAKE +!MESSAGE by defining the macro CFG on the command line. 
For example: +!MESSAGE +!MESSAGE NMAKE /f "%(name)s.mak" CFG="%(name)s - Win32 %(confkey)s" +!MESSAGE +!MESSAGE Possible choices for configuration are: +!MESSAGE +""" + +class _GenerateV6DSP(_DSPGenerator): + """Generates a Project file for MSVS 6.0""" + + def PrintHeader(self): + # pick a default config + confkeys = self.configs.keys() + confkeys.sort() + + name = self.name + confkey = confkeys[0] + + self.file.write(V6DSPHeader % locals()) + + for kind in confkeys: + self.file.write('!MESSAGE "%s - Win32 %s" (based on "Win32 (x86) External Target")\n' % (name, kind)) + + self.file.write('!MESSAGE \n\n') + + def PrintProject(self): + name = self.name + self.file.write('# Begin Project\n' + '# PROP AllowPerConfigDependencies 0\n' + '# PROP Scc_ProjName ""\n' + '# PROP Scc_LocalPath ""\n\n') + + first = 1 + confkeys = self.configs.keys() + confkeys.sort() + for kind in confkeys: + outdir = self.configs[kind].outdir + buildtarget = self.configs[kind].buildtarget + if first == 1: + self.file.write('!IF "$(CFG)" == "%s - Win32 %s"\n\n' % (name, kind)) + first = 0 + else: + self.file.write('\n!ELSEIF "$(CFG)" == "%s - Win32 %s"\n\n' % (name, kind)) + + env_has_buildtarget = self.env.has_key('MSVSBUILDTARGET') + if not env_has_buildtarget: + self.env['MSVSBUILDTARGET'] = buildtarget + + # have to write this twice, once with the BASE settings, and once without + for base in ("BASE ",""): + self.file.write('# PROP %sUse_MFC 0\n' + '# PROP %sUse_Debug_Libraries ' % (base, base)) + # TODO(1.5): + #if kind.lower().find('debug') < 0: + if string.find(string.lower(kind), 'debug') < 0: + self.file.write('0\n') + else: + self.file.write('1\n') + self.file.write('# PROP %sOutput_Dir "%s"\n' + '# PROP %sIntermediate_Dir "%s"\n' % (base,outdir,base,outdir)) + cmd = 'echo Starting SCons && ' + self.env.subst('$MSVSBUILDCOM', 1) + self.file.write('# PROP %sCmd_Line "%s"\n' + '# PROP %sRebuild_Opt "-c && %s"\n' + '# PROP %sTarget_File "%s"\n' + '# PROP %sBsc_Name ""\n' + '# PROP %sTarget_Dir ""\n'\ + %(base,cmd,base,cmd,base,buildtarget,base,base)) + + if not env_has_buildtarget: + del self.env['MSVSBUILDTARGET'] + + self.file.write('\n!ENDIF\n\n' + '# Begin Target\n\n') + for kind in confkeys: + self.file.write('# Name "%s - Win32 %s"\n' % (name,kind)) + self.file.write('\n') + first = 0 + for kind in confkeys: + if first == 0: + self.file.write('!IF "$(CFG)" == "%s - Win32 %s"\n\n' % (name,kind)) + first = 1 + else: + self.file.write('!ELSEIF "$(CFG)" == "%s - Win32 %s"\n\n' % (name,kind)) + self.file.write('!ENDIF \n\n') + self.PrintSourceFiles() + self.file.write('# End Target\n' + '# End Project\n') + + if self.nokeep == 0: + # now we pickle some data and add it to the file -- MSDEV will ignore it. 
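+ # Sketch of the round trip (simplified): what is appended here is what
+ # Parse() below recovers on a later run, roughly
+ #   pdata = base64.encodestring(pickle.dumps(self.configs, 1))
+ #   ... written after the '# End Project' marker ...
+ #   self.configs.update(pickle.loads(base64.decodestring(pdata)))
+ # so configurations added by earlier invocations survive regeneration.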
+ pdata = pickle.dumps(self.configs,1) + pdata = base64.encodestring(pdata) + self.file.write(pdata + '\n') + pdata = pickle.dumps(self.sources,1) + pdata = base64.encodestring(pdata) + self.file.write(pdata + '\n') + + def PrintSourceFiles(self): + categories = {'Source Files': 'cpp|c|cxx|l|y|def|odl|idl|hpj|bat', + 'Header Files': 'h|hpp|hxx|hm|inl', + 'Local Headers': 'h|hpp|hxx|hm|inl', + 'Resource Files': 'r|rc|ico|cur|bmp|dlg|rc2|rct|bin|cnt|rtf|gif|jpg|jpeg|jpe', + 'Other Files': ''} + + cats = categories.keys() + # TODO(1.5): + #cats.sort(lambda a, b: cmp(a.lower(), b.lower())) + cats.sort(lambda a, b: cmp(string.lower(a), string.lower(b))) + for kind in cats: + if not self.sources[kind]: + continue # skip empty groups + + self.file.write('# Begin Group "' + kind + '"\n\n') + # TODO(1.5) + #typelist = categories[kind].replace('|', ';') + typelist = string.replace(categories[kind], '|', ';') + self.file.write('# PROP Default_Filter "' + typelist + '"\n') + + for file in self.sources[kind]: + file = os.path.normpath(file) + self.file.write('# Begin Source File\n\n' + 'SOURCE="' + file + '"\n' + '# End Source File\n') + self.file.write('# End Group\n') + + # add the SConscript file outside of the groups + self.file.write('# Begin Source File\n\n' + 'SOURCE="' + str(self.sconscript) + '"\n' + '# End Source File\n') + + def Parse(self): + try: + dspfile = open(self.dspabs,'r') + except IOError: + return # doesn't exist yet, so can't add anything to configs. + + line = dspfile.readline() + while line: + # TODO(1.5): + #if line.find("# End Project") > -1: + if string.find(line, "# End Project") > -1: + break + line = dspfile.readline() + + line = dspfile.readline() + datas = line + while line and line != '\n': + line = dspfile.readline() + datas = datas + line + + # OK, we've found our little pickled cache of data. + try: + datas = base64.decodestring(datas) + data = pickle.loads(datas) + except KeyboardInterrupt: + raise + except: + return # unable to unpickle any data for some reason + + self.configs.update(data) + + data = None + line = dspfile.readline() + datas = line + while line and line != '\n': + line = dspfile.readline() + datas = datas + line + + # OK, we've found our little pickled cache of data. + # it has a "# " in front of it, so we strip that. 
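+ # Note: this second blank-line-delimited block holds the pickled
+ # self.sources dictionary; the first block, handled above, held
+ # self.configs.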
+ try: + datas = base64.decodestring(datas) + data = pickle.loads(datas) + except KeyboardInterrupt: + raise + except: + return # unable to unpickle any data for some reason + + self.sources.update(data) + + def Build(self): + try: + self.file = open(self.dspabs,'w') + except IOError, detail: + raise SCons.Errors.InternalError, 'Unable to open "' + self.dspabs + '" for writing:' + str(detail) + else: + self.PrintHeader() + self.PrintProject() + self.file.close() + +V7DSPHeader = """\ +<?xml version="1.0" encoding = "%(encoding)s"?> +<VisualStudioProject +\tProjectType="Visual C++" +\tVersion="%(versionstr)s" +\tName="%(name)s" +%(scc_attrs)s +\tKeyword="MakeFileProj"> +""" + +V7DSPConfiguration = """\ +\t\t<Configuration +\t\t\tName="%(variant)s|%(platform)s" +\t\t\tOutputDirectory="%(outdir)s" +\t\t\tIntermediateDirectory="%(outdir)s" +\t\t\tConfigurationType="0" +\t\t\tUseOfMFC="0" +\t\t\tATLMinimizesCRunTimeLibraryUsage="FALSE"> +\t\t\t<Tool +\t\t\t\tName="VCNMakeTool" +\t\t\t\tBuildCommandLine="%(buildcmd)s" +\t\t\t\tCleanCommandLine="%(cleancmd)s" +\t\t\t\tRebuildCommandLine="%(rebuildcmd)s" +\t\t\t\tOutput="%(runfile)s"/> +\t\t</Configuration> +""" + +V8DSPHeader = """\ +<?xml version="1.0" encoding="%(encoding)s"?> +<VisualStudioProject +\tProjectType="Visual C++" +\tVersion="%(versionstr)s" +\tName="%(name)s" +%(scc_attrs)s +\tRootNamespace="%(name)s" +\tKeyword="MakeFileProj"> +""" + +V8DSPConfiguration = """\ +\t\t<Configuration +\t\t\tName="%(variant)s|%(platform)s" +\t\t\tConfigurationType="0" +\t\t\tUseOfMFC="0" +\t\t\tATLMinimizesCRunTimeLibraryUsage="false" +\t\t\t> +\t\t\t<Tool +\t\t\t\tName="VCNMakeTool" +\t\t\t\tBuildCommandLine="%(buildcmd)s" +\t\t\t\tReBuildCommandLine="%(rebuildcmd)s" +\t\t\t\tCleanCommandLine="%(cleancmd)s" +\t\t\t\tOutput="%(runfile)s" +\t\t\t\tPreprocessorDefinitions="%(preprocdefs)s" +\t\t\t\tIncludeSearchPath="%(includepath)s" +\t\t\t\tForcedIncludes="" +\t\t\t\tAssemblySearchPath="" +\t\t\t\tForcedUsingAssemblies="" +\t\t\t\tCompileAsManaged="" +\t\t\t/> +\t\t</Configuration> +""" +class _GenerateV7DSP(_DSPGenerator): + """Generates a Project file for MSVS .NET""" + + def __init__(self, dspfile, source, env): + _DSPGenerator.__init__(self, dspfile, source, env) + self.version = env['MSVS_VERSION'] + self.version_num, self.suite = msvs_parse_version(self.version) + if self.version_num >= 8.0: + self.versionstr = '8.00' + self.dspheader = V8DSPHeader + self.dspconfiguration = V8DSPConfiguration + else: + if self.version_num >= 7.1: + self.versionstr = '7.10' + else: + self.versionstr = '7.00' + self.dspheader = V7DSPHeader + self.dspconfiguration = V7DSPConfiguration + self.file = None + + def PrintHeader(self): + env = self.env + versionstr = self.versionstr + name = self.name + encoding = self.env.subst('$MSVSENCODING') + scc_provider = env.get('MSVS_SCC_PROVIDER', '') + scc_project_name = env.get('MSVS_SCC_PROJECT_NAME', '') + scc_aux_path = env.get('MSVS_SCC_AUX_PATH', '') + scc_local_path = env.get('MSVS_SCC_LOCAL_PATH', '') + project_guid = env.get('MSVS_PROJECT_GUID', '') + if self.version_num >= 8.0 and not project_guid: + project_guid = _generateGUID(self.dspfile, '') + if scc_provider != '': + scc_attrs = ('\tProjectGUID="%s"\n' + '\tSccProjectName="%s"\n' + '\tSccAuxPath="%s"\n' + '\tSccLocalPath="%s"\n' + '\tSccProvider="%s"' % (project_guid, scc_project_name, scc_aux_path, scc_local_path, scc_provider)) + else: + scc_attrs = ('\tProjectGUID="%s"\n' + '\tSccProjectName="%s"\n' + '\tSccLocalPath="%s"' % (project_guid, scc_project_name, 
scc_local_path)) + + self.file.write(self.dspheader % locals()) + + self.file.write('\t<Platforms>\n') + for platform in self.platforms: + self.file.write( + '\t\t<Platform\n' + '\t\t\tName="%s"/>\n' % platform) + self.file.write('\t</Platforms>\n') + + if self.version_num >= 8.0: + self.file.write('\t<ToolFiles>\n' + '\t</ToolFiles>\n') + + def PrintProject(self): + self.file.write('\t<Configurations>\n') + + confkeys = self.configs.keys() + confkeys.sort() + for kind in confkeys: + variant = self.configs[kind].variant + platform = self.configs[kind].platform + outdir = self.configs[kind].outdir + buildtarget = self.configs[kind].buildtarget + runfile = self.configs[kind].runfile + cmdargs = self.configs[kind].cmdargs + + env_has_buildtarget = self.env.has_key('MSVSBUILDTARGET') + if not env_has_buildtarget: + self.env['MSVSBUILDTARGET'] = buildtarget + + starting = 'echo Starting SCons && ' + if cmdargs: + cmdargs = ' ' + cmdargs + else: + cmdargs = '' + buildcmd = xmlify(starting + self.env.subst('$MSVSBUILDCOM', 1) + cmdargs) + rebuildcmd = xmlify(starting + self.env.subst('$MSVSREBUILDCOM', 1) + cmdargs) + cleancmd = xmlify(starting + self.env.subst('$MSVSCLEANCOM', 1) + cmdargs) + + # TODO(1.5) + #preprocdefs = xmlify(';'.join(self.env.get('CPPDEFINES', []))) + #includepath = xmlify(';'.join(self.env.get('CPPPATH', []))) + preprocdefs = xmlify(string.join(processDefines(self.env.get('CPPDEFINES', [])), ';')) + includepath = xmlify(string.join(self.env.get('CPPPATH', []), ';')) + + if not env_has_buildtarget: + del self.env['MSVSBUILDTARGET'] + + self.file.write(self.dspconfiguration % locals()) + + self.file.write('\t</Configurations>\n') + + if self.version_num >= 7.1: + self.file.write('\t<References>\n' + '\t</References>\n') + + self.PrintSourceFiles() + + self.file.write('</VisualStudioProject>\n') + + if self.nokeep == 0: + # now we pickle some data and add it to the file -- MSDEV will ignore it. 
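+ # Note: unlike the V6 generator, the cache here is wrapped in an XML
+ # comment, so the tail of a generated .vcproj looks roughly like
+ #   <!-- SCons Data:
+ #   <base64 of pickled configs>
+ #   <base64 of pickled sources>
+ #   -->
+ # (shape shown for illustration only).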
+ pdata = pickle.dumps(self.configs,1) + pdata = base64.encodestring(pdata) + self.file.write('<!-- SCons Data:\n' + pdata + '\n') + pdata = pickle.dumps(self.sources,1) + pdata = base64.encodestring(pdata) + self.file.write(pdata + '-->\n') + + def printSources(self, hierarchy, commonprefix): + sorteditems = hierarchy.items() + # TODO(1.5): + #sorteditems.sort(lambda a, b: cmp(a[0].lower(), b[0].lower())) + sorteditems.sort(lambda a, b: cmp(string.lower(a[0]), string.lower(b[0]))) + + # First folders, then files + for key, value in sorteditems: + if SCons.Util.is_Dict(value): + self.file.write('\t\t\t<Filter\n' + '\t\t\t\tName="%s"\n' + '\t\t\t\tFilter="">\n' % (key)) + self.printSources(value, commonprefix) + self.file.write('\t\t\t</Filter>\n') + + for key, value in sorteditems: + if SCons.Util.is_String(value): + file = value + if commonprefix: + file = os.path.join(commonprefix, value) + file = os.path.normpath(file) + self.file.write('\t\t\t<File\n' + '\t\t\t\tRelativePath="%s">\n' + '\t\t\t</File>\n' % (file)) + + def PrintSourceFiles(self): + categories = {'Source Files': 'cpp;c;cxx;l;y;def;odl;idl;hpj;bat', + 'Header Files': 'h;hpp;hxx;hm;inl', + 'Local Headers': 'h;hpp;hxx;hm;inl', + 'Resource Files': 'r;rc;ico;cur;bmp;dlg;rc2;rct;bin;cnt;rtf;gif;jpg;jpeg;jpe', + 'Other Files': ''} + + self.file.write('\t<Files>\n') + + cats = categories.keys() + # TODO(1.5) + #cats.sort(lambda a, b: cmp(a.lower(), b.lower())) + cats.sort(lambda a, b: cmp(string.lower(a), string.lower(b))) + cats = filter(lambda k, s=self: s.sources[k], cats) + for kind in cats: + if len(cats) > 1: + self.file.write('\t\t<Filter\n' + '\t\t\tName="%s"\n' + '\t\t\tFilter="%s">\n' % (kind, categories[kind])) + + sources = self.sources[kind] + + # First remove any common prefix + commonprefix = None + if len(sources) > 1: + s = map(os.path.normpath, sources) + # take the dirname because the prefix may include parts + # of the filenames (e.g. if you have 'dir\abcd' and + # 'dir\acde' then the cp will be 'dir\a' ) + cp = os.path.dirname( os.path.commonprefix(s) ) + if cp and s[0][len(cp)] == os.sep: + # +1 because the filename starts after the separator + sources = map(lambda s, l=len(cp)+1: s[l:], sources) + commonprefix = cp + elif len(sources) == 1: + commonprefix = os.path.dirname( sources[0] ) + sources[0] = os.path.basename( sources[0] ) + + hierarchy = makeHierarchy(sources) + self.printSources(hierarchy, commonprefix=commonprefix) + + if len(cats)>1: + self.file.write('\t\t</Filter>\n') + + # add the SConscript file outside of the groups + self.file.write('\t\t<File\n' + '\t\t\tRelativePath="%s">\n' + '\t\t</File>\n' % str(self.sconscript)) + + self.file.write('\t</Files>\n' + '\t<Globals>\n' + '\t</Globals>\n') + + def Parse(self): + try: + dspfile = open(self.dspabs,'r') + except IOError: + return # doesn't exist yet, so can't add anything to configs. + + line = dspfile.readline() + while line: + # TODO(1.5) + #if line.find('<!-- SCons Data:') > -1: + if string.find(line, '<!-- SCons Data:') > -1: + break + line = dspfile.readline() + + line = dspfile.readline() + datas = line + while line and line != '\n': + line = dspfile.readline() + datas = datas + line + + # OK, we've found our little pickled cache of data. 
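+ # Note: the bare 'except' below is deliberate -- a stale or unreadable
+ # cache is simply ignored and the project starts over without it --
+ # while KeyboardInterrupt is re-raised so Ctrl-C still interrupts the build.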
+ try: + datas = base64.decodestring(datas) + data = pickle.loads(datas) + except KeyboardInterrupt: + raise + except: + return # unable to unpickle any data for some reason + + self.configs.update(data) + + data = None + line = dspfile.readline() + datas = line + while line and line != '\n': + line = dspfile.readline() + datas = datas + line + + # OK, we've found our little pickled cache of data. + try: + datas = base64.decodestring(datas) + data = pickle.loads(datas) + except KeyboardInterrupt: + raise + except: + return # unable to unpickle any data for some reason + + self.sources.update(data) + + def Build(self): + try: + self.file = open(self.dspabs,'w') + except IOError, detail: + raise SCons.Errors.InternalError, 'Unable to open "' + self.dspabs + '" for writing:' + str(detail) + else: + self.PrintHeader() + self.PrintProject() + self.file.close() + +class _DSWGenerator: + """ Base class for DSW generators """ + def __init__(self, dswfile, source, env): + self.dswfile = os.path.normpath(str(dswfile)) + self.env = env + + if not env.has_key('projects'): + raise SCons.Errors.UserError, \ + "You must specify a 'projects' argument to create an MSVSSolution." + projects = env['projects'] + if not SCons.Util.is_List(projects): + raise SCons.Errors.InternalError, \ + "The 'projects' argument must be a list of nodes." + projects = SCons.Util.flatten(projects) + if len(projects) < 1: + raise SCons.Errors.UserError, \ + "You must specify at least one project to create an MSVSSolution." + self.dspfiles = map(str, projects) + + if self.env.has_key('name'): + self.name = self.env['name'] + else: + self.name = os.path.basename(SCons.Util.splitext(self.dswfile)[0]) + self.name = self.env.subst(self.name) + + def Build(self): + pass + +class _GenerateV7DSW(_DSWGenerator): + """Generates a Solution file for MSVS .NET""" + def __init__(self, dswfile, source, env): + _DSWGenerator.__init__(self, dswfile, source, env) + + self.file = None + self.version = self.env['MSVS_VERSION'] + self.version_num, self.suite = msvs_parse_version(self.version) + self.versionstr = '7.00' + if self.version_num >= 8.0: + self.versionstr = '9.00' + elif self.version_num >= 7.1: + self.versionstr = '8.00' + if self.version_num >= 8.0: + self.versionstr = '9.00' + + if env.has_key('slnguid') and env['slnguid']: + self.slnguid = env['slnguid'] + else: + self.slnguid = _generateGUID(dswfile, self.name) + + self.configs = {} + + self.nokeep = 0 + if env.has_key('nokeep') and env['variant'] != 0: + self.nokeep = 1 + + if self.nokeep == 0 and os.path.exists(self.dswfile): + self.Parse() + + def AddConfig(self, variant, dswfile=dswfile): + config = Config() + + match = re.match('(.*)\|(.*)', variant) + if match: + config.variant = match.group(1) + config.platform = match.group(2) + else: + config.variant = variant + config.platform = 'Win32' + + self.configs[variant] = config + print "Adding '" + self.name + ' - ' + config.variant + '|' + config.platform + "' to '" + str(dswfile) + "'" + + if not env.has_key('variant'): + raise SCons.Errors.InternalError, \ + "You must specify a 'variant' argument (i.e. 'Debug' or " +\ + "'Release') to create an MSVS Solution File." 
+ elif SCons.Util.is_String(env['variant']): + AddConfig(self, env['variant']) + elif SCons.Util.is_List(env['variant']): + for variant in env['variant']: + AddConfig(self, variant) + + self.platforms = [] + for key in self.configs.keys(): + platform = self.configs[key].platform + if not platform in self.platforms: + self.platforms.append(platform) + + def Parse(self): + try: + dswfile = open(self.dswfile,'r') + except IOError: + return # doesn't exist yet, so can't add anything to configs. + + line = dswfile.readline() + while line: + if line[:9] == "EndGlobal": + break + line = dswfile.readline() + + line = dswfile.readline() + datas = line + while line: + line = dswfile.readline() + datas = datas + line + + # OK, we've found our little pickled cache of data. + try: + datas = base64.decodestring(datas) + data = pickle.loads(datas) + except KeyboardInterrupt: + raise + except: + return # unable to unpickle any data for some reason + + self.configs.update(data) + + def PrintSolution(self): + """Writes a solution file""" + self.file.write('Microsoft Visual Studio Solution File, Format Version %s\n' % self.versionstr ) + if self.version_num >= 8.0: + self.file.write('# Visual Studio 2005\n') + for p in self.dspfiles: + name = os.path.basename(p) + base, suffix = SCons.Util.splitext(name) + if suffix == '.vcproj': + name = base + guid = _generateGUID(p, '') + self.file.write('Project("%s") = "%s", "%s", "%s"\n' + % ( external_makefile_guid, name, p, guid ) ) + if self.version_num >= 7.1 and self.version_num < 8.0: + self.file.write('\tProjectSection(ProjectDependencies) = postProject\n' + '\tEndProjectSection\n') + self.file.write('EndProject\n') + + self.file.write('Global\n') + + env = self.env + if env.has_key('MSVS_SCC_PROVIDER'): + dspfile_base = os.path.basename(self.dspfile) + slnguid = self.slnguid + scc_provider = env.get('MSVS_SCC_PROVIDER', '') + scc_provider = string.replace(scc_provider, ' ', r'\u0020') + scc_project_name = env.get('MSVS_SCC_PROJECT_NAME', '') + # scc_aux_path = env.get('MSVS_SCC_AUX_PATH', '') + scc_local_path = env.get('MSVS_SCC_LOCAL_PATH', '') + scc_project_base_path = env.get('MSVS_SCC_PROJECT_BASE_PATH', '') + # project_guid = env.get('MSVS_PROJECT_GUID', '') + + self.file.write('\tGlobalSection(SourceCodeControl) = preSolution\n' + '\t\tSccNumberOfProjects = 2\n' + '\t\tSccProjectUniqueName0 = %(dspfile_base)s\n' + '\t\tSccLocalPath0 = %(scc_local_path)s\n' + '\t\tCanCheckoutShared = true\n' + '\t\tSccProjectFilePathRelativizedFromConnection0 = %(scc_project_base_path)s\n' + '\t\tSccProjectName1 = %(scc_project_name)s\n' + '\t\tSccLocalPath1 = %(scc_local_path)s\n' + '\t\tSccProvider1 = %(scc_provider)s\n' + '\t\tCanCheckoutShared = true\n' + '\t\tSccProjectFilePathRelativizedFromConnection1 = %(scc_project_base_path)s\n' + '\t\tSolutionUniqueID = %(slnguid)s\n' + '\tEndGlobalSection\n' % locals()) + + if self.version_num >= 8.0: + self.file.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\n') + else: + self.file.write('\tGlobalSection(SolutionConfiguration) = preSolution\n') + + confkeys = self.configs.keys() + confkeys.sort() + cnt = 0 + for name in confkeys: + variant = self.configs[name].variant + platform = self.configs[name].platform + if self.version_num >= 8.0: + self.file.write('\t\t%s|%s = %s|%s\n' % (variant, platform, variant, platform)) + else: + self.file.write('\t\tConfigName.%d = %s\n' % (cnt, variant)) + cnt = cnt + 1 + self.file.write('\tEndGlobalSection\n') + if self.version_num < 7.1: + 
self.file.write('\tGlobalSection(ProjectDependencies) = postSolution\n' + '\tEndGlobalSection\n') + if self.version_num >= 8.0: + self.file.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\n') + else: + self.file.write('\tGlobalSection(ProjectConfiguration) = postSolution\n') + + for name in confkeys: + variant = self.configs[name].variant + platform = self.configs[name].platform + if self.version_num >= 8.0: + for p in self.dspfiles: + guid = _generateGUID(p, '') + self.file.write('\t\t%s.%s|%s.ActiveCfg = %s|%s\n' + '\t\t%s.%s|%s.Build.0 = %s|%s\n' % (guid,variant,platform,variant,platform,guid,variant,platform,variant,platform)) + else: + for p in self.dspfiles: + guid = _generateGUID(p, '') + self.file.write('\t\t%s.%s.ActiveCfg = %s|%s\n' + '\t\t%s.%s.Build.0 = %s|%s\n' %(guid,variant,variant,platform,guid,variant,variant,platform)) + + self.file.write('\tEndGlobalSection\n') + + if self.version_num >= 8.0: + self.file.write('\tGlobalSection(SolutionProperties) = preSolution\n' + '\t\tHideSolutionNode = FALSE\n' + '\tEndGlobalSection\n') + else: + self.file.write('\tGlobalSection(ExtensibilityGlobals) = postSolution\n' + '\tEndGlobalSection\n' + '\tGlobalSection(ExtensibilityAddIns) = postSolution\n' + '\tEndGlobalSection\n') + self.file.write('EndGlobal\n') + if self.nokeep == 0: + pdata = pickle.dumps(self.configs,1) + pdata = base64.encodestring(pdata) + self.file.write(pdata + '\n') + + def Build(self): + try: + self.file = open(self.dswfile,'w') + except IOError, detail: + raise SCons.Errors.InternalError, 'Unable to open "' + self.dswfile + '" for writing:' + str(detail) + else: + self.PrintSolution() + self.file.close() + +V6DSWHeader = """\ +Microsoft Developer Studio Workspace File, Format Version 6.00 +# WARNING: DO NOT EDIT OR DELETE THIS WORKSPACE FILE! 
+ +############################################################################### + +Project: "%(name)s"="%(dspfile)s" - Package Owner=<4> + +Package=<5> +{{{ +}}} + +Package=<4> +{{{ +}}} + +############################################################################### + +Global: + +Package=<5> +{{{ +}}} + +Package=<3> +{{{ +}}} + +############################################################################### +""" + +class _GenerateV6DSW(_DSWGenerator): + """Generates a Workspace file for MSVS 6.0""" + + def PrintWorkspace(self): + """ writes a DSW file """ + name = self.name + dspfile = self.dspfiles[0] + self.file.write(V6DSWHeader % locals()) + + def Build(self): + try: + self.file = open(self.dswfile,'w') + except IOError, detail: + raise SCons.Errors.InternalError, 'Unable to open "' + self.dswfile + '" for writing:' + str(detail) + else: + self.PrintWorkspace() + self.file.close() + + +def GenerateDSP(dspfile, source, env): + """Generates a Project file based on the version of MSVS that is being used""" + + version_num = 6.0 + if env.has_key('MSVS_VERSION'): + version_num, suite = msvs_parse_version(env['MSVS_VERSION']) + if version_num >= 7.0: + g = _GenerateV7DSP(dspfile, source, env) + g.Build() + else: + g = _GenerateV6DSP(dspfile, source, env) + g.Build() + +def GenerateDSW(dswfile, source, env): + """Generates a Solution/Workspace file based on the version of MSVS that is being used""" + + version_num = 6.0 + if env.has_key('MSVS_VERSION'): + version_num, suite = msvs_parse_version(env['MSVS_VERSION']) + if version_num >= 7.0: + g = _GenerateV7DSW(dswfile, source, env) + g.Build() + else: + g = _GenerateV6DSW(dswfile, source, env) + g.Build() + + +############################################################################## +# Above here are the classes and functions for generation of +# DSP/DSW/SLN/VCPROJ files. +############################################################################## + +def GetMSVSProjectSuffix(target, source, env, for_signature): + return env['MSVS']['PROJECTSUFFIX'] + +def GetMSVSSolutionSuffix(target, source, env, for_signature): + return env['MSVS']['SOLUTIONSUFFIX'] + +def GenerateProject(target, source, env): + # generate the dsp file, according to the version of MSVS. + builddspfile = target[0] + dspfile = builddspfile.srcnode() + + # this detects whether or not we're using a VariantDir + if not dspfile is builddspfile: + try: + bdsp = open(str(builddspfile), "w+") + except IOError, detail: + print 'Unable to open "' + str(dspfile) + '" for writing:',detail,'\n' + raise + + bdsp.write("This is just a placeholder file.\nThe real project file is here:\n%s\n" % dspfile.get_abspath()) + + GenerateDSP(dspfile, source, env) + + if env.get('auto_build_solution', 1): + builddswfile = target[1] + dswfile = builddswfile.srcnode() + + if not dswfile is builddswfile: + + try: + bdsw = open(str(builddswfile), "w+") + except IOError, detail: + print 'Unable to open "' + str(dspfile) + '" for writing:',detail,'\n' + raise + + bdsw.write("This is just a placeholder file.\nThe real workspace file is here:\n%s\n" % dswfile.get_abspath()) + + GenerateDSW(dswfile, source, env) + +def GenerateSolution(target, source, env): + GenerateDSW(target[0], source, env) + +def projectEmitter(target, source, env): + """Sets up the DSP dependencies.""" + + # todo: Not sure what sets source to what user has passed as target, + # but this is what happens. When that is fixed, we also won't have + # to make the user always append env['MSVSPROJECTSUFFIX'] to target. 
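+ # Sketch (hypothetical values): the code below folds every relevant
+ # keyword argument into one string and wraps it in a Value() node,
+ # e.g. something like
+ #   'prj_inputs:<MSVSSCONSCOM> <MSVSENCODING> "foo.exe" "Debug" "foo.vcproj"'
+ # so the project file is regenerated whenever any of those inputs change.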
+ if source[0] == target[0]: + source = [] + + # make sure the suffix is correct for the version of MSVS we're running. + (base, suff) = SCons.Util.splitext(str(target[0])) + suff = env.subst('$MSVSPROJECTSUFFIX') + target[0] = base + suff + + if not source: + source = 'prj_inputs:' + source = source + env.subst('$MSVSSCONSCOM', 1) + source = source + env.subst('$MSVSENCODING', 1) + + if env.has_key('buildtarget') and env['buildtarget'] != None: + if SCons.Util.is_String(env['buildtarget']): + source = source + ' "%s"' % env['buildtarget'] + elif SCons.Util.is_List(env['buildtarget']): + for bt in env['buildtarget']: + if SCons.Util.is_String(bt): + source = source + ' "%s"' % bt + else: + try: source = source + ' "%s"' % bt.get_abspath() + except AttributeError: raise SCons.Errors.InternalError, \ + "buildtarget can be a string, a node, a list of strings or nodes, or None" + else: + try: source = source + ' "%s"' % env['buildtarget'].get_abspath() + except AttributeError: raise SCons.Errors.InternalError, \ + "buildtarget can be a string, a node, a list of strings or nodes, or None" + + if env.has_key('outdir') and env['outdir'] != None: + if SCons.Util.is_String(env['outdir']): + source = source + ' "%s"' % env['outdir'] + elif SCons.Util.is_List(env['outdir']): + for s in env['outdir']: + if SCons.Util.is_String(s): + source = source + ' "%s"' % s + else: + try: source = source + ' "%s"' % s.get_abspath() + except AttributeError: raise SCons.Errors.InternalError, \ + "outdir can be a string, a node, a list of strings or nodes, or None" + else: + try: source = source + ' "%s"' % env['outdir'].get_abspath() + except AttributeError: raise SCons.Errors.InternalError, \ + "outdir can be a string, a node, a list of strings or nodes, or None" + + if env.has_key('name'): + if SCons.Util.is_String(env['name']): + source = source + ' "%s"' % env['name'] + else: + raise SCons.Errors.InternalError, "name must be a string" + + if env.has_key('variant'): + if SCons.Util.is_String(env['variant']): + source = source + ' "%s"' % env['variant'] + elif SCons.Util.is_List(env['variant']): + for variant in env['variant']: + if SCons.Util.is_String(variant): + source = source + ' "%s"' % variant + else: + raise SCons.Errors.InternalError, "name must be a string or a list of strings" + else: + raise SCons.Errors.InternalError, "variant must be a string or a list of strings" + else: + raise SCons.Errors.InternalError, "variant must be specified" + + for s in _DSPGenerator.srcargs: + if env.has_key(s): + if SCons.Util.is_String(env[s]): + source = source + ' "%s' % env[s] + elif SCons.Util.is_List(env[s]): + for t in env[s]: + if SCons.Util.is_String(t): + source = source + ' "%s"' % t + else: + raise SCons.Errors.InternalError, s + " must be a string or a list of strings" + else: + raise SCons.Errors.InternalError, s + " must be a string or a list of strings" + + source = source + ' "%s"' % str(target[0]) + source = [SCons.Node.Python.Value(source)] + + targetlist = [target[0]] + sourcelist = source + + if env.get('auto_build_solution', 1): + env['projects'] = targetlist + t, s = solutionEmitter(target, target, env) + targetlist = targetlist + t + + return (targetlist, sourcelist) + +def solutionEmitter(target, source, env): + """Sets up the DSW dependencies.""" + + # todo: Not sure what sets source to what user has passed as target, + # but this is what happens. When that is fixed, we also won't have + # to make the user always append env['MSVSSOLUTIONSUFFIX'] to target. 
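+ # Note: same idea as projectEmitter() above -- the keyword arguments
+ # are folded into a single 'sln_inputs:...' Value() node so the
+ # solution file is regenerated when any of them change.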
+ if source[0] == target[0]: + source = [] + + # make sure the suffix is correct for the version of MSVS we're running. + (base, suff) = SCons.Util.splitext(str(target[0])) + suff = env.subst('$MSVSSOLUTIONSUFFIX') + target[0] = base + suff + + if not source: + source = 'sln_inputs:' + + if env.has_key('name'): + if SCons.Util.is_String(env['name']): + source = source + ' "%s"' % env['name'] + else: + raise SCons.Errors.InternalError, "name must be a string" + + if env.has_key('variant'): + if SCons.Util.is_String(env['variant']): + source = source + ' "%s"' % env['variant'] + elif SCons.Util.is_List(env['variant']): + for variant in env['variant']: + if SCons.Util.is_String(variant): + source = source + ' "%s"' % variant + else: + raise SCons.Errors.InternalError, "name must be a string or a list of strings" + else: + raise SCons.Errors.InternalError, "variant must be a string or a list of strings" + else: + raise SCons.Errors.InternalError, "variant must be specified" + + if env.has_key('slnguid'): + if SCons.Util.is_String(env['slnguid']): + source = source + ' "%s"' % env['slnguid'] + else: + raise SCons.Errors.InternalError, "slnguid must be a string" + + if env.has_key('projects'): + if SCons.Util.is_String(env['projects']): + source = source + ' "%s"' % env['projects'] + elif SCons.Util.is_List(env['projects']): + for t in env['projects']: + if SCons.Util.is_String(t): + source = source + ' "%s"' % t + + source = source + ' "%s"' % str(target[0]) + source = [SCons.Node.Python.Value(source)] + + return ([target[0]], source) + +projectAction = SCons.Action.Action(GenerateProject, None) + +solutionAction = SCons.Action.Action(GenerateSolution, None) + +projectBuilder = SCons.Builder.Builder(action = '$MSVSPROJECTCOM', + suffix = '$MSVSPROJECTSUFFIX', + emitter = projectEmitter) + +solutionBuilder = SCons.Builder.Builder(action = '$MSVSSOLUTIONCOM', + suffix = '$MSVSSOLUTIONSUFFIX', + emitter = solutionEmitter) + +default_MSVS_SConscript = None + +def generate(env): + """Add Builders and construction variables for Microsoft Visual + Studio project files to an Environment.""" + try: + env['BUILDERS']['MSVSProject'] + except KeyError: + env['BUILDERS']['MSVSProject'] = projectBuilder + + try: + env['BUILDERS']['MSVSSolution'] + except KeyError: + env['BUILDERS']['MSVSSolution'] = solutionBuilder + + env['MSVSPROJECTCOM'] = projectAction + env['MSVSSOLUTIONCOM'] = solutionAction + + if SCons.Script.call_stack: + # XXX Need to find a way to abstract this; the build engine + # shouldn't depend on anything in SCons.Script. 
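+ # Note: MSVSSCONSCRIPT names the SConstruct/SConscript that the
+ # generated project will invoke (through $MSVSSCONSCOM below) when a
+ # build is started from inside Visual Studio.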
+ env['MSVSSCONSCRIPT'] = SCons.Script.call_stack[0].sconscript + else: + global default_MSVS_SConscript + if default_MSVS_SConscript is None: + default_MSVS_SConscript = env.File('SConstruct') + env['MSVSSCONSCRIPT'] = default_MSVS_SConscript + + env['MSVSSCONS'] = '"%s" -c "%s"' % (python_executable, getExecScriptMain(env)) + env['MSVSSCONSFLAGS'] = '-C "${MSVSSCONSCRIPT.dir.abspath}" -f ${MSVSSCONSCRIPT.name}' + env['MSVSSCONSCOM'] = '$MSVSSCONS $MSVSSCONSFLAGS' + env['MSVSBUILDCOM'] = '$MSVSSCONSCOM "$MSVSBUILDTARGET"' + env['MSVSREBUILDCOM'] = '$MSVSSCONSCOM "$MSVSBUILDTARGET"' + env['MSVSCLEANCOM'] = '$MSVSSCONSCOM -c "$MSVSBUILDTARGET"' + env['MSVSENCODING'] = 'Windows-1252' + + # Set-up ms tools paths for default version + msvc_setup_env_once(env) + + if env.has_key('MSVS_VERSION'): + version_num, suite = msvs_parse_version(env['MSVS_VERSION']) + else: + (version_num, suite) = (7.0, None) # guess at a default + if not env.has_key('MSVS'): + env['MSVS'] = {} + if (version_num < 7.0): + env['MSVS']['PROJECTSUFFIX'] = '.dsp' + env['MSVS']['SOLUTIONSUFFIX'] = '.dsw' + else: + env['MSVS']['PROJECTSUFFIX'] = '.vcproj' + env['MSVS']['SOLUTIONSUFFIX'] = '.sln' + + env['GET_MSVSPROJECTSUFFIX'] = GetMSVSProjectSuffix + env['GET_MSVSSOLUTIONSUFFIX'] = GetMSVSSolutionSuffix + env['MSVSPROJECTSUFFIX'] = '${GET_MSVSPROJECTSUFFIX}' + env['MSVSSOLUTIONSUFFIX'] = '${GET_MSVSSOLUTIONSUFFIX}' + env['SCONS_HOME'] = os.environ.get('SCONS_HOME') + +def exists(env): + return msvc_exists() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/mwcc.py b/engine/SCons/Tool/mwcc.py new file mode 100644 index 0000000..6af7e0b --- /dev/null +++ b/engine/SCons/Tool/mwcc.py @@ -0,0 +1,208 @@ +"""SCons.Tool.mwcc + +Tool-specific initialization for the Metrowerks CodeWarrior compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/mwcc.py 4577 2009/12/27 19:43:56 scons" + +import os +import os.path +import string + +import SCons.Util + +def set_vars(env): + """Set MWCW_VERSION, MWCW_VERSIONS, and some codewarrior environment vars + + MWCW_VERSIONS is set to a list of objects representing installed versions + + MWCW_VERSION is set to the version object that will be used for building. + MWCW_VERSION can be set to a string during Environment + construction to influence which version is chosen, otherwise + the latest one from MWCW_VERSIONS is used. + + Returns true if at least one version is found, false otherwise + """ + desired = env.get('MWCW_VERSION', '') + + # return right away if the variables are already set + if isinstance(desired, MWVersion): + return 1 + elif desired is None: + return 0 + + versions = find_versions() + version = None + + if desired: + for v in versions: + if str(v) == desired: + version = v + elif versions: + version = versions[-1] + + env['MWCW_VERSIONS'] = versions + env['MWCW_VERSION'] = version + + if version is None: + return 0 + + env.PrependENVPath('PATH', version.clpath) + env.PrependENVPath('PATH', version.dllpath) + ENV = env['ENV'] + ENV['CWFolder'] = version.path + ENV['LM_LICENSE_FILE'] = version.license + plus = lambda x: '+%s' % x + ENV['MWCIncludes'] = string.join(map(plus, version.includes), os.pathsep) + ENV['MWLibraries'] = string.join(map(plus, version.libs), os.pathsep) + return 1 + + +def find_versions(): + """Return a list of MWVersion objects representing installed versions""" + versions = [] + + ### This function finds CodeWarrior by reading from the registry on + ### Windows. Some other method needs to be implemented for other + ### platforms, maybe something that calls env.WhereIs('mwcc') + + if SCons.Util.can_read_reg: + try: + HLM = SCons.Util.HKEY_LOCAL_MACHINE + product = 'SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions' + product_key = SCons.Util.RegOpenKeyEx(HLM, product) + + i = 0 + while 1: + name = product + '\\' + SCons.Util.RegEnumKey(product_key, i) + name_key = SCons.Util.RegOpenKeyEx(HLM, name) + + try: + version = SCons.Util.RegQueryValueEx(name_key, 'VERSION') + path = SCons.Util.RegQueryValueEx(name_key, 'PATH') + mwv = MWVersion(version[0], path[0], 'Win32-X86') + versions.append(mwv) + except SCons.Util.RegError: + pass + + i = i + 1 + + except SCons.Util.RegError: + pass + + return versions + + +class MWVersion: + def __init__(self, version, path, platform): + self.version = version + self.path = path + self.platform = platform + self.clpath = os.path.join(path, 'Other Metrowerks Tools', + 'Command Line Tools') + self.dllpath = os.path.join(path, 'Bin') + + # The Metrowerks tools don't store any configuration data so they + # are totally dumb when it comes to locating standard headers, + # libraries, and other files, expecting all the information + # to be handed to them in environment variables. The members set + # below control what information scons injects into the environment + + ### The paths below give a normal build environment in CodeWarrior for + ### Windows, other versions of CodeWarrior might need different paths. 
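+ # Illustration (hypothetical install path): for CodeWarrior rooted at
+ # C:\CodeWarrior on the Win32-X86 platform, set_vars() above ends up
+ # exporting roughly
+ #   MWCIncludes = '+C:\CodeWarrior\MSL;+C:\CodeWarrior\Win32-X86 Support'
+ # i.e. each entry prefixed with '+' and joined with os.pathsep.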
+ + msl = os.path.join(path, 'MSL') + support = os.path.join(path, '%s Support' % platform) + + self.license = os.path.join(path, 'license.dat') + self.includes = [msl, support] + self.libs = [msl, support] + + def __str__(self): + return self.version + + +CSuffixes = ['.c', '.C'] +CXXSuffixes = ['.cc', '.cpp', '.cxx', '.c++', '.C++'] + + +def generate(env): + """Add Builders and construction variables for the mwcc to an Environment.""" + import SCons.Defaults + import SCons.Tool + + set_vars(env) + + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + + for suffix in CSuffixes: + static_obj.add_action(suffix, SCons.Defaults.CAction) + shared_obj.add_action(suffix, SCons.Defaults.ShCAction) + + for suffix in CXXSuffixes: + static_obj.add_action(suffix, SCons.Defaults.CXXAction) + shared_obj.add_action(suffix, SCons.Defaults.ShCXXAction) + + env['CCCOMFLAGS'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -nolink -o $TARGET $SOURCES' + + env['CC'] = 'mwcc' + env['CCCOM'] = '$CC $CFLAGS $CCFLAGS $CCCOMFLAGS' + + env['CXX'] = 'mwcc' + env['CXXCOM'] = '$CXX $CXXFLAGS $CCCOMFLAGS' + + env['SHCC'] = '$CC' + env['SHCCFLAGS'] = '$CCFLAGS' + env['SHCFLAGS'] = '$CFLAGS' + env['SHCCCOM'] = '$SHCC $SHCFLAGS $SHCCFLAGS $CCCOMFLAGS' + + env['SHCXX'] = '$CXX' + env['SHCXXFLAGS'] = '$CXXFLAGS' + env['SHCXXCOM'] = '$SHCXX $SHCXXFLAGS $CCCOMFLAGS' + + env['CFILESUFFIX'] = '.c' + env['CXXFILESUFFIX'] = '.cpp' + env['CPPDEFPREFIX'] = '-D' + env['CPPDEFSUFFIX'] = '' + env['INCPREFIX'] = '-I' + env['INCSUFFIX'] = '' + + #env['PCH'] = ? + #env['PCHSTOP'] = ? + + +def exists(env): + return set_vars(env) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/mwld.py b/engine/SCons/Tool/mwld.py new file mode 100644 index 0000000..03a1d77 --- /dev/null +++ b/engine/SCons/Tool/mwld.py @@ -0,0 +1,107 @@ +"""SCons.Tool.mwld + +Tool-specific initialization for the Metrowerks CodeWarrior linker. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/mwld.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Tool + + +def generate(env): + """Add Builders and construction variables for lib to an Environment.""" + SCons.Tool.createStaticLibBuilder(env) + SCons.Tool.createSharedLibBuilder(env) + SCons.Tool.createProgBuilder(env) + + env['AR'] = 'mwld' + env['ARCOM'] = '$AR $ARFLAGS -library -o $TARGET $SOURCES' + + env['LIBDIRPREFIX'] = '-L' + env['LIBDIRSUFFIX'] = '' + env['LIBLINKPREFIX'] = '-l' + env['LIBLINKSUFFIX'] = '.lib' + + env['LINK'] = 'mwld' + env['LINKCOM'] = '$LINK $LINKFLAGS -o $TARGET $SOURCES $_LIBDIRFLAGS $_LIBFLAGS' + + env['SHLINK'] = '$LINK' + env['SHLINKFLAGS'] = '$LINKFLAGS' + env['SHLINKCOM'] = shlib_action + env['SHLIBEMITTER']= shlib_emitter + + +def exists(env): + import SCons.Tool.mwcc + return SCons.Tool.mwcc.set_vars(env) + + +def shlib_generator(target, source, env, for_signature): + cmd = ['$SHLINK', '$SHLINKFLAGS', '-shared'] + + no_import_lib = env.get('no_import_lib', 0) + if no_import_lib: cmd.extend('-noimplib') + + dll = env.FindIxes(target, 'SHLIBPREFIX', 'SHLIBSUFFIX') + if dll: cmd.extend(['-o', dll]) + + implib = env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX') + if implib: cmd.extend(['-implib', implib.get_string(for_signature)]) + + cmd.extend(['$SOURCES', '$_LIBDIRFLAGS', '$_LIBFLAGS']) + + return [cmd] + + +def shlib_emitter(target, source, env): + dll = env.FindIxes(target, 'SHLIBPREFIX', 'SHLIBSUFFIX') + no_import_lib = env.get('no_import_lib', 0) + + if not dll: + raise SCons.Errors.UserError, "A shared library should have exactly one target with the suffix: %s" % env.subst("$SHLIBSUFFIX") + + if not no_import_lib and \ + not env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX'): + + # Append an import library to the list of targets. + target.append(env.ReplaceIxes(dll, + 'SHLIBPREFIX', 'SHLIBSUFFIX', + 'LIBPREFIX', 'LIBSUFFIX')) + + return target, source + + +shlib_action = SCons.Action.Action(shlib_generator, generator=1) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/nasm.py b/engine/SCons/Tool/nasm.py new file mode 100644 index 0000000..56e5556 --- /dev/null +++ b/engine/SCons/Tool/nasm.py @@ -0,0 +1,72 @@ +"""SCons.Tool.nasm + +Tool-specific initialization for nasm, the famous Netwide Assembler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/nasm.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Defaults +import SCons.Tool +import SCons.Util + +ASSuffixes = ['.s', '.asm', '.ASM'] +ASPPSuffixes = ['.spp', '.SPP', '.sx'] +if SCons.Util.case_sensitive_suffixes('.s', '.S'): + ASPPSuffixes.extend(['.S']) +else: + ASSuffixes.extend(['.S']) + +def generate(env): + """Add Builders and construction variables for nasm to an Environment.""" + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + + for suffix in ASSuffixes: + static_obj.add_action(suffix, SCons.Defaults.ASAction) + static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) + + for suffix in ASPPSuffixes: + static_obj.add_action(suffix, SCons.Defaults.ASPPAction) + static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) + + env['AS'] = 'nasm' + env['ASFLAGS'] = SCons.Util.CLVar('') + env['ASPPFLAGS'] = '$ASFLAGS' + env['ASCOM'] = '$AS $ASFLAGS -o $TARGET $SOURCES' + env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o $TARGET $SOURCES' + +def exists(env): + return env.Detect('nasm') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/packaging/__init__.py b/engine/SCons/Tool/packaging/__init__.py new file mode 100644 index 0000000..7d191dd --- /dev/null +++ b/engine/SCons/Tool/packaging/__init__.py @@ -0,0 +1,314 @@ +"""SCons.Tool.Packaging + +SCons Packaging Tool. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
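+# Illustrative sketch (hypothetical file names) of using the nasm tool set up
+# above; plain .asm sources are assembled with '$AS $ASFLAGS -o $TARGET $SOURCES':
+#
+#   env = Environment(tools=['nasm', 'gcc', 'gnulink'])
+#   env.Append(ASFLAGS='-f elf')
+#   env.Program('demo', ['main.c', 'lowlevel.asm'])
+#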
+# + +__revision__ = "src/engine/SCons/Tool/packaging/__init__.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Environment +from SCons.Variables import * +from SCons.Errors import * +from SCons.Util import is_List, make_path_relative +from SCons.Warnings import warn, Warning + +import os, imp +import SCons.Defaults + +__all__ = [ 'src_targz', 'src_tarbz2', 'src_zip', 'tarbz2', 'targz', 'zip', 'rpm', 'msi', 'ipk' ] + +# +# Utility and Builder function +# +def Tag(env, target, source, *more_tags, **kw_tags): + """ Tag a file with the given arguments, just sets the accordingly named + attribute on the file object. + + TODO: FIXME + """ + if not target: + target=source + first_tag=None + else: + first_tag=source + + if first_tag: + kw_tags[first_tag[0]] = '' + + if len(kw_tags) == 0 and len(more_tags) == 0: + raise UserError, "No tags given." + + # XXX: sanity checks + for x in more_tags: + kw_tags[x] = '' + + if not SCons.Util.is_List(target): + target=[target] + else: + # hmm, sometimes the target list, is a list of a list + # make sure it is flattened prior to processing. + # TODO: perhaps some bug ?!? + target=env.Flatten(target) + + for t in target: + for (k,v) in kw_tags.items(): + # all file tags have to start with PACKAGING_, so we can later + # differentiate between "normal" object attributes and the + # packaging attributes. As the user should not be bothered with + # that, the prefix will be added here if missing. + #if not k.startswith('PACKAGING_'): + if k[:10] != 'PACKAGING_': + k='PACKAGING_'+k + setattr(t, k, v) + +def Package(env, target=None, source=None, **kw): + """ Entry point for the package tool. + """ + # check if we need to find the source files ourself + if not source: + source = env.FindInstalledFiles() + + if len(source)==0: + raise UserError, "No source for Package() given" + + # decide which types of packages shall be built. Can be defined through + # four mechanisms: command line argument, keyword argument, + # environment argument and default selection( zip or tar.gz ) in that + # order. + try: kw['PACKAGETYPE']=env['PACKAGETYPE'] + except KeyError: pass + + if not kw.get('PACKAGETYPE'): + from SCons.Script import GetOption + kw['PACKAGETYPE'] = GetOption('package_type') + + if kw['PACKAGETYPE'] == None: + if env['BUILDERS'].has_key('Tar'): + kw['PACKAGETYPE']='targz' + elif env['BUILDERS'].has_key('Zip'): + kw['PACKAGETYPE']='zip' + else: + raise UserError, "No type for Package() given" + + PACKAGETYPE=kw['PACKAGETYPE'] + if not is_List(PACKAGETYPE): + PACKAGETYPE=string.split(PACKAGETYPE, ',') + + # load the needed packagers. + def load_packager(type): + try: + file,path,desc=imp.find_module(type, __path__) + return imp.load_module(type, file, path, desc) + except ImportError, e: + raise EnvironmentError("packager %s not available: %s"%(type,str(e))) + + packagers=map(load_packager, PACKAGETYPE) + + # set up targets and the PACKAGEROOT + try: + # fill up the target list with a default target name until the PACKAGETYPE + # list is of the same size as the target list. 
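+ # For example (hypothetical values): with NAME='foo', VERSION='1.0' and
+ # PACKAGETYPE=['targz', 'zip'], an empty target list is padded below to
+ # ['foo-1.0', 'foo-1.0'] and PACKAGEROOT defaults to 'foo-1.0'.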
+ if not target: target = [] + + size_diff = len(PACKAGETYPE)-len(target) + default_name = "%(NAME)s-%(VERSION)s" + + if size_diff>0: + default_target = default_name%kw + target.extend( [default_target]*size_diff ) + + if not kw.has_key('PACKAGEROOT'): + kw['PACKAGEROOT'] = default_name%kw + + except KeyError, e: + raise SCons.Errors.UserError( "Missing Packagetag '%s'"%e.args[0] ) + + # setup the source files + source=env.arg2nodes(source, env.fs.Entry) + + # call the packager to setup the dependencies. + targets=[] + try: + for packager in packagers: + t=[target.pop(0)] + t=apply(packager.package, [env,t,source], kw) + targets.extend(t) + + assert( len(target) == 0 ) + + except KeyError, e: + raise SCons.Errors.UserError( "Missing Packagetag '%s' for %s packager"\ + % (e.args[0],packager.__name__) ) + except TypeError, e: + # this exception means that a needed argument for the packager is + # missing. As our packagers get their "tags" as named function + # arguments we need to find out which one is missing. + from inspect import getargspec + args,varargs,varkw,defaults=getargspec(packager.package) + if defaults!=None: + args=args[:-len(defaults)] # throw away arguments with default values + args.remove('env') + args.remove('target') + args.remove('source') + # now remove any args for which we have a value in kw. + #args=[x for x in args if not kw.has_key(x)] + args=filter(lambda x, kw=kw: not kw.has_key(x), args) + + if len(args)==0: + raise # must be a different error, so reraise + elif len(args)==1: + raise SCons.Errors.UserError( "Missing Packagetag '%s' for %s packager"\ + % (args[0],packager.__name__) ) + else: + raise SCons.Errors.UserError( "Missing Packagetags '%s' for %s packager"\ + % (", ".join(args),packager.__name__) ) + + target=env.arg2nodes(target, env.fs.Entry) + targets.extend(env.Alias( 'package', targets )) + return targets + +# +# SCons tool initialization functions +# + +added = None + +def generate(env): + from SCons.Script import AddOption + global added + if not added: + added = 1 + AddOption('--package-type', + dest='package_type', + default=None, + type="string", + action="store", + help='The type of package to create.') + + try: + env['BUILDERS']['Package'] + env['BUILDERS']['Tag'] + except KeyError: + env['BUILDERS']['Package'] = Package + env['BUILDERS']['Tag'] = Tag + +def exists(env): + return 1 + +# XXX +def options(opts): + opts.AddVariables( + EnumVariable( 'PACKAGETYPE', + 'the type of package to create.', + None, allowed_values=map( str, __all__ ), + ignorecase=2 + ) + ) + +# +# Internal utility functions +# + +def copy_attr(f1, f2): + """ copies the special packaging file attributes from f1 to f2. + """ + #pattrs = [x for x in dir(f1) if not hasattr(f2, x) and\ + # x.startswith('PACKAGING_')] + copyit = lambda x, f2=f2: not hasattr(f2, x) and x[:10] == 'PACKAGING_' + pattrs = filter(copyit, dir(f1)) + for attr in pattrs: + setattr(f2, attr, getattr(f1, attr)) +def putintopackageroot(target, source, env, pkgroot, honor_install_location=1): + """ Uses the CopyAs builder to copy all source files to the directory given + in pkgroot. + + If honor_install_location is set and the copied source file has an + PACKAGING_INSTALL_LOCATION attribute, the PACKAGING_INSTALL_LOCATION is + used as the new name of the source file under pkgroot. + + The source file will not be copied if it is already under the the pkgroot + directory. + + All attributes of the source file will be copied to the new file. + """ + # make sure the packageroot is a Dir object. 
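+ # (For illustration, with hypothetical paths: a file tagged with
+ # PACKAGING_INSTALL_LOCATION='/usr/bin/foo' and a pkgroot of 'foo-1.0' is
+ # copied to 'foo-1.0/usr/bin/foo'; make_path_relative() strips the leading
+ # separator.)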
+ if SCons.Util.is_String(pkgroot): pkgroot=env.Dir(pkgroot) + if not SCons.Util.is_List(source): source=[source] + + new_source = [] + for file in source: + if SCons.Util.is_String(file): file = env.File(file) + + if file.is_under(pkgroot): + new_source.append(file) + else: + if hasattr(file, 'PACKAGING_INSTALL_LOCATION') and\ + honor_install_location: + new_name=make_path_relative(file.PACKAGING_INSTALL_LOCATION) + else: + new_name=make_path_relative(file.get_path()) + + new_file=pkgroot.File(new_name) + new_file=env.CopyAs(new_file, file)[0] + copy_attr(file, new_file) + new_source.append(new_file) + + return (target, new_source) + +def stripinstallbuilder(target, source, env): + """ strips the install builder action from the source list and stores + the final installation location as the "PACKAGING_INSTALL_LOCATION" of + the source of the source file. This effectively removes the final installed + files from the source list while remembering the installation location. + + It also warns about files which have no install builder attached. + """ + def has_no_install_location(file): + return not (file.has_builder() and\ + hasattr(file.builder, 'name') and\ + (file.builder.name=="InstallBuilder" or\ + file.builder.name=="InstallAsBuilder")) + + if len(filter(has_no_install_location, source)): + warn(Warning, "there are files to package which have no\ + InstallBuilder attached, this might lead to irreproducible packages") + + n_source=[] + for s in source: + if has_no_install_location(s): + n_source.append(s) + else: + for ss in s.sources: + n_source.append(ss) + copy_attr(s, ss) + setattr(ss, 'PACKAGING_INSTALL_LOCATION', s.get_path()) + + return (target, n_source) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/packaging/ipk.py b/engine/SCons/Tool/packaging/ipk.py new file mode 100644 index 0000000..c703e00 --- /dev/null +++ b/engine/SCons/Tool/packaging/ipk.py @@ -0,0 +1,185 @@ +"""SCons.Tool.Packaging.ipk +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
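+# A minimal, hypothetical example of driving the packagers in this directory
+# from an SConstruct (names and tag values are made up for illustration):
+#
+#   prog = env.Install('/usr/bin', env.Program('foo', 'foo.c'))
+#   env.Package(NAME='foo', VERSION='1.0', PACKAGEVERSION=0,
+#               PACKAGETYPE='targz', LICENSE='gpl',
+#               SUMMARY='example package', DESCRIPTION='a longer description')
+#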
+# + +__revision__ = "src/engine/SCons/Tool/packaging/ipk.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Builder +import SCons.Node.FS +import os + +from SCons.Tool.packaging import stripinstallbuilder, putintopackageroot + +def package(env, target, source, PACKAGEROOT, NAME, VERSION, DESCRIPTION, + SUMMARY, X_IPK_PRIORITY, X_IPK_SECTION, SOURCE_URL, + X_IPK_MAINTAINER, X_IPK_DEPENDS, **kw): + """ this function prepares the packageroot directory for packaging with the + ipkg builder. + """ + SCons.Tool.Tool('ipkg').generate(env) + + # setup the Ipkg builder + bld = env['BUILDERS']['Ipkg'] + target, source = stripinstallbuilder(target, source, env) + target, source = putintopackageroot(target, source, env, PACKAGEROOT) + + # This should be overridable from the construction environment, + # which it is by using ARCHITECTURE=. + # Guessing based on what os.uname() returns at least allows it + # to work for both i386 and x86_64 Linux systems. + archmap = { + 'i686' : 'i386', + 'i586' : 'i386', + 'i486' : 'i386', + } + + buildarchitecture = os.uname()[4] + buildarchitecture = archmap.get(buildarchitecture, buildarchitecture) + + if kw.has_key('ARCHITECTURE'): + buildarchitecture = kw['ARCHITECTURE'] + + # setup the kw to contain the mandatory arguments to this fucntion. + # do this before calling any builder or setup function + loc=locals() + del loc['kw'] + kw.update(loc) + del kw['source'], kw['target'], kw['env'] + + # generate the specfile + specfile = gen_ipk_dir(PACKAGEROOT, source, env, kw) + + # override the default target. + if str(target[0])=="%s-%s"%(NAME, VERSION): + target=[ "%s_%s_%s.ipk"%(NAME, VERSION, buildarchitecture) ] + + # now apply the Ipkg builder + return apply(bld, [env, target, specfile], kw) + +def gen_ipk_dir(proot, source, env, kw): + # make sure the packageroot is a Dir object. + if SCons.Util.is_String(proot): proot=env.Dir(proot) + + # create the specfile builder + s_bld=SCons.Builder.Builder( + action = build_specfiles, + ) + + # create the specfile targets + spec_target=[] + control=proot.Dir('CONTROL') + spec_target.append(control.File('control')) + spec_target.append(control.File('conffiles')) + spec_target.append(control.File('postrm')) + spec_target.append(control.File('prerm')) + spec_target.append(control.File('postinst')) + spec_target.append(control.File('preinst')) + + # apply the builder to the specfile targets + apply(s_bld, [env, spec_target, source], kw) + + # the packageroot directory does now contain the specfiles. + return proot + +def build_specfiles(source, target, env): + """ filter the targets for the needed files and use the variables in env + to create the specfile. + """ + # + # At first we care for the CONTROL/control file, which is the main file for ipk. + # + # For this we need to open multiple files in random order, so we store into + # a dict so they can be easily accessed. 
+ # + # + opened_files={} + def open_file(needle, haystack): + try: + return opened_files[needle] + except KeyError: + file=filter(lambda x: x.get_path().rfind(needle)!=-1, haystack)[0] + opened_files[needle]=open(file.abspath, 'w') + return opened_files[needle] + + control_file=open_file('control', target) + + if not env.has_key('X_IPK_DESCRIPTION'): + env['X_IPK_DESCRIPTION']="%s\n %s"%(env['SUMMARY'], + env['DESCRIPTION'].replace('\n', '\n ')) + + + content = """ +Package: $NAME +Version: $VERSION +Priority: $X_IPK_PRIORITY +Section: $X_IPK_SECTION +Source: $SOURCE_URL +Architecture: $ARCHITECTURE +Maintainer: $X_IPK_MAINTAINER +Depends: $X_IPK_DEPENDS +Description: $X_IPK_DESCRIPTION +""" + + control_file.write(env.subst(content)) + + # + # now handle the various other files, which purpose it is to set post-, + # pre-scripts and mark files as config files. + # + # We do so by filtering the source files for files which are marked with + # the "config" tag and afterwards we do the same for x_ipk_postrm, + # x_ipk_prerm, x_ipk_postinst and x_ipk_preinst tags. + # + # The first one will write the name of the file into the file + # CONTROL/configfiles, the latter add the content of the x_ipk_* variable + # into the same named file. + # + for f in [x for x in source if 'PACKAGING_CONFIG' in dir(x)]: + config=open_file('conffiles') + config.write(f.PACKAGING_INSTALL_LOCATION) + config.write('\n') + + for str in 'POSTRM PRERM POSTINST PREINST'.split(): + name="PACKAGING_X_IPK_%s"%str + for f in [x for x in source if name in dir(x)]: + file=open_file(name) + file.write(env[str]) + + # + # close all opened files + for f in opened_files.values(): + f.close() + + # call a user specified function + if env.has_key('CHANGE_SPECFILE'): + content += env['CHANGE_SPECFILE'](target) + + return 0 + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/packaging/msi.py b/engine/SCons/Tool/packaging/msi.py new file mode 100644 index 0000000..be25f11 --- /dev/null +++ b/engine/SCons/Tool/packaging/msi.py @@ -0,0 +1,526 @@ +"""SCons.Tool.packaging.msi + +The msi packager. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
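+# For illustration, with hypothetical tag values NAME='foo', VERSION='1.0',
+# X_IPK_PRIORITY='optional' and X_IPK_SECTION='misc', the template used in
+# ipk.py's build_specfiles() above expands to a CONTROL/control file roughly like
+#
+#   Package: foo
+#   Version: 1.0
+#   Priority: optional
+#   Section: misc
+#   ...
+#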
+# + +__revision__ = "src/engine/SCons/Tool/packaging/msi.py 4577 2009/12/27 19:43:56 scons" + +import os +import SCons +from SCons.Action import Action +from SCons.Builder import Builder + +from xml.dom.minidom import * +from xml.sax.saxutils import escape + +from SCons.Tool.packaging import stripinstallbuilder + +# +# Utility functions +# +def convert_to_id(s, id_set): + """ Some parts of .wxs need an Id attribute (for example: The File and + Directory directives. The charset is limited to A-Z, a-z, digits, + underscores, periods. Each Id must begin with a letter or with a + underscore. Google for "CNDL0015" for information about this. + + Requirements: + * the string created must only contain chars from the target charset. + * the string created must have a minimal editing distance from the + original string. + * the string created must be unique for the whole .wxs file. + + Observation: + * There are 62 chars in the charset. + + Idea: + * filter out forbidden characters. Check for a collision with the help + of the id_set. Add the number of the number of the collision at the + end of the created string. Furthermore care for a correct start of + the string. + """ + charset = 'ABCDEFGHIJKLMNOPQRSTUVWXYabcdefghijklmnopqrstuvwxyz0123456789_.' + if s[0] in '0123456789.': + s += '_'+s + id = filter( lambda c : c in charset, s ) + + # did we already generate an id for this file? + try: + return id_set[id][s] + except KeyError: + # no we did not so initialize with the id + if not id_set.has_key(id): id_set[id] = { s : id } + # there is a collision, generate an id which is unique by appending + # the collision number + else: id_set[id][s] = id + str(len(id_set[id])) + + return id_set[id][s] + +def is_dos_short_file_name(file): + """ examine if the given file is in the 8.3 form. + """ + fname, ext = os.path.splitext(file) + proper_ext = len(ext) == 0 or (2 <= len(ext) <= 4) # the ext contains the dot + proper_fname = file.isupper() and len(fname) <= 8 + + return proper_ext and proper_fname + +def gen_dos_short_file_name(file, filename_set): + """ see http://support.microsoft.com/default.aspx?scid=kb;en-us;Q142982 + + These are no complete 8.3 dos short names. The ~ char is missing and + replaced with one character from the filename. WiX warns about such + filenames, since a collision might occur. Google for "CNDL1014" for + more information. + """ + # guard this to not confuse the generation + if is_dos_short_file_name(file): + return file + + fname, ext = os.path.splitext(file) # ext contains the dot + + # first try if it suffices to convert to upper + file = file.upper() + if is_dos_short_file_name(file): + return file + + # strip forbidden characters. + forbidden = '."/[]:;=, ' + fname = filter( lambda c : c not in forbidden, fname ) + + # check if we already generated a filename with the same number: + # thisis1.txt, thisis2.txt etc. + duplicate, num = not None, 1 + while duplicate: + shortname = "%s%s" % (fname[:8-len(str(num))].upper(),\ + str(num)) + if len(ext) >= 2: + shortname = "%s%s" % (shortname, ext[:4].upper()) + + duplicate, num = shortname in filename_set, num+1 + + assert( is_dos_short_file_name(shortname) ), 'shortname is %s, longname is %s' % (shortname, file) + filename_set.append(shortname) + return shortname + +def create_feature_dict(files): + """ X_MSI_FEATURE and doc FileTag's can be used to collect files in a + hierarchy. This function collects the files into this hierarchy. 
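+ For example, a file tagged with X_MSI_FEATURE='Runtime' (a hypothetical
+ feature name) is collected under dict['Runtime'], a file carrying only the
+ DOC tag ends up under 'PACKAGING_DOC', and untagged files fall back to the
+ 'default' feature.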
+ """ + dict = {} + + def add_to_dict( feature, file ): + if not SCons.Util.is_List( feature ): + feature = [ feature ] + + for f in feature: + if not dict.has_key( f ): + dict[ f ] = [ file ] + else: + dict[ f ].append( file ) + + for file in files: + if hasattr( file, 'PACKAGING_X_MSI_FEATURE' ): + add_to_dict(file.PACKAGING_X_MSI_FEATURE, file) + elif hasattr( file, 'PACKAGING_DOC' ): + add_to_dict( 'PACKAGING_DOC', file ) + else: + add_to_dict( 'default', file ) + + return dict + +def generate_guids(root): + """ generates globally unique identifiers for parts of the xml which need + them. + + Component tags have a special requirement. Their UUID is only allowed to + change if the list of their contained resources has changed. This allows + for clean removal and proper updates. + + To handle this requirement, the uuid is generated with an md5 hashing the + whole subtree of a xml node. + """ + from hashlib import md5 + + # specify which tags need a guid and in which attribute this should be stored. + needs_id = { 'Product' : 'Id', + 'Package' : 'Id', + 'Component' : 'Guid', + } + + # find all XMl nodes matching the key, retrieve their attribute, hash their + # subtree, convert hash to string and add as a attribute to the xml node. + for (key,value) in needs_id.items(): + node_list = root.getElementsByTagName(key) + attribute = value + for node in node_list: + hash = md5(node.toxml()).hexdigest() + hash_str = '%s-%s-%s-%s-%s' % ( hash[:8], hash[8:12], hash[12:16], hash[16:20], hash[20:] ) + node.attributes[attribute] = hash_str + + + +def string_wxsfile(target, source, env): + return "building WiX file %s"%( target[0].path ) + +def build_wxsfile(target, source, env): + """ compiles a .wxs file from the keywords given in env['msi_spec'] and + by analyzing the tree of source nodes and their tags. + """ + file = open(target[0].abspath, 'w') + + try: + # Create a document with the Wix root tag + doc = Document() + root = doc.createElement( 'Wix' ) + root.attributes['xmlns']='http://schemas.microsoft.com/wix/2003/01/wi' + doc.appendChild( root ) + + filename_set = [] # this is to circumvent duplicates in the shortnames + id_set = {} # this is to circumvent duplicates in the ids + + # Create the content + build_wxsfile_header_section(root, env) + build_wxsfile_file_section(root, source, env['NAME'], env['VERSION'], env['VENDOR'], filename_set, id_set) + generate_guids(root) + build_wxsfile_features_section(root, source, env['NAME'], env['VERSION'], env['SUMMARY'], id_set) + build_wxsfile_default_gui(root) + build_license_file(target[0].get_dir(), env) + + # write the xml to a file + file.write( doc.toprettyxml() ) + + # call a user specified function + if env.has_key('CHANGE_SPECFILE'): + env['CHANGE_SPECFILE'](target, source) + + except KeyError, e: + raise SCons.Errors.UserError( '"%s" package field for MSI is missing.' % e.args[0] ) + +# +# setup function +# +def create_default_directory_layout(root, NAME, VERSION, VENDOR, filename_set): + """ Create the wix default target directory layout and return the innermost + directory. + + We assume that the XML tree delivered in the root argument already contains + the Product tag. + + Everything is put under the PFiles directory property defined by WiX. + After that a directory with the 'VENDOR' tag is placed and then a + directory with the name of the project and its VERSION. 
This leads to the + following TARGET Directory Layout: + C:\<PFiles>\<Vendor>\<Projectname-Version>\ + Example: C:\Programme\Company\Product-1.2\ + """ + doc = Document() + d1 = doc.createElement( 'Directory' ) + d1.attributes['Id'] = 'TARGETDIR' + d1.attributes['Name'] = 'SourceDir' + + d2 = doc.createElement( 'Directory' ) + d2.attributes['Id'] = 'ProgramFilesFolder' + d2.attributes['Name'] = 'PFiles' + + d3 = doc.createElement( 'Directory' ) + d3.attributes['Id'] = 'VENDOR_folder' + d3.attributes['Name'] = escape( gen_dos_short_file_name( VENDOR, filename_set ) ) + d3.attributes['LongName'] = escape( VENDOR ) + + d4 = doc.createElement( 'Directory' ) + project_folder = "%s-%s" % ( NAME, VERSION ) + d4.attributes['Id'] = 'MY_DEFAULT_FOLDER' + d4.attributes['Name'] = escape( gen_dos_short_file_name( project_folder, filename_set ) ) + d4.attributes['LongName'] = escape( project_folder ) + + d1.childNodes.append( d2 ) + d2.childNodes.append( d3 ) + d3.childNodes.append( d4 ) + + root.getElementsByTagName('Product')[0].childNodes.append( d1 ) + + return d4 + +# +# mandatory and optional file tags +# +def build_wxsfile_file_section(root, files, NAME, VERSION, VENDOR, filename_set, id_set): + """ builds the Component sections of the wxs file with their included files. + + Files need to be specified in 8.3 format and in the long name format, long + filenames will be converted automatically. + + Features are specficied with the 'X_MSI_FEATURE' or 'DOC' FileTag. + """ + root = create_default_directory_layout( root, NAME, VERSION, VENDOR, filename_set ) + components = create_feature_dict( files ) + factory = Document() + + def get_directory( node, dir ): + """ returns the node under the given node representing the directory. + + Returns the component node if dir is None or empty. + """ + if dir == '' or not dir: + return node + + Directory = node + dir_parts = dir.split(os.path.sep) + + # to make sure that our directory ids are unique, the parent folders are + # consecutively added to upper_dir + upper_dir = '' + + # walk down the xml tree finding parts of the directory + dir_parts = filter( lambda d: d != '', dir_parts ) + for d in dir_parts[:]: + already_created = filter( lambda c: c.nodeName == 'Directory' and c.attributes['LongName'].value == escape(d), Directory.childNodes ) + + if already_created != []: + Directory = already_created[0] + dir_parts.remove(d) + upper_dir += d + else: + break + + for d in dir_parts: + nDirectory = factory.createElement( 'Directory' ) + nDirectory.attributes['LongName'] = escape( d ) + nDirectory.attributes['Name'] = escape( gen_dos_short_file_name( d, filename_set ) ) + upper_dir += d + nDirectory.attributes['Id'] = convert_to_id( upper_dir, id_set ) + + Directory.childNodes.append( nDirectory ) + Directory = nDirectory + + return Directory + + for file in files: + drive, path = os.path.splitdrive( file.PACKAGING_INSTALL_LOCATION ) + filename = os.path.basename( path ) + dirname = os.path.dirname( path ) + + h = { + # tagname : default value + 'PACKAGING_X_MSI_VITAL' : 'yes', + 'PACKAGING_X_MSI_FILEID' : convert_to_id(filename, id_set), + 'PACKAGING_X_MSI_LONGNAME' : filename, + 'PACKAGING_X_MSI_SHORTNAME' : gen_dos_short_file_name(filename, filename_set), + 'PACKAGING_X_MSI_SOURCE' : file.get_path(), + } + + # fill in the default tags given above. 
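+ # (e.g. a hypothetical file installed as 'bin/foo.exe' ends up with
+ # LongName 'foo.exe', an upper-cased 8.3 Name such as 'FOO.EXE', and an Id
+ # derived from the filename, unless the user tagged it otherwise.)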
+ for k,v in [ (k, v) for (k,v) in h.items() if not hasattr(file, k) ]: + setattr( file, k, v ) + + File = factory.createElement( 'File' ) + File.attributes['LongName'] = escape( file.PACKAGING_X_MSI_LONGNAME ) + File.attributes['Name'] = escape( file.PACKAGING_X_MSI_SHORTNAME ) + File.attributes['Source'] = escape( file.PACKAGING_X_MSI_SOURCE ) + File.attributes['Id'] = escape( file.PACKAGING_X_MSI_FILEID ) + File.attributes['Vital'] = escape( file.PACKAGING_X_MSI_VITAL ) + + # create the <Component> Tag under which this file should appear + Component = factory.createElement('Component') + Component.attributes['DiskId'] = '1' + Component.attributes['Id'] = convert_to_id( filename, id_set ) + + # hang the component node under the root node and the file node + # under the component node. + Directory = get_directory( root, dirname ) + Directory.childNodes.append( Component ) + Component.childNodes.append( File ) + +# +# additional functions +# +def build_wxsfile_features_section(root, files, NAME, VERSION, SUMMARY, id_set): + """ This function creates the <features> tag based on the supplied xml tree. + + This is achieved by finding all <component>s and adding them to a default target. + + It should be called after the tree has been built completly. We assume + that a MY_DEFAULT_FOLDER Property is defined in the wxs file tree. + + Furthermore a top-level with the name and VERSION of the software will be created. + + An PACKAGING_X_MSI_FEATURE can either be a string, where the feature + DESCRIPTION will be the same as its title or a Tuple, where the first + part will be its title and the second its DESCRIPTION. + """ + factory = Document() + Feature = factory.createElement('Feature') + Feature.attributes['Id'] = 'complete' + Feature.attributes['ConfigurableDirectory'] = 'MY_DEFAULT_FOLDER' + Feature.attributes['Level'] = '1' + Feature.attributes['Title'] = escape( '%s %s' % (NAME, VERSION) ) + Feature.attributes['Description'] = escape( SUMMARY ) + Feature.attributes['Display'] = 'expand' + + for (feature, files) in create_feature_dict(files).items(): + SubFeature = factory.createElement('Feature') + SubFeature.attributes['Level'] = '1' + + if SCons.Util.is_Tuple(feature): + SubFeature.attributes['Id'] = convert_to_id( feature[0], id_set ) + SubFeature.attributes['Title'] = escape(feature[0]) + SubFeature.attributes['Description'] = escape(feature[1]) + else: + SubFeature.attributes['Id'] = convert_to_id( feature, id_set ) + if feature=='default': + SubFeature.attributes['Description'] = 'Main Part' + SubFeature.attributes['Title'] = 'Main Part' + elif feature=='PACKAGING_DOC': + SubFeature.attributes['Description'] = 'Documentation' + SubFeature.attributes['Title'] = 'Documentation' + else: + SubFeature.attributes['Description'] = escape(feature) + SubFeature.attributes['Title'] = escape(feature) + + # build the componentrefs. As one of the design decision is that every + # file is also a component we walk the list of files and create a + # reference. 
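+ # (Each ComponentRef Id below is produced by convert_to_id() from the file's
+ # basename using the shared id_set, so it lines up with the Id of the
+ # <Component> created for that file in build_wxsfile_file_section() above.)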
+ for f in files: + ComponentRef = factory.createElement('ComponentRef') + ComponentRef.attributes['Id'] = convert_to_id( os.path.basename(f.get_path()), id_set ) + SubFeature.childNodes.append(ComponentRef) + + Feature.childNodes.append(SubFeature) + + root.getElementsByTagName('Product')[0].childNodes.append(Feature) + +def build_wxsfile_default_gui(root): + """ this function adds a default GUI to the wxs file + """ + factory = Document() + Product = root.getElementsByTagName('Product')[0] + + UIRef = factory.createElement('UIRef') + UIRef.attributes['Id'] = 'WixUI_Mondo' + Product.childNodes.append(UIRef) + + UIRef = factory.createElement('UIRef') + UIRef.attributes['Id'] = 'WixUI_ErrorProgressText' + Product.childNodes.append(UIRef) + +def build_license_file(directory, spec): + """ creates a License.rtf file with the content of "X_MSI_LICENSE_TEXT" + in the given directory + """ + name, text = '', '' + + try: + name = spec['LICENSE'] + text = spec['X_MSI_LICENSE_TEXT'] + except KeyError: + pass # ignore this as X_MSI_LICENSE_TEXT is optional + + if name!='' or text!='': + file = open( os.path.join(directory.get_path(), 'License.rtf'), 'w' ) + file.write('{\\rtf') + if text!='': + file.write(text.replace('\n', '\\par ')) + else: + file.write(name+'\\par\\par') + file.write('}') + file.close() + +# +# mandatory and optional package tags +# +def build_wxsfile_header_section(root, spec): + """ Adds the xml file node which define the package meta-data. + """ + # Create the needed DOM nodes and add them at the correct position in the tree. + factory = Document() + Product = factory.createElement( 'Product' ) + Package = factory.createElement( 'Package' ) + + root.childNodes.append( Product ) + Product.childNodes.append( Package ) + + # set "mandatory" default values + if not spec.has_key('X_MSI_LANGUAGE'): + spec['X_MSI_LANGUAGE'] = '1033' # select english + + # mandatory sections, will throw a KeyError if the tag is not available + Product.attributes['Name'] = escape( spec['NAME'] ) + Product.attributes['Version'] = escape( spec['VERSION'] ) + Product.attributes['Manufacturer'] = escape( spec['VENDOR'] ) + Product.attributes['Language'] = escape( spec['X_MSI_LANGUAGE'] ) + Package.attributes['Description'] = escape( spec['SUMMARY'] ) + + # now the optional tags, for which we avoid the KeyErrror exception + if spec.has_key( 'DESCRIPTION' ): + Package.attributes['Comments'] = escape( spec['DESCRIPTION'] ) + + if spec.has_key( 'X_MSI_UPGRADE_CODE' ): + Package.attributes['X_MSI_UPGRADE_CODE'] = escape( spec['X_MSI_UPGRADE_CODE'] ) + + # We hardcode the media tag as our current model cannot handle it. + Media = factory.createElement('Media') + Media.attributes['Id'] = '1' + Media.attributes['Cabinet'] = 'default.cab' + Media.attributes['EmbedCab'] = 'yes' + root.getElementsByTagName('Product')[0].childNodes.append(Media) + +# this builder is the entry-point for .wxs file compiler. +wxs_builder = Builder( + action = Action( build_wxsfile, string_wxsfile ), + ensure_suffix = '.wxs' ) + +def package(env, target, source, PACKAGEROOT, NAME, VERSION, + DESCRIPTION, SUMMARY, VENDOR, X_MSI_LANGUAGE, **kw): + # make sure that the Wix Builder is in the environment + SCons.Tool.Tool('wix').generate(env) + + # get put the keywords for the specfile compiler. These are the arguments + # given to the package function and all optional ones stored in kw, minus + # the the source, target and env one. 
+ loc = locals() + del loc['kw'] + kw.update(loc) + del kw['source'], kw['target'], kw['env'] + + # strip the install builder from the source files + target, source = stripinstallbuilder(target, source, env) + + # put the arguments into the env and call the specfile builder. + env['msi_spec'] = kw + specfile = apply( wxs_builder, [env, target, source], kw ) + + # now call the WiX Tool with the built specfile added as a source. + msifile = env.WiX(target, specfile) + + # return the target and source tuple. + return (msifile, source+[specfile]) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/packaging/rpm.py b/engine/SCons/Tool/packaging/rpm.py new file mode 100644 index 0000000..6cb613d --- /dev/null +++ b/engine/SCons/Tool/packaging/rpm.py @@ -0,0 +1,367 @@ +"""SCons.Tool.Packaging.rpm + +The rpm packager. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/packaging/rpm.py 4577 2009/12/27 19:43:56 scons" + +import os +import string + +import SCons.Builder + +from SCons.Environment import OverrideEnvironment +from SCons.Tool.packaging import stripinstallbuilder, src_targz +from SCons.Errors import UserError + +def package(env, target, source, PACKAGEROOT, NAME, VERSION, + PACKAGEVERSION, DESCRIPTION, SUMMARY, X_RPM_GROUP, LICENSE, + **kw): + # initialize the rpm tool + SCons.Tool.Tool('rpm').generate(env) + + bld = env['BUILDERS']['Rpm'] + + # Generate a UserError whenever the target name has been set explicitly, + # since rpm does not allow for controlling it. This is detected by + # checking if the target has been set to the default by the Package() + # Environment function. + if str(target[0])!="%s-%s"%(NAME, VERSION): + raise UserError( "Setting target is not supported for rpm." ) + else: + # This should be overridable from the construction environment, + # which it is by using ARCHITECTURE=. + # Guessing based on what os.uname() returns at least allows it + # to work for both i386 and x86_64 Linux systems. 
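+ # (For example, os.uname()[4] on 32-bit x86 machines typically reports
+ # 'i686' or 'i586', both of which are mapped to 'i386' below; passing an
+ # ARCHITECTURE= keyword overrides the guess entirely.)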
+ archmap = { + 'i686' : 'i386', + 'i586' : 'i386', + 'i486' : 'i386', + } + + buildarchitecture = os.uname()[4] + buildarchitecture = archmap.get(buildarchitecture, buildarchitecture) + + if kw.has_key('ARCHITECTURE'): + buildarchitecture = kw['ARCHITECTURE'] + + fmt = '%s-%s-%s.%s.rpm' + srcrpm = fmt % (NAME, VERSION, PACKAGEVERSION, 'src') + binrpm = fmt % (NAME, VERSION, PACKAGEVERSION, buildarchitecture) + + target = [ srcrpm, binrpm ] + + # get the correct arguments into the kw hash + loc=locals() + del loc['kw'] + kw.update(loc) + del kw['source'], kw['target'], kw['env'] + + # if no "SOURCE_URL" tag is given add a default one. + if not kw.has_key('SOURCE_URL'): + #kw['SOURCE_URL']=(str(target[0])+".tar.gz").replace('.rpm', '') + kw['SOURCE_URL']=string.replace(str(target[0])+".tar.gz", '.rpm', '') + + # mangle the source and target list for the rpmbuild + env = OverrideEnvironment(env, kw) + target, source = stripinstallbuilder(target, source, env) + target, source = addspecfile(target, source, env) + target, source = collectintargz(target, source, env) + + # now call the rpm builder to actually build the packet. + return apply(bld, [env, target, source], kw) + +def collectintargz(target, source, env): + """ Puts all source files into a tar.gz file. """ + # the rpm tool depends on a source package, until this is chagned + # this hack needs to be here that tries to pack all sources in. + sources = env.FindSourceFiles() + + # filter out the target we are building the source list for. + #sources = [s for s in sources if not (s in target)] + sources = filter(lambda s, t=target: not (s in t), sources) + + # find the .spec file for rpm and add it since it is not necessarily found + # by the FindSourceFiles function. + #sources.extend( [s for s in source if str(s).rfind('.spec')!=-1] ) + spec_file = lambda s: string.rfind(str(s), '.spec') != -1 + sources.extend( filter(spec_file, source) ) + + # as the source contains the url of the source package this rpm package + # is built from, we extract the target name + #tarball = (str(target[0])+".tar.gz").replace('.rpm', '') + tarball = string.replace(str(target[0])+".tar.gz", '.rpm', '') + try: + #tarball = env['SOURCE_URL'].split('/')[-1] + tarball = string.split(env['SOURCE_URL'], '/')[-1] + except KeyError, e: + raise SCons.Errors.UserError( "Missing PackageTag '%s' for RPM packager" % e.args[0] ) + + tarball = src_targz.package(env, source=sources, target=tarball, + PACKAGEROOT=env['PACKAGEROOT'], ) + + return (target, tarball) + +def addspecfile(target, source, env): + specfile = "%s-%s" % (env['NAME'], env['VERSION']) + + bld = SCons.Builder.Builder(action = build_specfile, + suffix = '.spec', + target_factory = SCons.Node.FS.File) + + source.extend(bld(env, specfile, source)) + + return (target,source) + +def build_specfile(target, source, env): + """ Builds a RPM specfile from a dictionary with string metadata and + by analyzing a tree of nodes. + """ + file = open(target[0].abspath, 'w') + str = "" + + try: + file.write( build_specfile_header(env) ) + file.write( build_specfile_sections(env) ) + file.write( build_specfile_filesection(env, source) ) + file.close() + + # call a user specified function + if env.has_key('CHANGE_SPECFILE'): + env['CHANGE_SPECFILE'](target, source) + + except KeyError, e: + raise SCons.Errors.UserError( '"%s" package field for RPM is missing.' % e.args[0] ) + + +# +# mandatory and optional package tag section +# +def build_specfile_sections(spec): + """ Builds the sections of a rpm specfile. 
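+ For example, a DESCRIPTION tag is rendered as the '%description' section and
+ an X_RPM_POSTINSTALL tag becomes the '%post' scriptlet; %prep, %build,
+ %install and %clean receive default bodies below when the corresponding
+ X_RPM_* tags are not supplied.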
+ """ + str = "" + + mandatory_sections = { + 'DESCRIPTION' : '\n%%description\n%s\n\n', } + + str = str + SimpleTagCompiler(mandatory_sections).compile( spec ) + + optional_sections = { + 'DESCRIPTION_' : '%%description -l %s\n%s\n\n', + 'CHANGELOG' : '%%changelog\n%s\n\n', + 'X_RPM_PREINSTALL' : '%%pre\n%s\n\n', + 'X_RPM_POSTINSTALL' : '%%post\n%s\n\n', + 'X_RPM_PREUNINSTALL' : '%%preun\n%s\n\n', + 'X_RPM_POSTUNINSTALL' : '%%postun\n%s\n\n', + 'X_RPM_VERIFY' : '%%verify\n%s\n\n', + + # These are for internal use but could possibly be overriden + 'X_RPM_PREP' : '%%prep\n%s\n\n', + 'X_RPM_BUILD' : '%%build\n%s\n\n', + 'X_RPM_INSTALL' : '%%install\n%s\n\n', + 'X_RPM_CLEAN' : '%%clean\n%s\n\n', + } + + # Default prep, build, install and clean rules + # TODO: optimize those build steps, to not compile the project a second time + if not spec.has_key('X_RPM_PREP'): + spec['X_RPM_PREP'] = '[ -n "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != / ] && rm -rf "$RPM_BUILD_ROOT"' + '\n%setup -q' + + if not spec.has_key('X_RPM_BUILD'): + spec['X_RPM_BUILD'] = 'mkdir "$RPM_BUILD_ROOT"' + + if not spec.has_key('X_RPM_INSTALL'): + spec['X_RPM_INSTALL'] = 'scons --install-sandbox="$RPM_BUILD_ROOT" "$RPM_BUILD_ROOT"' + + if not spec.has_key('X_RPM_CLEAN'): + spec['X_RPM_CLEAN'] = '[ -n "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != / ] && rm -rf "$RPM_BUILD_ROOT"' + + str = str + SimpleTagCompiler(optional_sections, mandatory=0).compile( spec ) + + return str + +def build_specfile_header(spec): + """ Builds all section but the %file of a rpm specfile + """ + str = "" + + # first the mandatory sections + mandatory_header_fields = { + 'NAME' : '%%define name %s\nName: %%{name}\n', + 'VERSION' : '%%define version %s\nVersion: %%{version}\n', + 'PACKAGEVERSION' : '%%define release %s\nRelease: %%{release}\n', + 'X_RPM_GROUP' : 'Group: %s\n', + 'SUMMARY' : 'Summary: %s\n', + 'LICENSE' : 'License: %s\n', } + + str = str + SimpleTagCompiler(mandatory_header_fields).compile( spec ) + + # now the optional tags + optional_header_fields = { + 'VENDOR' : 'Vendor: %s\n', + 'X_RPM_URL' : 'Url: %s\n', + 'SOURCE_URL' : 'Source: %s\n', + 'SUMMARY_' : 'Summary(%s): %s\n', + 'X_RPM_DISTRIBUTION' : 'Distribution: %s\n', + 'X_RPM_ICON' : 'Icon: %s\n', + 'X_RPM_PACKAGER' : 'Packager: %s\n', + 'X_RPM_GROUP_' : 'Group(%s): %s\n', + + 'X_RPM_REQUIRES' : 'Requires: %s\n', + 'X_RPM_PROVIDES' : 'Provides: %s\n', + 'X_RPM_CONFLICTS' : 'Conflicts: %s\n', + 'X_RPM_BUILDREQUIRES' : 'BuildRequires: %s\n', + + 'X_RPM_SERIAL' : 'Serial: %s\n', + 'X_RPM_EPOCH' : 'Epoch: %s\n', + 'X_RPM_AUTOREQPROV' : 'AutoReqProv: %s\n', + 'X_RPM_EXCLUDEARCH' : 'ExcludeArch: %s\n', + 'X_RPM_EXCLUSIVEARCH' : 'ExclusiveArch: %s\n', + 'X_RPM_PREFIX' : 'Prefix: %s\n', + 'X_RPM_CONFLICTS' : 'Conflicts: %s\n', + + # internal use + 'X_RPM_BUILDROOT' : 'BuildRoot: %s\n', } + + # fill in default values: + # Adding a BuildRequires renders the .rpm unbuildable under System, which + # are not managed by rpm, since the database to resolve this dependency is + # missing (take Gentoo as an example) +# if not s.has_key('x_rpm_BuildRequires'): +# s['x_rpm_BuildRequires'] = 'scons' + + if not spec.has_key('X_RPM_BUILDROOT'): + spec['X_RPM_BUILDROOT'] = '%{_tmppath}/%{name}-%{version}-%{release}' + + str = str + SimpleTagCompiler(optional_header_fields, mandatory=0).compile( spec ) + return str + +# +# mandatory and optional file tags +# +def build_specfile_filesection(spec, files): + """ builds the %file section of the specfile + """ + str = '%files\n' + + if not 
spec.has_key('X_RPM_DEFATTR'): + spec['X_RPM_DEFATTR'] = '(-,root,root)' + + str = str + '%%defattr %s\n' % spec['X_RPM_DEFATTR'] + + supported_tags = { + 'PACKAGING_CONFIG' : '%%config %s', + 'PACKAGING_CONFIG_NOREPLACE' : '%%config(noreplace) %s', + 'PACKAGING_DOC' : '%%doc %s', + 'PACKAGING_UNIX_ATTR' : '%%attr %s', + 'PACKAGING_LANG_' : '%%lang(%s) %s', + 'PACKAGING_X_RPM_VERIFY' : '%%verify %s', + 'PACKAGING_X_RPM_DIR' : '%%dir %s', + 'PACKAGING_X_RPM_DOCDIR' : '%%docdir %s', + 'PACKAGING_X_RPM_GHOST' : '%%ghost %s', } + + for file in files: + # build the tagset + tags = {} + for k in supported_tags.keys(): + try: + tags[k]=getattr(file, k) + except AttributeError: + pass + + # compile the tagset + str = str + SimpleTagCompiler(supported_tags, mandatory=0).compile( tags ) + + str = str + ' ' + str = str + file.PACKAGING_INSTALL_LOCATION + str = str + '\n\n' + + return str + +class SimpleTagCompiler: + """ This class is a simple string substition utility: + the replacement specfication is stored in the tagset dictionary, something + like: + { "abc" : "cdef %s ", + "abc_" : "cdef %s %s" } + + the compile function gets a value dictionary, which may look like: + { "abc" : "ghij", + "abc_gh" : "ij" } + + The resulting string will be: + "cdef ghij cdef gh ij" + """ + def __init__(self, tagset, mandatory=1): + self.tagset = tagset + self.mandatory = mandatory + + def compile(self, values): + """ compiles the tagset and returns a str containing the result + """ + def is_international(tag): + #return tag.endswith('_') + return tag[-1:] == '_' + + def get_country_code(tag): + return tag[-2:] + + def strip_country_code(tag): + return tag[:-2] + + replacements = self.tagset.items() + + str = "" + #domestic = [ (k,v) for k,v in replacements if not is_international(k) ] + domestic = filter(lambda t, i=is_international: not i(t[0]), replacements) + for key, replacement in domestic: + try: + str = str + replacement % values[key] + except KeyError, e: + if self.mandatory: + raise e + + #international = [ (k,v) for k,v in replacements if is_international(k) ] + international = filter(lambda t, i=is_international: i(t[0]), replacements) + for key, replacement in international: + try: + #int_values_for_key = [ (get_country_code(k),v) for k,v in values.items() if strip_country_code(k) == key ] + x = filter(lambda t,key=key,s=strip_country_code: s(t[0]) == key, values.items()) + int_values_for_key = map(lambda t,g=get_country_code: (g(t[0]),t[1]), x) + for v in int_values_for_key: + str = str + replacement % v + except KeyError, e: + if self.mandatory: + raise e + + return str + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/packaging/src_tarbz2.py b/engine/SCons/Tool/packaging/src_tarbz2.py new file mode 100644 index 0000000..9bc32b7 --- /dev/null +++ b/engine/SCons/Tool/packaging/src_tarbz2.py @@ -0,0 +1,43 @@ +"""SCons.Tool.Packaging.tarbz2 + +The tarbz2 SRC packager. 
+""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/packaging/src_tarbz2.py 4577 2009/12/27 19:43:56 scons" + +from SCons.Tool.packaging import putintopackageroot + +def package(env, target, source, PACKAGEROOT, **kw): + bld = env['BUILDERS']['Tar'] + bld.set_suffix('.tar.bz2') + target, source = putintopackageroot(target, source, env, PACKAGEROOT, honor_install_location=0) + return bld(env, target, source, TARFLAGS='-jc') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/packaging/src_targz.py b/engine/SCons/Tool/packaging/src_targz.py new file mode 100644 index 0000000..a40fd9b --- /dev/null +++ b/engine/SCons/Tool/packaging/src_targz.py @@ -0,0 +1,43 @@ +"""SCons.Tool.Packaging.targz + +The targz SRC packager. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/packaging/src_targz.py 4577 2009/12/27 19:43:56 scons" + +from SCons.Tool.packaging import putintopackageroot + +def package(env, target, source, PACKAGEROOT, **kw): + bld = env['BUILDERS']['Tar'] + bld.set_suffix('.tar.gz') + target, source = putintopackageroot(target, source, env, PACKAGEROOT, honor_install_location=0) + return bld(env, target, source, TARFLAGS='-zc') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/packaging/src_zip.py b/engine/SCons/Tool/packaging/src_zip.py new file mode 100644 index 0000000..173242c --- /dev/null +++ b/engine/SCons/Tool/packaging/src_zip.py @@ -0,0 +1,43 @@ +"""SCons.Tool.Packaging.zip + +The zip SRC packager. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/packaging/src_zip.py 4577 2009/12/27 19:43:56 scons" + +from SCons.Tool.packaging import putintopackageroot + +def package(env, target, source, PACKAGEROOT, **kw): + bld = env['BUILDERS']['Zip'] + bld.set_suffix('.zip') + target, source = putintopackageroot(target, source, env, PACKAGEROOT, honor_install_location=0) + return bld(env, target, source) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/packaging/tarbz2.py b/engine/SCons/Tool/packaging/tarbz2.py new file mode 100644 index 0000000..5c4d3f9 --- /dev/null +++ b/engine/SCons/Tool/packaging/tarbz2.py @@ -0,0 +1,44 @@ +"""SCons.Tool.Packaging.tarbz2 + +The tarbz2 SRC packager. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/packaging/tarbz2.py 4577 2009/12/27 19:43:56 scons" + +from SCons.Tool.packaging import stripinstallbuilder, putintopackageroot + +def package(env, target, source, PACKAGEROOT, **kw): + bld = env['BUILDERS']['Tar'] + bld.set_suffix('.tar.gz') + target, source = putintopackageroot(target, source, env, PACKAGEROOT) + target, source = stripinstallbuilder(target, source, env) + return bld(env, target, source, TARFLAGS='-jc') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/packaging/targz.py b/engine/SCons/Tool/packaging/targz.py new file mode 100644 index 0000000..5cc0cd5 --- /dev/null +++ b/engine/SCons/Tool/packaging/targz.py @@ -0,0 +1,44 @@ +"""SCons.Tool.Packaging.targz + +The targz SRC packager. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/packaging/targz.py 4577 2009/12/27 19:43:56 scons" + +from SCons.Tool.packaging import stripinstallbuilder, putintopackageroot + +def package(env, target, source, PACKAGEROOT, **kw): + bld = env['BUILDERS']['Tar'] + bld.set_suffix('.tar.gz') + target, source = stripinstallbuilder(target, source, env) + target, source = putintopackageroot(target, source, env, PACKAGEROOT) + return bld(env, target, source, TARFLAGS='-zc') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/packaging/zip.py b/engine/SCons/Tool/packaging/zip.py new file mode 100644 index 0000000..faeaacf --- /dev/null +++ b/engine/SCons/Tool/packaging/zip.py @@ -0,0 +1,44 @@ +"""SCons.Tool.Packaging.zip + +The zip SRC packager. 
+""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/packaging/zip.py 4577 2009/12/27 19:43:56 scons" + +from SCons.Tool.packaging import stripinstallbuilder, putintopackageroot + +def package(env, target, source, PACKAGEROOT, **kw): + bld = env['BUILDERS']['Zip'] + bld.set_suffix('.zip') + target, source = stripinstallbuilder(target, source, env) + target, source = putintopackageroot(target, source, env, PACKAGEROOT) + return bld(env, target, source) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/pdf.py b/engine/SCons/Tool/pdf.py new file mode 100644 index 0000000..3a3d9d4 --- /dev/null +++ b/engine/SCons/Tool/pdf.py @@ -0,0 +1,78 @@ +"""SCons.Tool.pdf + +Common PDF Builder definition for various other Tool modules that use it. +Add an explicit action to run epstopdf to convert .eps files to .pdf + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# + +__revision__ = "src/engine/SCons/Tool/pdf.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Builder +import SCons.Tool + +PDFBuilder = None + +EpsPdfAction = SCons.Action.Action('$EPSTOPDFCOM', '$EPSTOPDFCOMSTR') + +def generate(env): + try: + env['BUILDERS']['PDF'] + except KeyError: + global PDFBuilder + if PDFBuilder is None: + PDFBuilder = SCons.Builder.Builder(action = {}, + source_scanner = SCons.Tool.PDFLaTeXScanner, + prefix = '$PDFPREFIX', + suffix = '$PDFSUFFIX', + emitter = {}, + source_ext_match = None, + single_source=True) + env['BUILDERS']['PDF'] = PDFBuilder + + env['PDFPREFIX'] = '' + env['PDFSUFFIX'] = '.pdf' + +# put the epstopdf builder in this routine so we can add it after +# the pdftex builder so that one is the default for no source suffix +def generate2(env): + bld = env['BUILDERS']['PDF'] + #bld.add_action('.ps', EpsPdfAction) # this is covered by direct Ghostcript action in gs.py + bld.add_action('.eps', EpsPdfAction) + + env['EPSTOPDF'] = 'epstopdf' + env['EPSTOPDFFLAGS'] = SCons.Util.CLVar('') + env['EPSTOPDFCOM'] = '$EPSTOPDF $EPSTOPDFFLAGS ${SOURCE} --outfile=${TARGET}' + +def exists(env): + # This only puts a skeleton Builder in place, so if someone + # references this Tool directly, it's always "available." + return 1 + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/pdflatex.py b/engine/SCons/Tool/pdflatex.py new file mode 100644 index 0000000..a510c84 --- /dev/null +++ b/engine/SCons/Tool/pdflatex.py @@ -0,0 +1,83 @@ +"""SCons.Tool.pdflatex + +Tool-specific initialization for pdflatex. +Generates .pdf files from .latex or .ltx files + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
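pdf.py only installs a skeleton PDF builder; the TeX tools add their actions to it, and generate2() is called after those so a source without a recognised suffix still defaults to pdftex rather than epstopdf. A minimal usage sketch, assuming epstopdf and a TeX distribution are on the PATH; file names are placeholders:

    env = Environment(tools=['default', 'pdftex'])   # pulls in pdf.generate() and generate2()
    env.PDF('figure.pdf', 'figure.eps')              # .eps handled by $EPSTOPDFCOM
    env.PDF('paper.pdf', 'paper.tex')                # .tex handled by the TeX/LaTeX action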
+# + +__revision__ = "src/engine/SCons/Tool/pdflatex.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Action +import SCons.Util +import SCons.Tool.pdf +import SCons.Tool.tex + +PDFLaTeXAction = None + +def PDFLaTeXAuxFunction(target = None, source= None, env=None): + result = SCons.Tool.tex.InternalLaTeXAuxAction( PDFLaTeXAction, target, source, env ) + if result != 0: + print env['PDFLATEX']," returned an error, check the log file" + return result + +PDFLaTeXAuxAction = None + +def generate(env): + """Add Builders and construction variables for pdflatex to an Environment.""" + global PDFLaTeXAction + if PDFLaTeXAction is None: + PDFLaTeXAction = SCons.Action.Action('$PDFLATEXCOM', '$PDFLATEXCOMSTR') + + global PDFLaTeXAuxAction + if PDFLaTeXAuxAction is None: + PDFLaTeXAuxAction = SCons.Action.Action(PDFLaTeXAuxFunction, + strfunction=SCons.Tool.tex.TeXLaTeXStrFunction) + + env.AppendUnique(LATEXSUFFIXES=SCons.Tool.LaTeXSuffixes) + + import pdf + pdf.generate(env) + + bld = env['BUILDERS']['PDF'] + bld.add_action('.ltx', PDFLaTeXAuxAction) + bld.add_action('.latex', PDFLaTeXAuxAction) + bld.add_emitter('.ltx', SCons.Tool.tex.tex_pdf_emitter) + bld.add_emitter('.latex', SCons.Tool.tex.tex_pdf_emitter) + + SCons.Tool.tex.generate_common(env) + +def exists(env): + return env.Detect('pdflatex') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/pdftex.py b/engine/SCons/Tool/pdftex.py new file mode 100644 index 0000000..35a07ee --- /dev/null +++ b/engine/SCons/Tool/pdftex.py @@ -0,0 +1,108 @@ +"""SCons.Tool.pdftex + +Tool-specific initialization for pdftex. +Generates .pdf files from .tex files + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/pdftex.py 4577 2009/12/27 19:43:56 scons" + +import os +import SCons.Action +import SCons.Util +import SCons.Tool.tex + +PDFTeXAction = None + +# This action might be needed more than once if we are dealing with +# labels and bibtex. 
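pdflatex.py registers the aux action for .ltx and .latex sources, so the PDF builder reruns pdflatex (and bibtex/makeindex, via InternalLaTeXAuxAction) until references settle. A short usage sketch with placeholder file names:

    env = Environment(tools=['default', 'pdflatex'])
    env['PDFLATEXCOMSTR'] = 'Building ${TARGET} from ${SOURCE}'   # optional: quieter output
    env.PDF('report.pdf', 'report.ltx')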
+PDFLaTeXAction = None + +def PDFLaTeXAuxAction(target = None, source= None, env=None): + result = SCons.Tool.tex.InternalLaTeXAuxAction( PDFLaTeXAction, target, source, env ) + return result + +def PDFTeXLaTeXFunction(target = None, source= None, env=None): + """A builder for TeX and LaTeX that scans the source file to + decide the "flavor" of the source and then executes the appropriate + program.""" + basedir = os.path.split(str(source[0]))[0] + abspath = os.path.abspath(basedir) + + if SCons.Tool.tex.is_LaTeX(source,env,abspath): + result = PDFLaTeXAuxAction(target,source,env) + if result != 0: + print env['PDFLATEX']," returned an error, check the log file" + else: + result = PDFTeXAction(target,source,env) + if result != 0: + print env['PDFTEX']," returned an error, check the log file" + return result + +PDFTeXLaTeXAction = None + +def generate(env): + """Add Builders and construction variables for pdftex to an Environment.""" + global PDFTeXAction + if PDFTeXAction is None: + PDFTeXAction = SCons.Action.Action('$PDFTEXCOM', '$PDFTEXCOMSTR') + + global PDFLaTeXAction + if PDFLaTeXAction is None: + PDFLaTeXAction = SCons.Action.Action("$PDFLATEXCOM", "$PDFLATEXCOMSTR") + + global PDFTeXLaTeXAction + if PDFTeXLaTeXAction is None: + PDFTeXLaTeXAction = SCons.Action.Action(PDFTeXLaTeXFunction, + strfunction=SCons.Tool.tex.TeXLaTeXStrFunction) + + env.AppendUnique(LATEXSUFFIXES=SCons.Tool.LaTeXSuffixes) + + import pdf + pdf.generate(env) + + bld = env['BUILDERS']['PDF'] + bld.add_action('.tex', PDFTeXLaTeXAction) + bld.add_emitter('.tex', SCons.Tool.tex.tex_pdf_emitter) + + # Add the epstopdf builder after the pdftex builder + # so pdftex is the default for no source suffix + pdf.generate2(env) + + SCons.Tool.tex.generate_common(env) + +def exists(env): + return env.Detect('pdftex') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/qt.py b/engine/SCons/Tool/qt.py new file mode 100644 index 0000000..f4a35b4 --- /dev/null +++ b/engine/SCons/Tool/qt.py @@ -0,0 +1,336 @@ + +"""SCons.Tool.qt + +Tool-specific initialization for Qt. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
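The core of pdftex.py is PDFTeXLaTeXFunction(): a single '.tex' action that inspects the source and forwards to either the plain-TeX or the LaTeX action. A simplified, self-contained sketch of that dispatch pattern (the real detection is SCons.Tool.tex.is_LaTeX and is considerably more thorough):

    import re

    def looks_like_latex(text):
        # Crude stand-in for is_LaTeX(): look for \documentclass or \documentstyle.
        return re.search(r'\\document(class|style)', text) is not None

    def build_pdf(path, run_pdftex, run_pdflatex):
        with open(path) as f:
            source = f.read()
        action = run_pdflatex if looks_like_latex(source) else run_pdftex
        return action(path)    # mirrors the PDFLaTeXAuxAction / PDFTeXAction split above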
+# + +__revision__ = "src/engine/SCons/Tool/qt.py 4577 2009/12/27 19:43:56 scons" + +import os.path +import re + +import SCons.Action +import SCons.Builder +import SCons.Defaults +import SCons.Scanner +import SCons.Tool +import SCons.Util + +class ToolQtWarning(SCons.Warnings.Warning): + pass + +class GeneratedMocFileNotIncluded(ToolQtWarning): + pass + +class QtdirNotFound(ToolQtWarning): + pass + +SCons.Warnings.enableWarningClass(ToolQtWarning) + +header_extensions = [".h", ".hxx", ".hpp", ".hh"] +if SCons.Util.case_sensitive_suffixes('.h', '.H'): + header_extensions.append('.H') +cplusplus = __import__('c++', globals(), locals(), []) +cxx_suffixes = cplusplus.CXXSuffixes + +def checkMocIncluded(target, source, env): + moc = target[0] + cpp = source[0] + # looks like cpp.includes is cleared before the build stage :-( + # not really sure about the path transformations (moc.cwd? cpp.cwd?) :-/ + path = SCons.Defaults.CScan.path(env, moc.cwd) + includes = SCons.Defaults.CScan(cpp, env, path) + if not moc in includes: + SCons.Warnings.warn( + GeneratedMocFileNotIncluded, + "Generated moc file '%s' is not included by '%s'" % + (str(moc), str(cpp))) + +def find_file(filename, paths, node_factory): + for dir in paths: + node = node_factory(filename, dir) + if node.rexists(): + return node + return None + +class _Automoc: + """ + Callable class, which works as an emitter for Programs, SharedLibraries and + StaticLibraries. + """ + + def __init__(self, objBuilderName): + self.objBuilderName = objBuilderName + + def __call__(self, target, source, env): + """ + Smart autoscan function. Gets the list of objects for the Program + or Lib. Adds objects and builders for the special qt files. + """ + try: + if int(env.subst('$QT_AUTOSCAN')) == 0: + return target, source + except ValueError: + pass + try: + debug = int(env.subst('$QT_DEBUG')) + except ValueError: + debug = 0 + + # some shortcuts used in the scanner + splitext = SCons.Util.splitext + objBuilder = getattr(env, self.objBuilderName) + + # some regular expressions: + # Q_OBJECT detection + q_object_search = re.compile(r'[^A-Za-z0-9]Q_OBJECT[^A-Za-z0-9]') + # cxx and c comment 'eater' + #comment = re.compile(r'(//.*)|(/\*(([^*])|(\*[^/]))*\*/)') + # CW: something must be wrong with the regexp. See also bug #998222 + # CURRENTLY THERE IS NO TEST CASE FOR THAT + + # The following is kind of hacky to get builders working properly (FIXME) + objBuilderEnv = objBuilder.env + objBuilder.env = env + mocBuilderEnv = env.Moc.env + env.Moc.env = env + + # make a deep copy for the result; MocH objects will be appended + out_sources = source[:] + + for obj in source: + if not obj.has_builder(): + # binary obj file provided + if debug: + print "scons: qt: '%s' seems to be a binary. Discarded." % str(obj) + continue + cpp = obj.sources[0] + if not splitext(str(cpp))[1] in cxx_suffixes: + if debug: + print "scons: qt: '%s' is no cxx file. Discarded." % str(cpp) + # c or fortran source + continue + #cpp_contents = comment.sub('', cpp.get_text_contents()) + cpp_contents = cpp.get_text_contents() + h=None + for h_ext in header_extensions: + # try to find the header file in the corresponding source + # directory + hname = splitext(cpp.name)[0] + h_ext + h = find_file(hname, (cpp.get_dir(),), env.File) + if h: + if debug: + print "scons: qt: Scanning '%s' (header of '%s')" % (str(h), str(cpp)) + #h_contents = comment.sub('', h.get_text_contents()) + h_contents = h.get_text_contents() + break + if not h and debug: + print "scons: qt: no header for '%s'." 
% (str(cpp)) + if h and q_object_search.search(h_contents): + # h file with the Q_OBJECT macro found -> add moc_cpp + moc_cpp = env.Moc(h) + moc_o = objBuilder(moc_cpp) + out_sources.append(moc_o) + #moc_cpp.target_scanner = SCons.Defaults.CScan + if debug: + print "scons: qt: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(h), str(moc_cpp)) + if cpp and q_object_search.search(cpp_contents): + # cpp file with Q_OBJECT macro found -> add moc + # (to be included in cpp) + moc = env.Moc(cpp) + env.Ignore(moc, moc) + if debug: + print "scons: qt: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(cpp), str(moc)) + #moc.source_scanner = SCons.Defaults.CScan + # restore the original env attributes (FIXME) + objBuilder.env = objBuilderEnv + env.Moc.env = mocBuilderEnv + + return (target, out_sources) + +AutomocShared = _Automoc('SharedObject') +AutomocStatic = _Automoc('StaticObject') + +def _detect(env): + """Not really safe, but fast method to detect the QT library""" + QTDIR = None + if not QTDIR: + QTDIR = env.get('QTDIR',None) + if not QTDIR: + QTDIR = os.environ.get('QTDIR',None) + if not QTDIR: + moc = env.WhereIs('moc') + if moc: + QTDIR = os.path.dirname(os.path.dirname(moc)) + SCons.Warnings.warn( + QtdirNotFound, + "Could not detect qt, using moc executable as a hint (QTDIR=%s)" % QTDIR) + else: + QTDIR = None + SCons.Warnings.warn( + QtdirNotFound, + "Could not detect qt, using empty QTDIR") + return QTDIR + +def uicEmitter(target, source, env): + adjustixes = SCons.Util.adjustixes + bs = SCons.Util.splitext(str(source[0].name))[0] + bs = os.path.join(str(target[0].get_dir()),bs) + # first target (header) is automatically added by builder + if len(target) < 2: + # second target is implementation + target.append(adjustixes(bs, + env.subst('$QT_UICIMPLPREFIX'), + env.subst('$QT_UICIMPLSUFFIX'))) + if len(target) < 3: + # third target is moc file + target.append(adjustixes(bs, + env.subst('$QT_MOCHPREFIX'), + env.subst('$QT_MOCHSUFFIX'))) + return target, source + +def uicScannerFunc(node, env, path): + lookout = [] + lookout.extend(env['CPPPATH']) + lookout.append(str(node.rfile().dir)) + includes = re.findall("<include.*?>(.*?)</include>", node.get_text_contents()) + result = [] + for incFile in includes: + dep = env.FindFile(incFile,lookout) + if dep: + result.append(dep) + return result + +uicScanner = SCons.Scanner.Base(uicScannerFunc, + name = "UicScanner", + node_class = SCons.Node.FS.File, + node_factory = SCons.Node.FS.File, + recursive = 0) + +def generate(env): + """Add Builders and construction variables for qt to an Environment.""" + CLVar = SCons.Util.CLVar + Action = SCons.Action.Action + Builder = SCons.Builder.Builder + + env.SetDefault(QTDIR = _detect(env), + QT_BINPATH = os.path.join('$QTDIR', 'bin'), + QT_CPPPATH = os.path.join('$QTDIR', 'include'), + QT_LIBPATH = os.path.join('$QTDIR', 'lib'), + QT_MOC = os.path.join('$QT_BINPATH','moc'), + QT_UIC = os.path.join('$QT_BINPATH','uic'), + QT_LIB = 'qt', # may be set to qt-mt + + QT_AUTOSCAN = 1, # scan for moc'able sources + + # Some QT specific flags. I don't expect someone wants to + # manipulate those ... 
+ QT_UICIMPLFLAGS = CLVar(''), + QT_UICDECLFLAGS = CLVar(''), + QT_MOCFROMHFLAGS = CLVar(''), + QT_MOCFROMCXXFLAGS = CLVar('-i'), + + # suffixes/prefixes for the headers / sources to generate + QT_UICDECLPREFIX = '', + QT_UICDECLSUFFIX = '.h', + QT_UICIMPLPREFIX = 'uic_', + QT_UICIMPLSUFFIX = '$CXXFILESUFFIX', + QT_MOCHPREFIX = 'moc_', + QT_MOCHSUFFIX = '$CXXFILESUFFIX', + QT_MOCCXXPREFIX = '', + QT_MOCCXXSUFFIX = '.moc', + QT_UISUFFIX = '.ui', + + # Commands for the qt support ... + # command to generate header, implementation and moc-file + # from a .ui file + QT_UICCOM = [ + CLVar('$QT_UIC $QT_UICDECLFLAGS -o ${TARGETS[0]} $SOURCE'), + CLVar('$QT_UIC $QT_UICIMPLFLAGS -impl ${TARGETS[0].file} ' + '-o ${TARGETS[1]} $SOURCE'), + CLVar('$QT_MOC $QT_MOCFROMHFLAGS -o ${TARGETS[2]} ${TARGETS[0]}')], + # command to generate meta object information for a class + # declarated in a header + QT_MOCFROMHCOM = ( + '$QT_MOC $QT_MOCFROMHFLAGS -o ${TARGETS[0]} $SOURCE'), + # command to generate meta object information for a class + # declarated in a cpp file + QT_MOCFROMCXXCOM = [ + CLVar('$QT_MOC $QT_MOCFROMCXXFLAGS -o ${TARGETS[0]} $SOURCE'), + Action(checkMocIncluded,None)]) + + # ... and the corresponding builders + uicBld = Builder(action=SCons.Action.Action('$QT_UICCOM', '$QT_UICCOMSTR'), + emitter=uicEmitter, + src_suffix='$QT_UISUFFIX', + suffix='$QT_UICDECLSUFFIX', + prefix='$QT_UICDECLPREFIX', + source_scanner=uicScanner) + mocBld = Builder(action={}, prefix={}, suffix={}) + for h in header_extensions: + act = SCons.Action.Action('$QT_MOCFROMHCOM', '$QT_MOCFROMHCOMSTR') + mocBld.add_action(h, act) + mocBld.prefix[h] = '$QT_MOCHPREFIX' + mocBld.suffix[h] = '$QT_MOCHSUFFIX' + for cxx in cxx_suffixes: + act = SCons.Action.Action('$QT_MOCFROMCXXCOM', '$QT_MOCFROMCXXCOMSTR') + mocBld.add_action(cxx, act) + mocBld.prefix[cxx] = '$QT_MOCCXXPREFIX' + mocBld.suffix[cxx] = '$QT_MOCCXXSUFFIX' + + # register the builders + env['BUILDERS']['Uic'] = uicBld + env['BUILDERS']['Moc'] = mocBld + static_obj, shared_obj = SCons.Tool.createObjBuilders(env) + static_obj.add_src_builder('Uic') + shared_obj.add_src_builder('Uic') + + # We use the emitters of Program / StaticLibrary / SharedLibrary + # to scan for moc'able files + # We can't refer to the builders directly, we have to fetch them + # as Environment attributes because that sets them up to be called + # correctly later by our emitter. + env.AppendUnique(PROGEMITTER =[AutomocStatic], + SHLIBEMITTER=[AutomocShared], + LIBEMITTER =[AutomocStatic], + # Of course, we need to link against the qt libraries + CPPPATH=["$QT_CPPPATH"], + LIBPATH=["$QT_LIBPATH"], + LIBS=['$QT_LIB']) + +def exists(env): + return _detect(env) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/rmic.py b/engine/SCons/Tool/rmic.py new file mode 100644 index 0000000..6eaabc6 --- /dev/null +++ b/engine/SCons/Tool/rmic.py @@ -0,0 +1,121 @@ +"""SCons.Tool.rmic + +Tool-specific initialization for rmic. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
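With the qt tool loaded, moc handling is largely automatic: the _Automoc emitter attached to Program and the library builders scans each C++ source (and its matching header) for Q_OBJECT and schedules the corresponding Moc output next to the ordinary objects. A hedged SConstruct sketch; QTDIR and the file names are placeholders:

    env = Environment(tools=['default', 'qt'], QTDIR='/usr/lib/qt3')
    # QT_AUTOSCAN defaults to 1, so Q_OBJECT classes in these sources/headers
    # are moc'ed and compiled in without listing moc_*.cc explicitly.
    env.Program('app', ['main.cpp', 'mainwindow.cpp'])

.ui files go through the Uic builder, which emits the header, the uic_*.cc implementation and a moc_*.cc in one step.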
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/rmic.py 4577 2009/12/27 19:43:56 scons" + +import os.path +import string + +import SCons.Action +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +def emit_rmic_classes(target, source, env): + """Create and return lists of Java RMI stub and skeleton + class files to be created from a set of class files. + """ + class_suffix = env.get('JAVACLASSSUFFIX', '.class') + classdir = env.get('JAVACLASSDIR') + + if not classdir: + try: + s = source[0] + except IndexError: + classdir = '.' + else: + try: + classdir = s.attributes.java_classdir + except AttributeError: + classdir = '.' 
+ classdir = env.Dir(classdir).rdir() + if str(classdir) == '.': + c_ = None + else: + c_ = str(classdir) + os.sep + + slist = [] + for src in source: + try: + classname = src.attributes.java_classname + except AttributeError: + classname = str(src) + if c_ and classname[:len(c_)] == c_: + classname = classname[len(c_):] + if class_suffix and classname[:-len(class_suffix)] == class_suffix: + classname = classname[-len(class_suffix):] + s = src.rfile() + s.attributes.java_classdir = classdir + s.attributes.java_classname = classname + slist.append(s) + + stub_suffixes = ['_Stub'] + if env.get('JAVAVERSION') == '1.4': + stub_suffixes.append('_Skel') + + tlist = [] + for s in source: + for suff in stub_suffixes: + fname = string.replace(s.attributes.java_classname, '.', os.sep) + \ + suff + class_suffix + t = target[0].File(fname) + t.attributes.java_lookupdir = target[0] + tlist.append(t) + + return tlist, source + +RMICAction = SCons.Action.Action('$RMICCOM', '$RMICCOMSTR') + +RMICBuilder = SCons.Builder.Builder(action = RMICAction, + emitter = emit_rmic_classes, + src_suffix = '$JAVACLASSSUFFIX', + target_factory = SCons.Node.FS.Dir, + source_factory = SCons.Node.FS.File) + +def generate(env): + """Add Builders and construction variables for rmic to an Environment.""" + env['BUILDERS']['RMIC'] = RMICBuilder + + env['RMIC'] = 'rmic' + env['RMICFLAGS'] = SCons.Util.CLVar('') + env['RMICCOM'] = '$RMIC $RMICFLAGS -d ${TARGET.attributes.java_lookupdir} -classpath ${SOURCE.attributes.java_classdir} ${SOURCES.attributes.java_classname}' + env['JAVACLASSSUFFIX'] = '.class' + +def exists(env): + return env.Detect('rmic') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/rpcgen.py b/engine/SCons/Tool/rpcgen.py new file mode 100644 index 0000000..07a3161 --- /dev/null +++ b/engine/SCons/Tool/rpcgen.py @@ -0,0 +1,70 @@ +"""SCons.Tool.rpcgen + +Tool-specific initialization for RPCGEN tools. + +Three normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
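emit_rmic_classes() above derives the _Stub (and, for JAVAVERSION 1.4, _Skel) class files from the Java class names attached to the sources, so RMIC is normally fed the output of the Java builder. A hedged sketch with placeholder directory names:

    env = Environment(tools=['default', 'javac', 'rmic'])
    classes = env.Java(target='classes', source='src')   # .java -> .class
    env.RMIC(target='stubs', source=classes)              # <Class>_Stub.class (+ _Skel on 1.4)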
+# + +__revision__ = "src/engine/SCons/Tool/rpcgen.py 4577 2009/12/27 19:43:56 scons" + +from SCons.Builder import Builder +import SCons.Util + +cmd = "cd ${SOURCE.dir} && $RPCGEN -%s $RPCGENFLAGS %s -o ${TARGET.abspath} ${SOURCE.file}" + +rpcgen_client = cmd % ('l', '$RPCGENCLIENTFLAGS') +rpcgen_header = cmd % ('h', '$RPCGENHEADERFLAGS') +rpcgen_service = cmd % ('m', '$RPCGENSERVICEFLAGS') +rpcgen_xdr = cmd % ('c', '$RPCGENXDRFLAGS') + +def generate(env): + "Add RPCGEN Builders and construction variables for an Environment." + + client = Builder(action=rpcgen_client, suffix='_clnt.c', src_suffix='.x') + header = Builder(action=rpcgen_header, suffix='.h', src_suffix='.x') + service = Builder(action=rpcgen_service, suffix='_svc.c', src_suffix='.x') + xdr = Builder(action=rpcgen_xdr, suffix='_xdr.c', src_suffix='.x') + env.Append(BUILDERS={'RPCGenClient' : client, + 'RPCGenHeader' : header, + 'RPCGenService' : service, + 'RPCGenXDR' : xdr}) + env['RPCGEN'] = 'rpcgen' + env['RPCGENFLAGS'] = SCons.Util.CLVar('') + env['RPCGENCLIENTFLAGS'] = SCons.Util.CLVar('') + env['RPCGENHEADERFLAGS'] = SCons.Util.CLVar('') + env['RPCGENSERVICEFLAGS'] = SCons.Util.CLVar('') + env['RPCGENXDRFLAGS'] = SCons.Util.CLVar('') + +def exists(env): + return env.Detect('rpcgen') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/rpm.py b/engine/SCons/Tool/rpm.py new file mode 100644 index 0000000..150f75d --- /dev/null +++ b/engine/SCons/Tool/rpm.py @@ -0,0 +1,132 @@ +"""SCons.Tool.rpm + +Tool-specific initialization for rpm. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +The rpm tool calls the rpmbuild command. The first and only argument should a +tar.gz consisting of the source file and a specfile. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
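Each of the four rpcgen builders above runs the same command template with a different mode flag and output suffix. A usage sketch, assuming a protocol description proto.x (a placeholder name):

    env = Environment(tools=['default', 'rpcgen'])
    env.RPCGenHeader('proto.x')    # rpcgen -h  -> proto.h
    env.RPCGenClient('proto.x')    # rpcgen -l  -> proto_clnt.c
    env.RPCGenService('proto.x')   # rpcgen -m  -> proto_svc.c
    env.RPCGenXDR('proto.x')       # rpcgen -c  -> proto_xdr.c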
+# + +__revision__ = "src/engine/SCons/Tool/rpm.py 4577 2009/12/27 19:43:56 scons" + +import os +import re +import shutil +import subprocess + +import SCons.Builder +import SCons.Node.FS +import SCons.Util +import SCons.Action +import SCons.Defaults + +def get_cmd(source, env): + tar_file_with_included_specfile = source + if SCons.Util.is_List(source): + tar_file_with_included_specfile = source[0] + return "%s %s %s"%(env['RPM'], env['RPMFLAGS'], + tar_file_with_included_specfile.abspath ) + +def build_rpm(target, source, env): + # create a temporary rpm build root. + tmpdir = os.path.join( os.path.dirname( target[0].abspath ), 'rpmtemp' ) + if os.path.exists(tmpdir): + shutil.rmtree(tmpdir) + + # now create the mandatory rpm directory structure. + for d in ['RPMS', 'SRPMS', 'SPECS', 'BUILD']: + os.makedirs( os.path.join( tmpdir, d ) ) + + # set the topdir as an rpmflag. + env.Prepend( RPMFLAGS = '--define \'_topdir %s\'' % tmpdir ) + + # now call rpmbuild to create the rpm package. + handle = subprocess.Popen(get_cmd(source, env), + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + shell=True) + output = handle.stdout.read() + status = handle.wait() + + if status: + raise SCons.Errors.BuildError( node=target[0], + errstr=output, + filename=str(target[0]) ) + else: + # XXX: assume that LC_ALL=c is set while running rpmbuild + output_files = re.compile( 'Wrote: (.*)' ).findall( output ) + + for output, input in zip( output_files, target ): + rpm_output = os.path.basename(output) + expected = os.path.basename(input.get_path()) + + assert expected == rpm_output, "got %s but expected %s" % (rpm_output, expected) + shutil.copy( output, input.abspath ) + + + # cleanup before leaving. + shutil.rmtree(tmpdir) + + return status + +def string_rpm(target, source, env): + try: + return env['RPMCOMSTR'] + except KeyError: + return get_cmd(source, env) + +rpmAction = SCons.Action.Action(build_rpm, string_rpm) + +RpmBuilder = SCons.Builder.Builder(action = SCons.Action.Action('$RPMCOM', '$RPMCOMSTR'), + source_scanner = SCons.Defaults.DirScanner, + suffix = '$RPMSUFFIX') + + + +def generate(env): + """Add Builders and construction variables for rpm to an Environment.""" + try: + bld = env['BUILDERS']['Rpm'] + except KeyError: + bld = RpmBuilder + env['BUILDERS']['Rpm'] = bld + + env.SetDefault(RPM = 'LC_ALL=c rpmbuild') + env.SetDefault(RPMFLAGS = SCons.Util.CLVar('-ta')) + env.SetDefault(RPMCOM = rpmAction) + env.SetDefault(RPMSUFFIX = '.rpm') + +def exists(env): + return env.Detect('rpmbuild') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/sgiar.py b/engine/SCons/Tool/sgiar.py new file mode 100644 index 0000000..4e68e5e --- /dev/null +++ b/engine/SCons/Tool/sgiar.py @@ -0,0 +1,68 @@ +"""SCons.Tool.sgiar + +Tool-specific initialization for SGI ar (library archive). If CC +exists, static libraries should be built with it, so the prelinker has +a chance to resolve C++ template instantiations. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
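The Rpm builder shells out to rpmbuild -ta on a tarball that must already contain the .spec file, points _topdir at a temporary tree, and copies back the packages that rpmbuild reports with 'Wrote:'. It is usually driven through the packaging tool, but direct use looks roughly like this hedged sketch (target and tarball names are illustrative and must match what the spec file actually produces):

    env = Environment(tools=['default', 'rpm'])
    env.Rpm(['foo-1.0-1.i386.rpm', 'foo-1.0-1.src.rpm'], 'foo-1.0.tar.gz')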
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/sgiar.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Defaults +import SCons.Tool +import SCons.Util + +def generate(env): + """Add Builders and construction variables for ar to an Environment.""" + SCons.Tool.createStaticLibBuilder(env) + + if env.Detect('CC'): + env['AR'] = 'CC' + env['ARFLAGS'] = SCons.Util.CLVar('-ar') + env['ARCOM'] = '$AR $ARFLAGS -o $TARGET $SOURCES' + else: + env['AR'] = 'ar' + env['ARFLAGS'] = SCons.Util.CLVar('r') + env['ARCOM'] = '$AR $ARFLAGS $TARGET $SOURCES' + + env['SHLINK'] = '$LINK' + env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared') + env['SHLINKCOM'] = '$SHLINK $SHLINKFLAGS -o $TARGET $SOURCES $_LIBDIRFLAGS $_LIBFLAGS' + env['LIBPREFIX'] = 'lib' + env['LIBSUFFIX'] = '.a' + +def exists(env): + return env.Detect('CC') or env.Detect('ar') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/sgic++.py b/engine/SCons/Tool/sgic++.py new file mode 100644 index 0000000..8360ca7 --- /dev/null +++ b/engine/SCons/Tool/sgic++.py @@ -0,0 +1,58 @@ +"""SCons.Tool.sgic++ + +Tool-specific initialization for MIPSpro C++ on SGI. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
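sgiar prefers CC as the archiver when it is available so the MIPSpro prelinker can resolve C++ template instantiations before the archive is written; otherwise it falls back to plain ar. A hedged sketch for an IRIX host with MIPSpro installed; file names are placeholders:

    env = Environment(tools=['sgicc', 'sgic++', 'sgiar'])
    # With CC detected this archives via "CC -ar -o libwidget.a ...".
    env.StaticLibrary('widget', ['widget.cpp', 'helpers.cpp'])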
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/sgic++.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Util + +cplusplus = __import__('c++', globals(), locals(), []) + +def generate(env): + """Add Builders and construction variables for SGI MIPS C++ to an Environment.""" + + cplusplus.generate(env) + + env['CXX'] = 'CC' + env['CXXFLAGS'] = SCons.Util.CLVar('-LANG:std') + env['SHCXX'] = '$CXX' + env['SHOBJSUFFIX'] = '.o' + env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1 + +def exists(env): + return env.Detect('CC') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/sgicc.py b/engine/SCons/Tool/sgicc.py new file mode 100644 index 0000000..241d897 --- /dev/null +++ b/engine/SCons/Tool/sgicc.py @@ -0,0 +1,53 @@ +"""SCons.Tool.sgicc + +Tool-specific initialization for MIPSPro cc on SGI. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/sgicc.py 4577 2009/12/27 19:43:56 scons" + +import cc + +def generate(env): + """Add Builders and construction variables for gcc to an Environment.""" + cc.generate(env) + + env['CXX'] = 'CC' + env['SHOBJSUFFIX'] = '.o' + env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1 + +def exists(env): + return env.Detect('cc') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/sgilink.py b/engine/SCons/Tool/sgilink.py new file mode 100644 index 0000000..1662f84 --- /dev/null +++ b/engine/SCons/Tool/sgilink.py @@ -0,0 +1,63 @@ +"""SCons.Tool.sgilink + +Tool-specific initialization for the SGI MIPSPro linker on SGI. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/sgilink.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Util + +import link + +linkers = ['CC', 'cc'] + +def generate(env): + """Add Builders and construction variables for MIPSPro to an Environment.""" + link.generate(env) + + env['LINK'] = env.Detect(linkers) or 'cc' + env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared') + + # __RPATH is set to $_RPATH in the platform specification if that + # platform supports it. + env.Append(LINKFLAGS=['$__RPATH']) + env['RPATHPREFIX'] = '-rpath ' + env['RPATHSUFFIX'] = '' + env['_RPATH'] = '${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}' + +def exists(env): + return env.Detect(linkers) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/sunar.py b/engine/SCons/Tool/sunar.py new file mode 100644 index 0000000..c64867c --- /dev/null +++ b/engine/SCons/Tool/sunar.py @@ -0,0 +1,67 @@ +"""engine.SCons.Tool.sunar + +Tool-specific initialization for Solaris (Forte) ar (library archive). If CC +exists, static libraries should be built with it, so that template +instantians can be resolved. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
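sgilink appends $__RPATH to the link flags and expands RPATH entries through $_RPATH with the '-rpath ' prefix, so runtime library search paths can be set per target. A hedged sketch with placeholder paths and names:

    env = Environment(tools=['sgicc', 'sgic++', 'sgilink', 'sgiar'])
    # Expands to "-rpath /opt/widget/lib" on the MIPSpro link line.
    env.Program('demo', ['demo.cpp'], RPATH=['/opt/widget/lib'],
                LIBS=['widget'], LIBPATH=['/opt/widget/lib'])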
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/sunar.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Defaults +import SCons.Tool +import SCons.Util + +def generate(env): + """Add Builders and construction variables for ar to an Environment.""" + SCons.Tool.createStaticLibBuilder(env) + + if env.Detect('CC'): + env['AR'] = 'CC' + env['ARFLAGS'] = SCons.Util.CLVar('-xar') + env['ARCOM'] = '$AR $ARFLAGS -o $TARGET $SOURCES' + else: + env['AR'] = 'ar' + env['ARFLAGS'] = SCons.Util.CLVar('r') + env['ARCOM'] = '$AR $ARFLAGS $TARGET $SOURCES' + + env['SHLINK'] = '$LINK' + env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -G') + env['SHLINKCOM'] = '$SHLINK $SHLINKFLAGS -o $TARGET $SOURCES $_LIBDIRFLAGS $_LIBFLAGS' + env['LIBPREFIX'] = 'lib' + env['LIBSUFFIX'] = '.a' + +def exists(env): + return env.Detect('CC') or env.Detect('ar') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/sunc++.py b/engine/SCons/Tool/sunc++.py new file mode 100644 index 0000000..00fb8c8 --- /dev/null +++ b/engine/SCons/Tool/sunc++.py @@ -0,0 +1,142 @@ +"""SCons.Tool.sunc++ + +Tool-specific initialization for C++ on SunOS / Solaris. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/sunc++.py 4577 2009/12/27 19:43:56 scons" + +import SCons + +import os +import re +import subprocess + +cplusplus = __import__('c++', globals(), locals(), []) + +package_info = {} + +def get_package_info(package_name, pkginfo, pkgchk): + try: + return package_info[package_name] + except KeyError: + version = None + pathname = None + try: + sadm_contents = open('/var/sadm/install/contents', 'r').read() + except EnvironmentError: + pass + else: + sadm_re = re.compile('^(\S*/bin/CC)(=\S*)? 
%s$' % package_name, re.M) + sadm_match = sadm_re.search(sadm_contents) + if sadm_match: + pathname = os.path.dirname(sadm_match.group(1)) + + try: + p = subprocess.Popen([pkginfo, '-l', package_name], + stdout=subprocess.PIPE, + stderr=open('/dev/null', 'w')) + except EnvironmentError: + pass + else: + pkginfo_contents = p.communicate()[0] + version_re = re.compile('^ *VERSION:\s*(.*)$', re.M) + version_match = version_re.search(pkginfo_contents) + if version_match: + version = version_match.group(1) + + if pathname is None: + try: + p = subprocess.Popen([pkgchk, '-l', package_name], + stdout=subprocess.PIPE, + stderr=open('/dev/null', 'w')) + except EnvironmentError: + pass + else: + pkgchk_contents = p.communicate()[0] + pathname_re = re.compile(r'^Pathname:\s*(.*/bin/CC)$', re.M) + pathname_match = pathname_re.search(pkgchk_contents) + if pathname_match: + pathname = os.path.dirname(pathname_match.group(1)) + + package_info[package_name] = (pathname, version) + return package_info[package_name] + +# use the package installer tool lslpp to figure out where cppc and what +# version of it is installed +def get_cppc(env): + cxx = env.subst('$CXX') + if cxx: + cppcPath = os.path.dirname(cxx) + else: + cppcPath = None + + cppcVersion = None + + pkginfo = env.subst('$PKGINFO') + pkgchk = env.subst('$PKGCHK') + + for package in ['SPROcpl']: + path, version = get_package_info(package, pkginfo, pkgchk) + if path and version: + cppcPath, cppcVersion = path, version + break + + return (cppcPath, 'CC', 'CC', cppcVersion) + +def generate(env): + """Add Builders and construction variables for SunPRO C++.""" + path, cxx, shcxx, version = get_cppc(env) + if path: + cxx = os.path.join(path, cxx) + shcxx = os.path.join(path, shcxx) + + cplusplus.generate(env) + + env['CXX'] = cxx + env['SHCXX'] = shcxx + env['CXXVERSION'] = version + env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS -KPIC') + env['SHOBJPREFIX'] = 'so_' + env['SHOBJSUFFIX'] = '.o' + +def exists(env): + path, cxx, shcxx, version = get_cppc(env) + if path and cxx: + cppc = os.path.join(path, cxx) + if os.path.exists(cppc): + return cppc + return None + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/suncc.py b/engine/SCons/Tool/suncc.py new file mode 100644 index 0000000..c062324 --- /dev/null +++ b/engine/SCons/Tool/suncc.py @@ -0,0 +1,58 @@ +"""SCons.Tool.suncc + +Tool-specific initialization for Sun Solaris (Forte) CC and cc. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. 
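get_cppc() above looks for Sun Studio's SPROcpl package, first in /var/sadm/install/contents and then via pkginfo/pkgchk, and caches the result; the detected path and version end up in $CXX, $SHCXX and $CXXVERSION. A hedged sketch, assuming a Solaris host with the compiler package registered:

    env = Environment(tools=['default', 'sunc++'])
    # CXX/SHCXX point at the detected <path>/CC; CXXVERSION carries the
    # VERSION field reported by pkginfo -l SPROcpl.
    print(env.subst('$CXX $CXXVERSION'))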
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/suncc.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Util + +import cc + +def generate(env): + """ + Add Builders and construction variables for Forte C and C++ compilers + to an Environment. + """ + cc.generate(env) + + env['CXX'] = 'CC' + env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS -KPIC') + env['SHOBJPREFIX'] = 'so_' + env['SHOBJSUFFIX'] = '.o' + +def exists(env): + return env.Detect('CC') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/sunf77.py b/engine/SCons/Tool/sunf77.py new file mode 100644 index 0000000..25728cf --- /dev/null +++ b/engine/SCons/Tool/sunf77.py @@ -0,0 +1,63 @@ +"""SCons.Tool.sunf77 + +Tool-specific initialization for sunf77, the Sun Studio F77 compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
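suncc layers the Forte conventions on top of the generic cc tool: shared objects are compiled with -KPIC and given the so_ prefix so they never collide with the static objects built from the same sources. A short sketch with placeholder file names:

    env = Environment(tools=['default', 'suncc'])
    env.StaticLibrary('util', ['util.c'])   # plain util.o
    env.SharedLibrary('util', ['util.c'])   # so_util.o via $CCFLAGS -KPIC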
+# + +__revision__ = "src/engine/SCons/Tool/sunf77.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Util + +from FortranCommon import add_all_to_env + +compilers = ['sunf77', 'f77'] + +def generate(env): + """Add Builders and construction variables for sunf77 to an Environment.""" + add_all_to_env(env) + + fcomp = env.Detect(compilers) or 'f77' + env['FORTRAN'] = fcomp + env['F77'] = fcomp + + env['SHFORTRAN'] = '$FORTRAN' + env['SHF77'] = '$F77' + + env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS -KPIC') + env['SHF77FLAGS'] = SCons.Util.CLVar('$F77FLAGS -KPIC') + +def exists(env): + return env.Detect(compilers) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/sunf90.py b/engine/SCons/Tool/sunf90.py new file mode 100644 index 0000000..bf09cfc --- /dev/null +++ b/engine/SCons/Tool/sunf90.py @@ -0,0 +1,64 @@ +"""SCons.Tool.sunf90 + +Tool-specific initialization for sunf90, the Sun Studio F90 compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/sunf90.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Util + +from FortranCommon import add_all_to_env + +compilers = ['sunf90', 'f90'] + +def generate(env): + """Add Builders and construction variables for sun f90 compiler to an + Environment.""" + add_all_to_env(env) + + fcomp = env.Detect(compilers) or 'f90' + env['FORTRAN'] = fcomp + env['F90'] = fcomp + + env['SHFORTRAN'] = '$FORTRAN' + env['SHF90'] = '$F90' + + env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS -KPIC') + env['SHF90FLAGS'] = SCons.Util.CLVar('$F90FLAGS -KPIC') + +def exists(env): + return env.Detect(compilers) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/sunf95.py b/engine/SCons/Tool/sunf95.py new file mode 100644 index 0000000..c8216b3 --- /dev/null +++ b/engine/SCons/Tool/sunf95.py @@ -0,0 +1,64 @@ +"""SCons.Tool.sunf95 + +Tool-specific initialization for sunf95, the Sun Studio F95 compiler. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
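sunf77 and sunf90 (and sunf95 below) follow the same pattern: detect the Sun front end, fall back to the generic compiler name, and add -KPIC for shared objects. A hedged sketch using the f90 variant with a placeholder source file:

    env = Environment(tools=['default', 'sunf90'])
    # FORTRAN/F90 resolve to sunf90 if found, else plain f90; shared objects add -KPIC.
    env.SharedLibrary('solver', ['solver.f90'])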
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/sunf95.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Util + +from FortranCommon import add_all_to_env + +compilers = ['sunf95', 'f95'] + +def generate(env): + """Add Builders and construction variables for sunf95 to an + Environment.""" + add_all_to_env(env) + + fcomp = env.Detect(compilers) or 'f95' + env['FORTRAN'] = fcomp + env['F95'] = fcomp + + env['SHFORTRAN'] = '$FORTRAN' + env['SHF95'] = '$F95' + + env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS -KPIC') + env['SHF95FLAGS'] = SCons.Util.CLVar('$F95FLAGS -KPIC') + +def exists(env): + return env.Detect(compilers) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/sunlink.py b/engine/SCons/Tool/sunlink.py new file mode 100644 index 0000000..73d4ae3 --- /dev/null +++ b/engine/SCons/Tool/sunlink.py @@ -0,0 +1,77 @@ +"""SCons.Tool.sunlink + +Tool-specific initialization for the Sun Solaris (Forte) linker. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
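# A minimal sketch (illustrative only, with hypothetical file names) of
# building a shared library from Fortran 95 sources using the sunf95 tool
# shown above together with sunlink; in a real SConstruct the Environment
# class needs no import.
from SCons.Environment import Environment

env = Environment(tools=['default', 'sunf95', 'sunlink'])
# sunf95.generate() sets FORTRAN/F95 to the detected compiler and adds -KPIC
# to the shared-object flags; sunlink adds -G for shared-library linking.
env.SharedLibrary(target='solver', source=['solver.f95'])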
+# + +__revision__ = "src/engine/SCons/Tool/sunlink.py 4577 2009/12/27 19:43:56 scons" + +import os +import os.path + +import SCons.Util + +import link + +ccLinker = None + +# search for the acc compiler and linker front end + +try: + dirs = os.listdir('/opt') +except (IOError, OSError): + # Not being able to read the directory because it doesn't exist + # (IOError) or isn't readable (OSError) is okay. + dirs = [] + +for d in dirs: + linker = '/opt/' + d + '/bin/CC' + if os.path.exists(linker): + ccLinker = linker + break + +def generate(env): + """Add Builders and construction variables for Forte to an Environment.""" + link.generate(env) + + env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -G') + + env.Append(LINKFLAGS=['$__RPATH']) + env['RPATHPREFIX'] = '-R' + env['RPATHSUFFIX'] = '' + env['_RPATH'] = '${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}' + +def exists(env): + return ccLinker + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/swig.py b/engine/SCons/Tool/swig.py new file mode 100644 index 0000000..5cccb91 --- /dev/null +++ b/engine/SCons/Tool/swig.py @@ -0,0 +1,186 @@ +"""SCons.Tool.swig + +Tool-specific initialization for swig. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/swig.py 4577 2009/12/27 19:43:56 scons" + +import os.path +import re +import string +import subprocess + +import SCons.Action +import SCons.Defaults +import SCons.Scanner +import SCons.Tool +import SCons.Util + +SwigAction = SCons.Action.Action('$SWIGCOM', '$SWIGCOMSTR') + +def swigSuffixEmitter(env, source): + if '-c++' in SCons.Util.CLVar(env.subst("$SWIGFLAGS", source=source)): + return '$SWIGCXXFILESUFFIX' + else: + return '$SWIGCFILESUFFIX' + +# Match '%module test', as well as '%module(directors="1") test' +# Also allow for test to be quoted (SWIG permits double quotes, but not single) +_reModule = re.compile(r'%module(\s*\(.*\))?\s+("?)(.+)\2') + +def _find_modules(src): + """Find all modules referenced by %module lines in `src`, a SWIG .i file. 
+ Returns a list of all modules, and a flag set if SWIG directors have + been requested (SWIG will generate an additional header file in this + case.)""" + directors = 0 + mnames = [] + try: + matches = _reModule.findall(open(src).read()) + except IOError: + # If the file's not yet generated, guess the module name from the filename + matches = [] + mnames.append(os.path.splitext(src)[0]) + + for m in matches: + mnames.append(m[2]) + directors = directors or string.find(m[0], 'directors') >= 0 + return mnames, directors + +def _add_director_header_targets(target, env): + # Directors only work with C++ code, not C + suffix = env.subst(env['SWIGCXXFILESUFFIX']) + # For each file ending in SWIGCXXFILESUFFIX, add a new target director + # header by replacing the ending with SWIGDIRECTORSUFFIX. + for x in target[:]: + n = x.name + d = x.dir + if n[-len(suffix):] == suffix: + target.append(d.File(n[:-len(suffix)] + env['SWIGDIRECTORSUFFIX'])) + +def _swigEmitter(target, source, env): + swigflags = env.subst("$SWIGFLAGS", target=target, source=source) + flags = SCons.Util.CLVar(swigflags) + for src in source: + src = str(src.rfile()) + mnames = None + if "-python" in flags and "-noproxy" not in flags: + if mnames is None: + mnames, directors = _find_modules(src) + if directors: + _add_director_header_targets(target, env) + python_files = map(lambda m: m + ".py", mnames) + outdir = env.subst('$SWIGOUTDIR', target=target, source=source) + # .py files should be generated in SWIGOUTDIR if specified, + # otherwise in the same directory as the target + if outdir: + python_files = map(lambda j, o=outdir, e=env: + e.fs.File(os.path.join(o, j)), + python_files) + else: + python_files = map(lambda m, d=target[0].dir: + d.File(m), python_files) + target.extend(python_files) + if "-java" in flags: + if mnames is None: + mnames, directors = _find_modules(src) + if directors: + _add_director_header_targets(target, env) + java_files = map(lambda m: [m + ".java", m + "JNI.java"], mnames) + java_files = SCons.Util.flatten(java_files) + outdir = env.subst('$SWIGOUTDIR', target=target, source=source) + if outdir: + java_files = map(lambda j, o=outdir: os.path.join(o, j), java_files) + java_files = map(env.fs.File, java_files) + for jf in java_files: + t_from_s = lambda t, p, s, x: t.dir + SCons.Util.AddMethod(jf, t_from_s, 'target_from_source') + target.extend(java_files) + return (target, source) + +def _get_swig_version(env): + """Run the SWIG command line tool to get and return the version number""" + pipe = SCons.Action._subproc(env, [env['SWIG'], '-version'], + stdin = 'devnull', + stderr = 'devnull', + stdout = subprocess.PIPE) + if pipe.wait() != 0: return + + out = pipe.stdout.read() + match = re.search(r'SWIG Version\s+(\S+)$', out, re.MULTILINE) + if match: + return match.group(1) + +def generate(env): + """Add Builders and construction variables for swig to an Environment.""" + c_file, cxx_file = SCons.Tool.createCFileBuilders(env) + + c_file.suffix['.i'] = swigSuffixEmitter + cxx_file.suffix['.i'] = swigSuffixEmitter + + c_file.add_action('.i', SwigAction) + c_file.add_emitter('.i', _swigEmitter) + cxx_file.add_action('.i', SwigAction) + cxx_file.add_emitter('.i', _swigEmitter) + + java_file = SCons.Tool.CreateJavaFileBuilder(env) + + java_file.suffix['.i'] = swigSuffixEmitter + + java_file.add_action('.i', SwigAction) + java_file.add_emitter('.i', _swigEmitter) + + env['SWIG'] = 'swig' + env['SWIGVERSION'] = _get_swig_version(env) + env['SWIGFLAGS'] = SCons.Util.CLVar('') + env['SWIGDIRECTORSUFFIX'] = 
'_wrap.h' + env['SWIGCFILESUFFIX'] = '_wrap$CFILESUFFIX' + env['SWIGCXXFILESUFFIX'] = '_wrap$CXXFILESUFFIX' + env['_SWIGOUTDIR'] = r'${"-outdir \"%s\"" % SWIGOUTDIR}' + env['SWIGPATH'] = [] + env['SWIGINCPREFIX'] = '-I' + env['SWIGINCSUFFIX'] = '' + env['_SWIGINCFLAGS'] = '$( ${_concat(SWIGINCPREFIX, SWIGPATH, SWIGINCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)' + env['SWIGCOM'] = '$SWIG -o $TARGET ${_SWIGOUTDIR} ${_SWIGINCFLAGS} $SWIGFLAGS $SOURCES' + + expr = '^[ \t]*%[ \t]*(?:include|import|extern)[ \t]*(<|"?)([^>\s"]+)(?:>|"?)' + scanner = SCons.Scanner.ClassicCPP("SWIGScan", ".i", "SWIGPATH", expr) + + env.Append(SCANNERS = scanner) + +def exists(env): + return env.Detect(['swig']) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/tar.py b/engine/SCons/Tool/tar.py new file mode 100644 index 0000000..e99cac1 --- /dev/null +++ b/engine/SCons/Tool/tar.py @@ -0,0 +1,73 @@ +"""SCons.Tool.tar + +Tool-specific initialization for tar. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
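# A small self-contained sketch of what the %module regular expression in
# SCons/Tool/swig.py above matches. The interface text is made up purely for
# illustration.
import re

_reModule = re.compile(r'%module(\s*\(.*\))?\s+("?)(.+)\2')

sample = '%module(directors="1") example\n%module "plain"\n'
results = [(name, 'directors' in opts)
           for opts, quote, name in _reModule.findall(sample)]
# results == [('example', True), ('plain', False)]
# Group 3 carries the module name; a 'directors' option in group 1 is what
# makes the emitter add the extra *_wrap.h director header as a target.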
+# + +__revision__ = "src/engine/SCons/Tool/tar.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Action +import SCons.Builder +import SCons.Defaults +import SCons.Node.FS +import SCons.Util + +tars = ['tar', 'gtar'] + +TarAction = SCons.Action.Action('$TARCOM', '$TARCOMSTR') + +TarBuilder = SCons.Builder.Builder(action = TarAction, + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARSUFFIX', + multi = 1) + + +def generate(env): + """Add Builders and construction variables for tar to an Environment.""" + try: + bld = env['BUILDERS']['Tar'] + except KeyError: + bld = TarBuilder + env['BUILDERS']['Tar'] = bld + + env['TAR'] = env.Detect(tars) or 'gtar' + env['TARFLAGS'] = SCons.Util.CLVar('-c') + env['TARCOM'] = '$TAR $TARFLAGS -f $TARGET $SOURCES' + env['TARSUFFIX'] = '.tar' + +def exists(env): + return env.Detect(tars) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/tex.py b/engine/SCons/Tool/tex.py new file mode 100644 index 0000000..664b8ab --- /dev/null +++ b/engine/SCons/Tool/tex.py @@ -0,0 +1,792 @@ +"""SCons.Tool.tex + +Tool-specific initialization for TeX. +Generates .dvi files from .tex files + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
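# A minimal sketch of using the Tar builder defined in tar.py above. The
# archive and source names are hypothetical; Environment is implicit in a
# real SConstruct.
from SCons.Environment import Environment

env = Environment(tools=['default', 'tar'])
env.Append(TARFLAGS='-z')           # '-c' is the default; '-z' adds gzip
env.Replace(TARSUFFIX='.tar.gz')
env.Tar('dist', ['src', 'README'])  # -> dist.tar.gz, built via $TARCOM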
+# + +__revision__ = "src/engine/SCons/Tool/tex.py 4577 2009/12/27 19:43:56 scons" + +import os.path +import re +import string +import shutil + +import SCons.Action +import SCons.Node +import SCons.Node.FS +import SCons.Util +import SCons.Scanner.LaTeX + +Verbose = False + +must_rerun_latex = True + +# these are files that just need to be checked for changes and then rerun latex +check_suffixes = ['.toc', '.lof', '.lot', '.out', '.nav', '.snm'] + +# these are files that require bibtex or makeindex to be run when they change +all_suffixes = check_suffixes + ['.bbl', '.idx', '.nlo', '.glo', '.acn'] + +# +# regular expressions used to search for Latex features +# or outputs that require rerunning latex +# +# search for all .aux files opened by latex (recorded in the .fls file) +openout_aux_re = re.compile(r"INPUT *(.*\.aux)") + +#printindex_re = re.compile(r"^[^%]*\\printindex", re.MULTILINE) +#printnomenclature_re = re.compile(r"^[^%]*\\printnomenclature", re.MULTILINE) +#printglossary_re = re.compile(r"^[^%]*\\printglossary", re.MULTILINE) + +# search to find rerun warnings +warning_rerun_str = '(^LaTeX Warning:.*Rerun)|(^Package \w+ Warning:.*Rerun)' +warning_rerun_re = re.compile(warning_rerun_str, re.MULTILINE) + +# search to find citation rerun warnings +rerun_citations_str = "^LaTeX Warning:.*\n.*Rerun to get citations correct" +rerun_citations_re = re.compile(rerun_citations_str, re.MULTILINE) + +# search to find undefined references or citations warnings +undefined_references_str = '(^LaTeX Warning:.*undefined references)|(^Package \w+ Warning:.*undefined citations)' +undefined_references_re = re.compile(undefined_references_str, re.MULTILINE) + +# used by the emitter +auxfile_re = re.compile(r".", re.MULTILINE) +tableofcontents_re = re.compile(r"^[^%\n]*\\tableofcontents", re.MULTILINE) +makeindex_re = re.compile(r"^[^%\n]*\\makeindex", re.MULTILINE) +bibliography_re = re.compile(r"^[^%\n]*\\bibliography", re.MULTILINE) +listoffigures_re = re.compile(r"^[^%\n]*\\listoffigures", re.MULTILINE) +listoftables_re = re.compile(r"^[^%\n]*\\listoftables", re.MULTILINE) +hyperref_re = re.compile(r"^[^%\n]*\\usepackage.*\{hyperref\}", re.MULTILINE) +makenomenclature_re = re.compile(r"^[^%\n]*\\makenomenclature", re.MULTILINE) +makeglossary_re = re.compile(r"^[^%\n]*\\makeglossary", re.MULTILINE) +makeglossaries_re = re.compile(r"^[^%\n]*\\makeglossaries", re.MULTILINE) +makeacronyms_re = re.compile(r"^[^%\n]*\\makeglossaries", re.MULTILINE) +beamer_re = re.compile(r"^[^%\n]*\\documentclass\{beamer\}", re.MULTILINE) + +# search to find all files included by Latex +include_re = re.compile(r'^[^%\n]*\\(?:include|input){([^}]*)}', re.MULTILINE) + +# search to find all graphics files included by Latex +includegraphics_re = re.compile(r'^[^%\n]*\\(?:includegraphics(?:\[[^\]]+\])?){([^}]*)}', re.MULTILINE) + +# search to find all files opened by Latex (recorded in .log file) +openout_re = re.compile(r"OUTPUT *(.*)") + +# list of graphics file extensions for TeX and LaTeX +TexGraphics = SCons.Scanner.LaTeX.TexGraphics +LatexGraphics = SCons.Scanner.LaTeX.LatexGraphics + +# An Action sufficient to build any generic tex file. +TeXAction = None + +# An action to build a latex file. This action might be needed more +# than once if we are dealing with labels and bibtex. +LaTeXAction = None + +# An action to run BibTeX on a file. +BibTeXAction = None + +# An action to run MakeIndex on a file. +MakeIndexAction = None + +# An action to run MakeIndex (for nomencl) on a file. 
+MakeNclAction = None + +# An action to run MakeIndex (for glossary) on a file. +MakeGlossaryAction = None + +# An action to run MakeIndex (for acronyms) on a file. +MakeAcronymsAction = None + +# Used as a return value of modify_env_var if the variable is not set. +_null = SCons.Scanner.LaTeX._null + +modify_env_var = SCons.Scanner.LaTeX.modify_env_var + +def FindFile(name,suffixes,paths,env,requireExt=False): + if requireExt: + name,ext = SCons.Util.splitext(name) + # if the user gave an extension use it. + if ext: + name = name + ext + if Verbose: + print " searching for '%s' with extensions: " % name,suffixes + + for path in paths: + testName = os.path.join(path,name) + if Verbose: + print " look for '%s'" % testName + if os.path.exists(testName): + if Verbose: + print " found '%s'" % testName + return env.fs.File(testName) + else: + name_ext = SCons.Util.splitext(testName)[1] + if name_ext: + continue + + # if no suffix try adding those passed in + for suffix in suffixes: + testNameExt = testName + suffix + if Verbose: + print " look for '%s'" % testNameExt + + if os.path.exists(testNameExt): + if Verbose: + print " found '%s'" % testNameExt + return env.fs.File(testNameExt) + if Verbose: + print " did not find '%s'" % name + return None + +def InternalLaTeXAuxAction(XXXLaTeXAction, target = None, source= None, env=None): + """A builder for LaTeX files that checks the output in the aux file + and decides how many times to use LaTeXAction, and BibTeXAction.""" + + global must_rerun_latex + + # This routine is called with two actions. In this file for DVI builds + # with LaTeXAction and from the pdflatex.py with PDFLaTeXAction + # set this up now for the case where the user requests a different extension + # for the target filename + if (XXXLaTeXAction == LaTeXAction): + callerSuffix = ".dvi" + else: + callerSuffix = env['PDFSUFFIX'] + + basename = SCons.Util.splitext(str(source[0]))[0] + basedir = os.path.split(str(source[0]))[0] + basefile = os.path.split(str(basename))[1] + abspath = os.path.abspath(basedir) + + targetext = os.path.splitext(str(target[0]))[1] + targetdir = os.path.split(str(target[0]))[0] + + saved_env = {} + for var in SCons.Scanner.LaTeX.LaTeX.env_variables: + saved_env[var] = modify_env_var(env, var, abspath) + + # Create base file names with the target directory since the auxiliary files + # will be made there. That's because the *COM variables have the cd + # command in the prolog. We check + # for the existence of files before opening them--even ones like the + # aux file that TeX always creates--to make it possible to write tests + # with stubs that don't necessarily generate all of the same files. + + targetbase = os.path.join(targetdir, basefile) + + # if there is a \makeindex there will be a .idx and thus + # we have to run makeindex at least once to keep the build + # happy even if there is no index. 
+ # Same for glossaries and nomenclature + src_content = source[0].get_text_contents() + run_makeindex = makeindex_re.search(src_content) and not os.path.exists(targetbase + '.idx') + run_nomenclature = makenomenclature_re.search(src_content) and not os.path.exists(targetbase + '.nlo') + run_glossary = makeglossary_re.search(src_content) and not os.path.exists(targetbase + '.glo') + run_glossaries = makeglossaries_re.search(src_content) and not os.path.exists(targetbase + '.glo') + run_acronyms = makeacronyms_re.search(src_content) and not os.path.exists(targetbase + '.acn') + + saved_hashes = {} + suffix_nodes = {} + + for suffix in all_suffixes: + theNode = env.fs.File(targetbase + suffix) + suffix_nodes[suffix] = theNode + saved_hashes[suffix] = theNode.get_csig() + + if Verbose: + print "hashes: ",saved_hashes + + must_rerun_latex = True + + # + # routine to update MD5 hash and compare + # + # TODO(1.5): nested scopes + def check_MD5(filenode, suffix, saved_hashes=saved_hashes, targetbase=targetbase): + global must_rerun_latex + # two calls to clear old csig + filenode.clear_memoized_values() + filenode.ninfo = filenode.new_ninfo() + new_md5 = filenode.get_csig() + + if saved_hashes[suffix] == new_md5: + if Verbose: + print "file %s not changed" % (targetbase+suffix) + return False # unchanged + saved_hashes[suffix] = new_md5 + must_rerun_latex = True + if Verbose: + print "file %s changed, rerunning Latex, new hash = " % (targetbase+suffix), new_md5 + return True # changed + + # generate the file name that latex will generate + resultfilename = targetbase + callerSuffix + + count = 0 + + while (must_rerun_latex and count < int(env.subst('$LATEXRETRIES'))) : + result = XXXLaTeXAction(target, source, env) + if result != 0: + return result + + count = count + 1 + + must_rerun_latex = False + # Decide if various things need to be run, or run again. + + # Read the log file to find warnings/errors + logfilename = targetbase + '.log' + logContent = '' + if os.path.exists(logfilename): + logContent = open(logfilename, "rb").read() + + + # Read the fls file to find all .aux files + flsfilename = targetbase + '.fls' + flsContent = '' + auxfiles = [] + if os.path.exists(flsfilename): + flsContent = open(flsfilename, "rb").read() + auxfiles = openout_aux_re.findall(flsContent) + if Verbose: + print "auxfiles ",auxfiles + + # Now decide if bibtex will need to be run. + # The information that bibtex reads from the .aux file is + # pass-independent. If we find (below) that the .bbl file is unchanged, + # then the last latex saw a correct bibliography. + # Therefore only do this on the first pass + if count == 1: + for auxfilename in auxfiles: + target_aux = os.path.join(targetdir, auxfilename) + if os.path.exists(target_aux): + content = open(target_aux, "rb").read() + if string.find(content, "bibdata") != -1: + if Verbose: + print "Need to run bibtex" + bibfile = env.fs.File(targetbase) + result = BibTeXAction(bibfile, bibfile, env) + if result != 0: + print env['BIBTEX']," returned an error, check the blg file" + return result + must_rerun_latex = check_MD5(suffix_nodes['.bbl'],'.bbl') + break + + # Now decide if latex will need to be run again due to index. 
+ if check_MD5(suffix_nodes['.idx'],'.idx') or (count == 1 and run_makeindex): + # We must run makeindex + if Verbose: + print "Need to run makeindex" + idxfile = suffix_nodes['.idx'] + result = MakeIndexAction(idxfile, idxfile, env) + if result != 0: + print env['MAKEINDEX']," returned an error, check the ilg file" + return result + + # TO-DO: need to add a way for the user to extend this list for whatever + # auxiliary files they create in other (or their own) packages + # Harder is case is where an action needs to be called -- that should be rare (I hope?) + + for index in check_suffixes: + check_MD5(suffix_nodes[index],index) + + # Now decide if latex will need to be run again due to nomenclature. + if check_MD5(suffix_nodes['.nlo'],'.nlo') or (count == 1 and run_nomenclature): + # We must run makeindex + if Verbose: + print "Need to run makeindex for nomenclature" + nclfile = suffix_nodes['.nlo'] + result = MakeNclAction(nclfile, nclfile, env) + if result != 0: + print env['MAKENCL']," (nomenclature) returned an error, check the nlg file" + #return result + + # Now decide if latex will need to be run again due to glossary. + if check_MD5(suffix_nodes['.glo'],'.glo') or (count == 1 and run_glossaries) or (count == 1 and run_glossary): + # We must run makeindex + if Verbose: + print "Need to run makeindex for glossary" + glofile = suffix_nodes['.glo'] + result = MakeGlossaryAction(glofile, glofile, env) + if result != 0: + print env['MAKEGLOSSARY']," (glossary) returned an error, check the glg file" + #return result + + # Now decide if latex will need to be run again due to acronyms. + if check_MD5(suffix_nodes['.acn'],'.acn') or (count == 1 and run_acronyms): + # We must run makeindex + if Verbose: + print "Need to run makeindex for acronyms" + acrfile = suffix_nodes['.acn'] + result = MakeAcronymsAction(acrfile, acrfile, env) + if result != 0: + print env['MAKEACRONYMS']," (acronymns) returned an error, check the alg file" + return result + + # Now decide if latex needs to be run yet again to resolve warnings. + if warning_rerun_re.search(logContent): + must_rerun_latex = True + if Verbose: + print "rerun Latex due to latex or package rerun warning" + + if rerun_citations_re.search(logContent): + must_rerun_latex = True + if Verbose: + print "rerun Latex due to 'Rerun to get citations correct' warning" + + if undefined_references_re.search(logContent): + must_rerun_latex = True + if Verbose: + print "rerun Latex due to undefined references or citations" + + if (count >= int(env.subst('$LATEXRETRIES')) and must_rerun_latex): + print "reached max number of retries on Latex ,",int(env.subst('$LATEXRETRIES')) +# end of while loop + + # rename Latex's output to what the target name is + if not (str(target[0]) == resultfilename and os.path.exists(resultfilename)): + if os.path.exists(resultfilename): + print "move %s to %s" % (resultfilename, str(target[0]), ) + shutil.move(resultfilename,str(target[0])) + + # Original comment (when TEXPICTS was not restored): + # The TEXPICTS enviroment variable is needed by a dvi -> pdf step + # later on Mac OSX so leave it + # + # It is also used when searching for pictures (implicit dependencies). + # Why not set the variable again in the respective builder instead + # of leaving local modifications in the environment? What if multiple + # latex builds in different directories need different TEXPICTS? 
+ for var in SCons.Scanner.LaTeX.LaTeX.env_variables: + if var == 'TEXPICTS': + continue + if saved_env[var] is _null: + try: + del env['ENV'][var] + except KeyError: + pass # was never set + else: + env['ENV'][var] = saved_env[var] + + return result + +def LaTeXAuxAction(target = None, source= None, env=None): + result = InternalLaTeXAuxAction( LaTeXAction, target, source, env ) + return result + +LaTeX_re = re.compile("\\\\document(style|class)") + +def is_LaTeX(flist,env,abspath): + """Scan a file list to decide if it's TeX- or LaTeX-flavored.""" + + # We need to scan files that are included in case the + # \documentclass command is in them. + + # get path list from both env['TEXINPUTS'] and env['ENV']['TEXINPUTS'] + savedpath = modify_env_var(env, 'TEXINPUTS', abspath) + paths = env['ENV']['TEXINPUTS'] + if SCons.Util.is_List(paths): + pass + else: + # Split at os.pathsep to convert into absolute path + # TODO(1.5) + #paths = paths.split(os.pathsep) + paths = string.split(paths, os.pathsep) + + # now that we have the path list restore the env + if savedpath is _null: + try: + del env['ENV']['TEXINPUTS'] + except KeyError: + pass # was never set + else: + env['ENV']['TEXINPUTS'] = savedpath + if Verbose: + print "is_LaTeX search path ",paths + print "files to search :",flist + + # Now that we have the search path and file list, check each one + for f in flist: + if Verbose: + print " checking for Latex source ",str(f) + + content = f.get_text_contents() + if LaTeX_re.search(content): + if Verbose: + print "file %s is a LaTeX file" % str(f) + return 1 + if Verbose: + print "file %s is not a LaTeX file" % str(f) + + # now find included files + inc_files = [ ] + inc_files.extend( include_re.findall(content) ) + if Verbose: + print "files included by '%s': "%str(f),inc_files + # inc_files is list of file names as given. need to find them + # using TEXINPUTS paths. + + # search the included files + for src in inc_files: + srcNode = FindFile(src,['.tex','.ltx','.latex'],paths,env,requireExt=False) + # make this a list since is_LaTeX takes a list. + fileList = [srcNode,] + if Verbose: + print "FindFile found ",srcNode + if srcNode is not None: + file_test = is_LaTeX(fileList, env, abspath) + + # return on first file that finds latex is needed. + if file_test: + return file_test + + if Verbose: + print " done scanning ",str(f) + + return 0 + +def TeXLaTeXFunction(target = None, source= None, env=None): + """A builder for TeX and LaTeX that scans the source file to + decide the "flavor" of the source and then executes the appropriate + program.""" + + # find these paths for use in is_LaTeX to search for included files + basedir = os.path.split(str(source[0]))[0] + abspath = os.path.abspath(basedir) + + if is_LaTeX(source,env,abspath): + result = LaTeXAuxAction(target,source,env) + if result != 0: + print env['LATEX']," returned an error, check the log file" + else: + result = TeXAction(target,source,env) + if result != 0: + print env['TEX']," returned an error, check the log file" + return result + +def TeXLaTeXStrFunction(target = None, source= None, env=None): + """A strfunction for TeX and LaTeX that scans the source file to + decide the "flavor" of the source and then returns the appropriate + command string.""" + if env.GetOption("no_exec"): + + # find these paths for use in is_LaTeX to search for included files + basedir = os.path.split(str(source[0]))[0] + abspath = os.path.abspath(basedir) + + if is_LaTeX(source,env,abspath): + result = env.subst('$LATEXCOM',0,target,source)+" ..." 
+ else: + result = env.subst("$TEXCOM",0,target,source)+" ..." + else: + result = '' + return result + +def tex_eps_emitter(target, source, env): + """An emitter for TeX and LaTeX sources when + executing tex or latex. It will accept .ps and .eps + graphics files + """ + (target, source) = tex_emitter_core(target, source, env, TexGraphics) + + return (target, source) + +def tex_pdf_emitter(target, source, env): + """An emitter for TeX and LaTeX sources when + executing pdftex or pdflatex. It will accept graphics + files of types .pdf, .jpg, .png, .gif, and .tif + """ + (target, source) = tex_emitter_core(target, source, env, LatexGraphics) + + return (target, source) + +def ScanFiles(theFile, target, paths, file_tests, file_tests_search, env, graphics_extensions, targetdir): + """ For theFile (a Node) update any file_tests and search for graphics files + then find all included files and call ScanFiles recursively for each of them""" + + content = theFile.get_text_contents() + if Verbose: + print " scanning ",str(theFile) + + for i in range(len(file_tests_search)): + if file_tests[i][0] is None: + file_tests[i][0] = file_tests_search[i].search(content) + + # recursively call this on each of the included files + inc_files = [ ] + inc_files.extend( include_re.findall(content) ) + if Verbose: + print "files included by '%s': "%str(theFile),inc_files + # inc_files is list of file names as given. need to find them + # using TEXINPUTS paths. + + for src in inc_files: + srcNode = FindFile(src,['.tex','.ltx','.latex'],paths,env,requireExt=False) + if srcNode is not None: + file_tests = ScanFiles(srcNode, target, paths, file_tests, file_tests_search, env, graphics_extensions, targetdir) + if Verbose: + print " done scanning ",str(theFile) + return file_tests + +def tex_emitter_core(target, source, env, graphics_extensions): + """An emitter for TeX and LaTeX sources. + For LaTeX sources we try and find the common created files that + are needed on subsequent runs of latex to finish tables of contents, + bibliographies, indices, lists of figures, and hyperlink references. 
+ """ + basename = SCons.Util.splitext(str(source[0]))[0] + basefile = os.path.split(str(basename))[1] + targetdir = os.path.split(str(target[0]))[0] + targetbase = os.path.join(targetdir, basefile) + + basedir = os.path.split(str(source[0]))[0] + abspath = os.path.abspath(basedir) + target[0].attributes.path = abspath + + # + # file names we will make use of in searching the sources and log file + # + emit_suffixes = ['.aux', '.log', '.ilg', '.blg', '.nls', '.nlg', '.gls', '.glg', '.alg'] + all_suffixes + auxfilename = targetbase + '.aux' + logfilename = targetbase + '.log' + flsfilename = targetbase + '.fls' + + env.SideEffect(auxfilename,target[0]) + env.SideEffect(logfilename,target[0]) + env.SideEffect(flsfilename,target[0]) + if Verbose: + print "side effect :",auxfilename,logfilename,flsfilename + env.Clean(target[0],auxfilename) + env.Clean(target[0],logfilename) + env.Clean(target[0],flsfilename) + + content = source[0].get_text_contents() + + idx_exists = os.path.exists(targetbase + '.idx') + nlo_exists = os.path.exists(targetbase + '.nlo') + glo_exists = os.path.exists(targetbase + '.glo') + acr_exists = os.path.exists(targetbase + '.acn') + + # set up list with the regular expressions + # we use to find features used + file_tests_search = [auxfile_re, + makeindex_re, + bibliography_re, + tableofcontents_re, + listoffigures_re, + listoftables_re, + hyperref_re, + makenomenclature_re, + makeglossary_re, + makeglossaries_re, + makeacronyms_re, + beamer_re ] + # set up list with the file suffixes that need emitting + # when a feature is found + file_tests_suff = [['.aux'], + ['.idx', '.ind', '.ilg'], + ['.bbl', '.blg'], + ['.toc'], + ['.lof'], + ['.lot'], + ['.out'], + ['.nlo', '.nls', '.nlg'], + ['.glo', '.gls', '.glg'], + ['.glo', '.gls', '.glg'], + ['.acn', '.acr', '.alg'], + ['.nav', '.snm', '.out', '.toc'] ] + # build the list of lists + file_tests = [] + for i in range(len(file_tests_search)): + file_tests.append( [None, file_tests_suff[i]] ) + + # TO-DO: need to add a way for the user to extend this list for whatever + # auxiliary files they create in other (or their own) packages + + # get path list from both env['TEXINPUTS'] and env['ENV']['TEXINPUTS'] + savedpath = modify_env_var(env, 'TEXINPUTS', abspath) + paths = env['ENV']['TEXINPUTS'] + if SCons.Util.is_List(paths): + pass + else: + # Split at os.pathsep to convert into absolute path + # TODO(1.5) + #paths = paths.split(os.pathsep) + paths = string.split(paths, os.pathsep) + + # now that we have the path list restore the env + if savedpath is _null: + try: + del env['ENV']['TEXINPUTS'] + except KeyError: + pass # was never set + else: + env['ENV']['TEXINPUTS'] = savedpath + if Verbose: + print "search path ",paths + + file_tests = ScanFiles(source[0], target, paths, file_tests, file_tests_search, env, graphics_extensions, targetdir) + + for (theSearch,suffix_list) in file_tests: + if theSearch: + for suffix in suffix_list: + env.SideEffect(targetbase + suffix,target[0]) + if Verbose: + print "side effect :",targetbase + suffix + env.Clean(target[0],targetbase + suffix) + + # read fls file to get all other files that latex creates and will read on the next pass + # remove files from list that we explicitly dealt with above + if os.path.exists(flsfilename): + content = open(flsfilename, "rb").read() + out_files = openout_re.findall(content) + myfiles = [auxfilename, logfilename, flsfilename, targetbase+'.dvi',targetbase+'.pdf'] + for filename in out_files[:]: + if filename in myfiles: + out_files.remove(filename) + 
env.SideEffect(out_files,target[0]) + if Verbose: + print "side effect :",out_files + env.Clean(target[0],out_files) + + return (target, source) + + +TeXLaTeXAction = None + +def generate(env): + """Add Builders and construction variables for TeX to an Environment.""" + + global TeXLaTeXAction + if TeXLaTeXAction is None: + TeXLaTeXAction = SCons.Action.Action(TeXLaTeXFunction, + strfunction=TeXLaTeXStrFunction) + + env.AppendUnique(LATEXSUFFIXES=SCons.Tool.LaTeXSuffixes) + + generate_common(env) + + import dvi + dvi.generate(env) + + bld = env['BUILDERS']['DVI'] + bld.add_action('.tex', TeXLaTeXAction) + bld.add_emitter('.tex', tex_eps_emitter) + +def generate_common(env): + """Add internal Builders and construction variables for LaTeX to an Environment.""" + + # A generic tex file Action, sufficient for all tex files. + global TeXAction + if TeXAction is None: + TeXAction = SCons.Action.Action("$TEXCOM", "$TEXCOMSTR") + + # An Action to build a latex file. This might be needed more + # than once if we are dealing with labels and bibtex. + global LaTeXAction + if LaTeXAction is None: + LaTeXAction = SCons.Action.Action("$LATEXCOM", "$LATEXCOMSTR") + + # Define an action to run BibTeX on a file. + global BibTeXAction + if BibTeXAction is None: + BibTeXAction = SCons.Action.Action("$BIBTEXCOM", "$BIBTEXCOMSTR") + + # Define an action to run MakeIndex on a file. + global MakeIndexAction + if MakeIndexAction is None: + MakeIndexAction = SCons.Action.Action("$MAKEINDEXCOM", "$MAKEINDEXCOMSTR") + + # Define an action to run MakeIndex on a file for nomenclatures. + global MakeNclAction + if MakeNclAction is None: + MakeNclAction = SCons.Action.Action("$MAKENCLCOM", "$MAKENCLCOMSTR") + + # Define an action to run MakeIndex on a file for glossaries. + global MakeGlossaryAction + if MakeGlossaryAction is None: + MakeGlossaryAction = SCons.Action.Action("$MAKEGLOSSARYCOM", "$MAKEGLOSSARYCOMSTR") + + # Define an action to run MakeIndex on a file for acronyms. 
+ global MakeAcronymsAction + if MakeAcronymsAction is None: + MakeAcronymsAction = SCons.Action.Action("$MAKEACRONYMSCOM", "$MAKEACRONYMSCOMSTR") + + env['TEX'] = 'tex' + env['TEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode -recorder') + env['TEXCOM'] = 'cd ${TARGET.dir} && $TEX $TEXFLAGS ${SOURCE.file}' + + env['PDFTEX'] = 'pdftex' + env['PDFTEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode -recorder') + env['PDFTEXCOM'] = 'cd ${TARGET.dir} && $PDFTEX $PDFTEXFLAGS ${SOURCE.file}' + + env['LATEX'] = 'latex' + env['LATEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode -recorder') + env['LATEXCOM'] = 'cd ${TARGET.dir} && $LATEX $LATEXFLAGS ${SOURCE.file}' + env['LATEXRETRIES'] = 3 + + env['PDFLATEX'] = 'pdflatex' + env['PDFLATEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode -recorder') + env['PDFLATEXCOM'] = 'cd ${TARGET.dir} && $PDFLATEX $PDFLATEXFLAGS ${SOURCE.file}' + + env['BIBTEX'] = 'bibtex' + env['BIBTEXFLAGS'] = SCons.Util.CLVar('') + env['BIBTEXCOM'] = 'cd ${TARGET.dir} && $BIBTEX $BIBTEXFLAGS ${SOURCE.filebase}' + + env['MAKEINDEX'] = 'makeindex' + env['MAKEINDEXFLAGS'] = SCons.Util.CLVar('') + env['MAKEINDEXCOM'] = 'cd ${TARGET.dir} && $MAKEINDEX $MAKEINDEXFLAGS ${SOURCE.file}' + + env['MAKEGLOSSARY'] = 'makeindex' + env['MAKEGLOSSARYSTYLE'] = '${SOURCE.filebase}.ist' + env['MAKEGLOSSARYFLAGS'] = SCons.Util.CLVar('-s ${MAKEGLOSSARYSTYLE} -t ${SOURCE.filebase}.glg') + env['MAKEGLOSSARYCOM'] = 'cd ${TARGET.dir} && $MAKEGLOSSARY ${SOURCE.filebase}.glo $MAKEGLOSSARYFLAGS -o ${SOURCE.filebase}.gls' + + env['MAKEACRONYMS'] = 'makeindex' + env['MAKEACRONYMSSTYLE'] = '${SOURCE.filebase}.ist' + env['MAKEACRONYMSFLAGS'] = SCons.Util.CLVar('-s ${MAKEACRONYMSSTYLE} -t ${SOURCE.filebase}.alg') + env['MAKEACRONYMSCOM'] = 'cd ${TARGET.dir} && $MAKEACRONYMS ${SOURCE.filebase}.acn $MAKEACRONYMSFLAGS -o ${SOURCE.filebase}.acr' + + env['MAKENCL'] = 'makeindex' + env['MAKENCLSTYLE'] = 'nomencl.ist' + env['MAKENCLFLAGS'] = '-s ${MAKENCLSTYLE} -t ${SOURCE.filebase}.nlg' + env['MAKENCLCOM'] = 'cd ${TARGET.dir} && $MAKENCL ${SOURCE.filebase}.nlo $MAKENCLFLAGS -o ${SOURCE.filebase}.nls' + +def exists(env): + return env.Detect('tex') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/textfile.py b/engine/SCons/Tool/textfile.py new file mode 100644 index 0000000..bfac681 --- /dev/null +++ b/engine/SCons/Tool/textfile.py @@ -0,0 +1,175 @@ +# -*- python -*- +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__doc__ = """ +Textfile/Substfile builder for SCons. + + Create file 'target' which typically is a textfile. The 'source' + may be any combination of strings, Nodes, or lists of same. A + 'linesep' will be put between any part written and defaults to + os.linesep. + + The only difference between the Textfile builder and the Substfile + builder is that strings are converted to Value() nodes for the + former and File() nodes for the latter. To insert files in the + former or strings in the latter, wrap them in a File() or Value(), + respectively. + + The values of SUBST_DICT first have any construction variables + expanded (its keys are not expanded). If a value of SUBST_DICT is + a python callable function, it is called and the result is expanded + as the value. Values are substituted in a "random" order; if any + substitution could be further expanded by another subsitition, it + is unpredictible whether the expansion will occur. +""" + +__revision__ = "src/engine/SCons/Tool/textfile.py 4577 2009/12/27 19:43:56 scons" + +import SCons + +import os +import re + +from SCons.Node import Node +from SCons.Node.Python import Value +from SCons.Util import is_String, is_Sequence, is_Dict + +def _do_subst(node, subs): + """ + Fetch the node contents and replace all instances of the keys with + their values. For example, if subs is + {'%VERSION%': '1.2345', '%BASE%': 'MyProg', '%prefix%': '/bin'}, + then all instances of %VERSION% in the file will be replaced with + 1.2345 and so forth. + """ + contents = node.get_text_contents() + if not subs: return contents + for (k,v) in subs: + contents = re.sub(k, v, contents) + return contents + +def _action(target, source, env): + # prepare the line separator + linesep = env['LINESEPARATOR'] + if linesep is None: + linesep = os.linesep + elif is_String(linesep): + pass + elif isinstance(linesep, Value): + linesep = linesep.get_text_contents() + else: + raise SCons.Errors.UserError( + 'unexpected type/class for LINESEPARATOR: %s' + % repr(linesep), None) + + # create a dictionary to use for the substitutions + if not env.has_key('SUBST_DICT'): + subs = None # no substitutions + else: + d = env['SUBST_DICT'] + if is_Dict(d): + d = d.items() + elif is_Sequence(d): + pass + else: + raise SCons.Errors.UserError('SUBST_DICT must be dict or sequence') + subs = [] + for (k,v) in d: + if callable(v): + v = v() + if is_String(v): + v = env.subst(v) + else: + v = str(v) + subs.append((k,v)) + + # write the file + try: + fd = open(target[0].get_path(), "wb") + except (OSError,IOError), e: + raise SCons.Errors.UserError("Can't write target file %s" % target[0]) + # separate lines by 'linesep' only if linesep is not empty + lsep = None + for s in source: + if lsep: fd.write(lsep) + fd.write(_do_subst(s, subs)) + lsep = linesep + fd.close() + +def _strfunc(target, source, env): + return "Creating '%s'" % target[0] + +def _convert_list_R(newlist, sources): + for elem in sources: + if is_Sequence(elem): + _convert_list_R(newlist, elem) + elif isinstance(elem, Node): + newlist.append(elem) + else: + newlist.append(Value(elem)) +def _convert_list(target, source, env): + if len(target) != 1: + raise SCons.Errors.UserError("Only one target file allowed") + newlist = [] + _convert_list_R(newlist, source) + 
return target, newlist + +_common_varlist = ['SUBST_DICT', 'LINESEPARATOR'] + +_text_varlist = _common_varlist + ['TEXTFILEPREFIX', 'TEXTFILESUFFIX'] +_text_builder = SCons.Builder.Builder( + action = SCons.Action.Action(_action, _strfunc, varlist = _text_varlist), + source_factory = Value, + emitter = _convert_list, + prefix = '$TEXTFILEPREFIX', + suffix = '$TEXTFILESUFFIX', + ) + +_subst_varlist = _common_varlist + ['SUBSTFILEPREFIX', 'TEXTFILESUFFIX'] +_subst_builder = SCons.Builder.Builder( + action = SCons.Action.Action(_action, _strfunc, varlist = _subst_varlist), + source_factory = SCons.Node.FS.File, + emitter = _convert_list, + prefix = '$SUBSTFILEPREFIX', + suffix = '$SUBSTFILESUFFIX', + src_suffix = ['.in'], + ) + +def generate(env): + env['LINESEPARATOR'] = os.linesep + env['BUILDERS']['Textfile'] = _text_builder + env['TEXTFILEPREFIX'] = '' + env['TEXTFILESUFFIX'] = '.txt' + env['BUILDERS']['Substfile'] = _subst_builder + env['SUBSTFILEPREFIX'] = '' + env['SUBSTFILESUFFIX'] = '' + +def exists(env): + return 1 + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/tlib.py b/engine/SCons/Tool/tlib.py new file mode 100644 index 0000000..601d114 --- /dev/null +++ b/engine/SCons/Tool/tlib.py @@ -0,0 +1,53 @@ +"""SCons.Tool.tlib + +XXX + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/tlib.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Tool +import SCons.Tool.bcc32 +import SCons.Util + +def generate(env): + SCons.Tool.bcc32.findIt('tlib', env) + """Add Builders and construction variables for ar to an Environment.""" + SCons.Tool.createStaticLibBuilder(env) + env['AR'] = 'tlib' + env['ARFLAGS'] = SCons.Util.CLVar('') + env['ARCOM'] = '$AR $TARGET $ARFLAGS /a $SOURCES' + env['LIBPREFIX'] = '' + env['LIBSUFFIX'] = '.lib' + +def exists(env): + return SCons.Tool.bcc32.findIt('tlib', env) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/wix.py b/engine/SCons/Tool/wix.py new file mode 100644 index 0000000..896160c --- /dev/null +++ b/engine/SCons/Tool/wix.py @@ -0,0 +1,100 @@ +"""SCons.Tool.wix + +Tool-specific initialization for wix, the Windows Installer XML Tool. + +There normally shouldn't be any need to import this module directly. 
+It will usually be imported through the generic SCons.Tool.Tool() +selection method. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/wix.py 4577 2009/12/27 19:43:56 scons" + +import SCons.Builder +import SCons.Action +import os +import string + +def generate(env): + """Add Builders and construction variables for WiX to an Environment.""" + if not exists(env): + return + + env['WIXCANDLEFLAGS'] = ['-nologo'] + env['WIXCANDLEINCLUDE'] = [] + env['WIXCANDLECOM'] = '$WIXCANDLE $WIXCANDLEFLAGS -I $WIXCANDLEINCLUDE -o ${TARGET} ${SOURCE}' + + env['WIXLIGHTFLAGS'].append( '-nologo' ) + env['WIXLIGHTCOM'] = "$WIXLIGHT $WIXLIGHTFLAGS -out ${TARGET} ${SOURCES}" + + object_builder = SCons.Builder.Builder( + action = '$WIXCANDLECOM', + suffix = '.wxiobj', + src_suffix = '.wxs') + + linker_builder = SCons.Builder.Builder( + action = '$WIXLIGHTCOM', + src_suffix = '.wxiobj', + src_builder = object_builder) + + env['BUILDERS']['WiX'] = linker_builder + +def exists(env): + env['WIXCANDLE'] = 'candle.exe' + env['WIXLIGHT'] = 'light.exe' + + # try to find the candle.exe and light.exe tools and + # add the install directory to light libpath. + #for path in os.environ['PATH'].split(os.pathsep): + for path in string.split(os.environ['PATH'], os.pathsep): + if not path: + continue + + # workaround for some weird python win32 bug. + if path[0] == '"' and path[-1:]=='"': + path = path[1:-1] + + # normalize the path + path = os.path.normpath(path) + + # search for the tools in the PATH environment variable + try: + if env['WIXCANDLE'] in os.listdir(path) and\ + env['WIXLIGHT'] in os.listdir(path): + env.PrependENVPath('PATH', path) + env['WIXLIGHTFLAGS'] = [ os.path.join( path, 'wixui.wixlib' ), + '-loc', + os.path.join( path, 'WixUI_en-us.wxl' ) ] + return 1 + except OSError: + pass # ignore this, could be a stale PATH entry. + + return None + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/yacc.py b/engine/SCons/Tool/yacc.py new file mode 100644 index 0000000..a31b63c --- /dev/null +++ b/engine/SCons/Tool/yacc.py @@ -0,0 +1,131 @@ +"""SCons.Tool.yacc + +Tool-specific initialization for yacc. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. 
+ +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Tool/yacc.py 4577 2009/12/27 19:43:56 scons" + +import os.path +import string + +import SCons.Defaults +import SCons.Tool +import SCons.Util + +YaccAction = SCons.Action.Action("$YACCCOM", "$YACCCOMSTR") + +def _yaccEmitter(target, source, env, ysuf, hsuf): + yaccflags = env.subst("$YACCFLAGS", target=target, source=source) + flags = SCons.Util.CLVar(yaccflags) + targetBase, targetExt = os.path.splitext(SCons.Util.to_String(target[0])) + + if '.ym' in ysuf: # If using Objective-C + target = [targetBase + ".m"] # the extension is ".m". + + + # If -d is specified on the command line, yacc will emit a .h + # or .hpp file with the same name as the .c or .cpp output file. + if '-d' in flags: + target.append(targetBase + env.subst(hsuf, target=target, source=source)) + + # If -g is specified on the command line, yacc will emit a .vcg + # file with the same base name as the .y, .yacc, .ym or .yy file. + if "-g" in flags: + base, ext = os.path.splitext(SCons.Util.to_String(source[0])) + target.append(base + env.subst("$YACCVCGFILESUFFIX")) + + # With --defines and --graph, the name of the file is totally defined + # in the options. + fileGenOptions = ["--defines=", "--graph="] + for option in flags: + for fileGenOption in fileGenOptions: + l = len(fileGenOption) + if option[:l] == fileGenOption: + # A file generating option is present, so add the file + # name to the list of targets. 
+ fileName = string.strip(option[l:]) + target.append(fileName) + + return (target, source) + +def yEmitter(target, source, env): + return _yaccEmitter(target, source, env, ['.y', '.yacc'], '$YACCHFILESUFFIX') + +def ymEmitter(target, source, env): + return _yaccEmitter(target, source, env, ['.ym'], '$YACCHFILESUFFIX') + +def yyEmitter(target, source, env): + return _yaccEmitter(target, source, env, ['.yy'], '$YACCHXXFILESUFFIX') + +def generate(env): + """Add Builders and construction variables for yacc to an Environment.""" + c_file, cxx_file = SCons.Tool.createCFileBuilders(env) + + # C + c_file.add_action('.y', YaccAction) + c_file.add_emitter('.y', yEmitter) + + c_file.add_action('.yacc', YaccAction) + c_file.add_emitter('.yacc', yEmitter) + + # Objective-C + c_file.add_action('.ym', YaccAction) + c_file.add_emitter('.ym', ymEmitter) + + # C++ + cxx_file.add_action('.yy', YaccAction) + cxx_file.add_emitter('.yy', yyEmitter) + + env['YACC'] = env.Detect('bison') or 'yacc' + env['YACCFLAGS'] = SCons.Util.CLVar('') + env['YACCCOM'] = '$YACC $YACCFLAGS -o $TARGET $SOURCES' + env['YACCHFILESUFFIX'] = '.h' + + # Apparently, OS X now creates file.hpp like everybody else + # I have no idea when it changed; it was fixed in 10.4 + #if env['PLATFORM'] == 'darwin': + # # Bison on Mac OS X just appends ".h" to the generated target .cc + # # or .cpp file name. Hooray for delayed expansion of variables. + # env['YACCHXXFILESUFFIX'] = '${TARGET.suffix}.h' + #else: + # env['YACCHXXFILESUFFIX'] = '.hpp' + env['YACCHXXFILESUFFIX'] = '.hpp' + + env['YACCVCGFILESUFFIX'] = '.vcg' + +def exists(env): + return env.Detect(['bison', 'yacc']) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Tool/zip.py b/engine/SCons/Tool/zip.py new file mode 100644 index 0000000..4bf884f --- /dev/null +++ b/engine/SCons/Tool/zip.py @@ -0,0 +1,100 @@ +"""SCons.Tool.zip + +Tool-specific initialization for zip. + +There normally shouldn't be any need to import this module directly. +It will usually be imported through the generic SCons.Tool.Tool() +selection method. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
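# A minimal sketch of the yacc tool (yacc.py above) in use. The grammar file
# name is hypothetical; in a real SConstruct the Environment class is already
# in scope.
from SCons.Environment import Environment

env = Environment(tools=['default', 'yacc'], YACCFLAGS='-d')
# Because '-d' is in $YACCFLAGS, the emitter also registers parser.h
# (via $YACCHFILESUFFIX) as a target alongside the generated C source.
env.CFile(target='parser.c', source='parser.y')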
+# + +__revision__ = "src/engine/SCons/Tool/zip.py 4577 2009/12/27 19:43:56 scons" + +import os.path + +import SCons.Builder +import SCons.Defaults +import SCons.Node.FS +import SCons.Util + +try: + import zipfile + internal_zip = 1 +except ImportError: + internal_zip = 0 + +if internal_zip: + zipcompression = zipfile.ZIP_DEFLATED + def zip(target, source, env): + def visit(arg, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + arg.write(path) + compression = env.get('ZIPCOMPRESSION', 0) + zf = zipfile.ZipFile(str(target[0]), 'w', compression) + for s in source: + if s.isdir(): + os.path.walk(str(s), visit, zf) + else: + zf.write(str(s)) + zf.close() +else: + zipcompression = 0 + zip = "$ZIP $ZIPFLAGS ${TARGET.abspath} $SOURCES" + + +zipAction = SCons.Action.Action(zip, varlist=['ZIPCOMPRESSION']) + +ZipBuilder = SCons.Builder.Builder(action = SCons.Action.Action('$ZIPCOM', '$ZIPCOMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$ZIPSUFFIX', + multi = 1) + + +def generate(env): + """Add Builders and construction variables for zip to an Environment.""" + try: + bld = env['BUILDERS']['Zip'] + except KeyError: + bld = ZipBuilder + env['BUILDERS']['Zip'] = bld + + env['ZIP'] = 'zip' + env['ZIPFLAGS'] = SCons.Util.CLVar('') + env['ZIPCOM'] = zipAction + env['ZIPCOMPRESSION'] = zipcompression + env['ZIPSUFFIX'] = '.zip' + +def exists(env): + return internal_zip or env.Detect('zip') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Util.py b/engine/SCons/Util.py new file mode 100644 index 0000000..e1f33c7 --- /dev/null +++ b/engine/SCons/Util.py @@ -0,0 +1,1645 @@ +"""SCons.Util + +Various utility functions go here. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Util.py 4577 2009/12/27 19:43:56 scons" + +import copy +import os +import os.path +import re +import string +import sys +import types + +from UserDict import UserDict +from UserList import UserList +from UserString import UserString + +# Don't "from types import ..." these because we need to get at the +# types module later to look for UnicodeType. 
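+# (That later check is the hasattr(types, 'UnicodeType') test further
+# down in this module.)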
+DictType = types.DictType +InstanceType = types.InstanceType +ListType = types.ListType +StringType = types.StringType +TupleType = types.TupleType + +def dictify(keys, values, result={}): + for k, v in zip(keys, values): + result[k] = v + return result + +_altsep = os.altsep +if _altsep is None and sys.platform == 'win32': + # My ActivePython 2.0.1 doesn't set os.altsep! What gives? + _altsep = '/' +if _altsep: + def rightmost_separator(path, sep, _altsep=_altsep): + rfind = string.rfind + return max(rfind(path, sep), rfind(path, _altsep)) +else: + rightmost_separator = string.rfind + +# First two from the Python Cookbook, just for completeness. +# (Yeah, yeah, YAGNI...) +def containsAny(str, set): + """Check whether sequence str contains ANY of the items in set.""" + for c in set: + if c in str: return 1 + return 0 + +def containsAll(str, set): + """Check whether sequence str contains ALL of the items in set.""" + for c in set: + if c not in str: return 0 + return 1 + +def containsOnly(str, set): + """Check whether sequence str contains ONLY items in set.""" + for c in str: + if c not in set: return 0 + return 1 + +def splitext(path): + "Same as os.path.splitext() but faster." + sep = rightmost_separator(path, os.sep) + dot = string.rfind(path, '.') + # An ext is only real if it has at least one non-digit char + if dot > sep and not containsOnly(path[dot:], "0123456789."): + return path[:dot],path[dot:] + else: + return path,"" + +def updrive(path): + """ + Make the drive letter (if any) upper case. + This is useful because Windows is inconsitent on the case + of the drive letter, which can cause inconsistencies when + calculating command signatures. + """ + drive, rest = os.path.splitdrive(path) + if drive: + path = string.upper(drive) + rest + return path + +class NodeList(UserList): + """This class is almost exactly like a regular list of Nodes + (actually it can hold any object), with one important difference. + If you try to get an attribute from this list, it will return that + attribute from every item in the list. For example: + + >>> someList = NodeList([ ' foo ', ' bar ' ]) + >>> someList.strip() + [ 'foo', 'bar' ] + """ + def __nonzero__(self): + return len(self.data) != 0 + + def __str__(self): + return string.join(map(str, self.data)) + + def __iter__(self): + return iter(self.data) + + def __call__(self, *args, **kwargs): + result = map(lambda x, args=args, kwargs=kwargs: apply(x, + args, + kwargs), + self.data) + return self.__class__(result) + + def __getattr__(self, name): + result = map(lambda x, n=name: getattr(x, n), self.data) + return self.__class__(result) + + +_get_env_var = re.compile(r'^\$([_a-zA-Z]\w*|{[_a-zA-Z]\w*})$') + +def get_environment_var(varstr): + """Given a string, first determine if it looks like a reference + to a single environment variable, like "$FOO" or "${FOO}". + If so, return that variable with no decorations ("FOO"). + If not, return None.""" + mo=_get_env_var.match(to_String(varstr)) + if mo: + var = mo.group(1) + if var[0] == '{': + return var[1:-1] + else: + return var + else: + return None + +class DisplayEngine: + def __init__(self): + self.__call__ = self.print_it + + def print_it(self, text, append_newline=1): + if append_newline: text = text + '\n' + try: + sys.stdout.write(text) + except IOError: + # Stdout might be connected to a pipe that has been closed + # by now. The most likely reason for the pipe being closed + # is that the user has press ctrl-c. It this is the case, + # then SCons is currently shutdown. 
We therefore ignore + # IOError's here so that SCons can continue and shutdown + # properly so that the .sconsign is correctly written + # before SCons exits. + pass + + def dont_print(self, text, append_newline=1): + pass + + def set_mode(self, mode): + if mode: + self.__call__ = self.print_it + else: + self.__call__ = self.dont_print + +def render_tree(root, child_func, prune=0, margin=[0], visited={}): + """ + Render a tree of nodes into an ASCII tree view. + root - the root node of the tree + child_func - the function called to get the children of a node + prune - don't visit the same node twice + margin - the format of the left margin to use for children of root. + 1 results in a pipe, and 0 results in no pipe. + visited - a dictionary of visited nodes in the current branch if not prune, + or in the whole tree if prune. + """ + + rname = str(root) + + children = child_func(root) + retval = "" + for pipe in margin[:-1]: + if pipe: + retval = retval + "| " + else: + retval = retval + " " + + if visited.has_key(rname): + return retval + "+-[" + rname + "]\n" + + retval = retval + "+-" + rname + "\n" + if not prune: + visited = copy.copy(visited) + visited[rname] = 1 + + for i in range(len(children)): + margin.append(i<len(children)-1) + retval = retval + render_tree(children[i], child_func, prune, margin, visited +) + margin.pop() + + return retval + +IDX = lambda N: N and 1 or 0 + +def print_tree(root, child_func, prune=0, showtags=0, margin=[0], visited={}): + """ + Print a tree of nodes. This is like render_tree, except it prints + lines directly instead of creating a string representation in memory, + so that huge trees can be printed. + + root - the root node of the tree + child_func - the function called to get the children of a node + prune - don't visit the same node twice + showtags - print status information to the left of each node line + margin - the format of the left margin to use for children of root. + 1 results in a pipe, and 0 results in no pipe. + visited - a dictionary of visited nodes in the current branch if not prune, + or in the whole tree if prune. 
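+
+    A sketched call (illustrative; child_func is any callable returning a
+    node's children):  print_tree(root, lambda n: n.children())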
+ """ + + rname = str(root) + + if showtags: + + if showtags == 2: + print ' E = exists' + print ' R = exists in repository only' + print ' b = implicit builder' + print ' B = explicit builder' + print ' S = side effect' + print ' P = precious' + print ' A = always build' + print ' C = current' + print ' N = no clean' + print ' H = no cache' + print '' + + tags = ['['] + tags.append(' E'[IDX(root.exists())]) + tags.append(' R'[IDX(root.rexists() and not root.exists())]) + tags.append(' BbB'[[0,1][IDX(root.has_explicit_builder())] + + [0,2][IDX(root.has_builder())]]) + tags.append(' S'[IDX(root.side_effect)]) + tags.append(' P'[IDX(root.precious)]) + tags.append(' A'[IDX(root.always_build)]) + tags.append(' C'[IDX(root.is_up_to_date())]) + tags.append(' N'[IDX(root.noclean)]) + tags.append(' H'[IDX(root.nocache)]) + tags.append(']') + + else: + tags = [] + + def MMM(m): + return [" ","| "][m] + margins = map(MMM, margin[:-1]) + + children = child_func(root) + + if prune and visited.has_key(rname) and children: + print string.join(tags + margins + ['+-[', rname, ']'], '') + return + + print string.join(tags + margins + ['+-', rname], '') + + visited[rname] = 1 + + if children: + margin.append(1) + idx = IDX(showtags) + for C in children[:-1]: + print_tree(C, child_func, prune, idx, margin, visited) + margin[-1] = 0 + print_tree(children[-1], child_func, prune, idx, margin, visited) + margin.pop() + + + +# Functions for deciding if things are like various types, mainly to +# handle UserDict, UserList and UserString like their underlying types. +# +# Yes, all of this manual testing breaks polymorphism, and the real +# Pythonic way to do all of this would be to just try it and handle the +# exception, but handling the exception when it's not the right type is +# often too slow. + +try: + class mystr(str): + pass +except TypeError: + # An older Python version without new-style classes. + # + # The actual implementations here have been selected after timings + # coded up in in bench/is_types.py (from the SCons source tree, + # see the scons-src distribution), mostly against Python 1.5.2. + # Key results from those timings: + # + # -- Storing the type of the object in a variable (t = type(obj)) + # slows down the case where it's a native type and the first + # comparison will match, but nicely speeds up the case where + # it's a different native type. Since that's going to be + # common, it's a good tradeoff. + # + # -- The data show that calling isinstance() on an object that's + # a native type (dict, list or string) is expensive enough + # that checking up front for whether the object is of type + # InstanceType is a pretty big win, even though it does slow + # down the case where it really *is* an object instance a + # little bit. 
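+    # For example (illustrative), a native dict and a UserDict instance
+    # both satisfy is_Dict() below, while a string does not:
+    #
+    #     is_Dict({}) and is_Dict(UserDict()) and not is_Dict('{}')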
+ def is_Dict(obj): + t = type(obj) + return t is DictType or \ + (t is InstanceType and isinstance(obj, UserDict)) + + def is_List(obj): + t = type(obj) + return t is ListType \ + or (t is InstanceType and isinstance(obj, UserList)) + + def is_Sequence(obj): + t = type(obj) + return t is ListType \ + or t is TupleType \ + or (t is InstanceType and isinstance(obj, UserList)) + + def is_Tuple(obj): + t = type(obj) + return t is TupleType + + if hasattr(types, 'UnicodeType'): + def is_String(obj): + t = type(obj) + return t is StringType \ + or t is UnicodeType \ + or (t is InstanceType and isinstance(obj, UserString)) + else: + def is_String(obj): + t = type(obj) + return t is StringType \ + or (t is InstanceType and isinstance(obj, UserString)) + + def is_Scalar(obj): + return is_String(obj) or not is_Sequence(obj) + + def flatten(obj, result=None): + """Flatten a sequence to a non-nested list. + + Flatten() converts either a single scalar or a nested sequence + to a non-nested list. Note that flatten() considers strings + to be scalars instead of sequences like Python would. + """ + if is_Scalar(obj): + return [obj] + if result is None: + result = [] + for item in obj: + if is_Scalar(item): + result.append(item) + else: + flatten_sequence(item, result) + return result + + def flatten_sequence(sequence, result=None): + """Flatten a sequence to a non-nested list. + + Same as flatten(), but it does not handle the single scalar + case. This is slightly more efficient when one knows that + the sequence to flatten can not be a scalar. + """ + if result is None: + result = [] + for item in sequence: + if is_Scalar(item): + result.append(item) + else: + flatten_sequence(item, result) + return result + + # + # Generic convert-to-string functions that abstract away whether or + # not the Python we're executing has Unicode support. The wrapper + # to_String_for_signature() will use a for_signature() method if the + # specified object has one. + # + if hasattr(types, 'UnicodeType'): + UnicodeType = types.UnicodeType + def to_String(s): + if isinstance(s, UserString): + t = type(s.data) + else: + t = type(s) + if t is UnicodeType: + return unicode(s) + else: + return str(s) + else: + to_String = str + + def to_String_for_signature(obj): + try: + f = obj.for_signature + except AttributeError: + return to_String_for_subst(obj) + else: + return f() + + def to_String_for_subst(s): + if is_Sequence( s ): + return string.join( map(to_String_for_subst, s) ) + + return to_String( s ) + +else: + # A modern Python version with new-style classes, so we can just use + # isinstance(). + # + # We are using the following trick to speed-up these + # functions. Default arguments are used to take a snapshot of the + # the global functions and constants used by these functions. This + # transforms accesses to global variable into local variables + # accesses (i.e. LOAD_FAST instead of LOAD_GLOBAL). + + DictTypes = (dict, UserDict) + ListTypes = (list, UserList) + SequenceTypes = (list, tuple, UserList) + + # Empirically, Python versions with new-style classes all have + # unicode. + # + # Note that profiling data shows a speed-up when comparing + # explicitely with str and unicode instead of simply comparing + # with basestring. (at least on Python 2.5.1) + StringTypes = (str, unicode, UserString) + + # Empirically, it is faster to check explicitely for str and + # unicode than for basestring. 
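+    # The default-argument pattern used throughout this branch, sketched
+    # with a hypothetical helper (for illustration only):
+    #
+    #     def is_Foo(obj, isinstance=isinstance, FooTypes=FooTypes):
+    #         return isinstance(obj, FooTypes)
+    #
+    # Binding the globals as default arguments turns each lookup into a
+    # fast local access (LOAD_FAST) instead of a global one (LOAD_GLOBAL).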
+ BaseStringTypes = (str, unicode) + + def is_Dict(obj, isinstance=isinstance, DictTypes=DictTypes): + return isinstance(obj, DictTypes) + + def is_List(obj, isinstance=isinstance, ListTypes=ListTypes): + return isinstance(obj, ListTypes) + + def is_Sequence(obj, isinstance=isinstance, SequenceTypes=SequenceTypes): + return isinstance(obj, SequenceTypes) + + def is_Tuple(obj, isinstance=isinstance, tuple=tuple): + return isinstance(obj, tuple) + + def is_String(obj, isinstance=isinstance, StringTypes=StringTypes): + return isinstance(obj, StringTypes) + + def is_Scalar(obj, isinstance=isinstance, StringTypes=StringTypes, SequenceTypes=SequenceTypes): + # Profiling shows that there is an impressive speed-up of 2x + # when explicitely checking for strings instead of just not + # sequence when the argument (i.e. obj) is already a string. + # But, if obj is a not string than it is twice as fast to + # check only for 'not sequence'. The following code therefore + # assumes that the obj argument is a string must of the time. + return isinstance(obj, StringTypes) or not isinstance(obj, SequenceTypes) + + def do_flatten(sequence, result, isinstance=isinstance, + StringTypes=StringTypes, SequenceTypes=SequenceTypes): + for item in sequence: + if isinstance(item, StringTypes) or not isinstance(item, SequenceTypes): + result.append(item) + else: + do_flatten(item, result) + + def flatten(obj, isinstance=isinstance, StringTypes=StringTypes, + SequenceTypes=SequenceTypes, do_flatten=do_flatten): + """Flatten a sequence to a non-nested list. + + Flatten() converts either a single scalar or a nested sequence + to a non-nested list. Note that flatten() considers strings + to be scalars instead of sequences like Python would. + """ + if isinstance(obj, StringTypes) or not isinstance(obj, SequenceTypes): + return [obj] + result = [] + for item in obj: + if isinstance(item, StringTypes) or not isinstance(item, SequenceTypes): + result.append(item) + else: + do_flatten(item, result) + return result + + def flatten_sequence(sequence, isinstance=isinstance, StringTypes=StringTypes, + SequenceTypes=SequenceTypes, do_flatten=do_flatten): + """Flatten a sequence to a non-nested list. + + Same as flatten(), but it does not handle the single scalar + case. This is slightly more efficient when one knows that + the sequence to flatten can not be a scalar. + """ + result = [] + for item in sequence: + if isinstance(item, StringTypes) or not isinstance(item, SequenceTypes): + result.append(item) + else: + do_flatten(item, result) + return result + + + # + # Generic convert-to-string functions that abstract away whether or + # not the Python we're executing has Unicode support. The wrapper + # to_String_for_signature() will use a for_signature() method if the + # specified object has one. + # + def to_String(s, + isinstance=isinstance, str=str, + UserString=UserString, BaseStringTypes=BaseStringTypes): + if isinstance(s,BaseStringTypes): + # Early out when already a string! + return s + elif isinstance(s, UserString): + # s.data can only be either a unicode or a regular + # string. Please see the UserString initializer. + return s.data + else: + return str(s) + + def to_String_for_subst(s, + isinstance=isinstance, join=string.join, str=str, to_String=to_String, + BaseStringTypes=BaseStringTypes, SequenceTypes=SequenceTypes, + UserString=UserString): + + # Note that the test cases are sorted by order of probability. 
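+        # (Plain strings first, then sequences, then UserString, then
+        # everything else.)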
+ if isinstance(s, BaseStringTypes): + return s + elif isinstance(s, SequenceTypes): + l = [] + for e in s: + l.append(to_String_for_subst(e)) + return join( s ) + elif isinstance(s, UserString): + # s.data can only be either a unicode or a regular + # string. Please see the UserString initializer. + return s.data + else: + return str(s) + + def to_String_for_signature(obj, to_String_for_subst=to_String_for_subst, + AttributeError=AttributeError): + try: + f = obj.for_signature + except AttributeError: + return to_String_for_subst(obj) + else: + return f() + + + +# The SCons "semi-deep" copy. +# +# This makes separate copies of lists (including UserList objects) +# dictionaries (including UserDict objects) and tuples, but just copies +# references to anything else it finds. +# +# A special case is any object that has a __semi_deepcopy__() method, +# which we invoke to create the copy, which is used by the BuilderDict +# class because of its extra initialization argument. +# +# The dispatch table approach used here is a direct rip-off from the +# normal Python copy module. + +_semi_deepcopy_dispatch = d = {} + +def _semi_deepcopy_dict(x): + copy = {} + for key, val in x.items(): + # The regular Python copy.deepcopy() also deepcopies the key, + # as follows: + # + # copy[semi_deepcopy(key)] = semi_deepcopy(val) + # + # Doesn't seem like we need to, but we'll comment it just in case. + copy[key] = semi_deepcopy(val) + return copy +d[types.DictionaryType] = _semi_deepcopy_dict + +def _semi_deepcopy_list(x): + return map(semi_deepcopy, x) +d[types.ListType] = _semi_deepcopy_list + +def _semi_deepcopy_tuple(x): + return tuple(map(semi_deepcopy, x)) +d[types.TupleType] = _semi_deepcopy_tuple + +def _semi_deepcopy_inst(x): + if hasattr(x, '__semi_deepcopy__'): + return x.__semi_deepcopy__() + elif isinstance(x, UserDict): + return x.__class__(_semi_deepcopy_dict(x)) + elif isinstance(x, UserList): + return x.__class__(_semi_deepcopy_list(x)) + else: + return x +d[types.InstanceType] = _semi_deepcopy_inst + +def semi_deepcopy(x): + copier = _semi_deepcopy_dispatch.get(type(x)) + if copier: + return copier(x) + else: + return x + + + +class Proxy: + """A simple generic Proxy class, forwarding all calls to + subject. So, for the benefit of the python newbie, what does + this really mean? Well, it means that you can take an object, let's + call it 'objA', and wrap it in this Proxy class, with a statement + like this + + proxyObj = Proxy(objA), + + Then, if in the future, you do something like this + + x = proxyObj.var1, + + since Proxy does not have a 'var1' attribute (but presumably objA does), + the request actually is equivalent to saying + + x = objA.var1 + + Inherit from this class to create a Proxy.""" + + def __init__(self, subject): + """Wrap an object as a Proxy object""" + self.__subject = subject + + def __getattr__(self, name): + """Retrieve an attribute from the wrapped object. 
If the named + attribute doesn't exist, AttributeError is raised""" + return getattr(self.__subject, name) + + def get(self): + """Retrieve the entire wrapped object""" + return self.__subject + + def __cmp__(self, other): + if issubclass(other.__class__, self.__subject.__class__): + return cmp(self.__subject, other) + return cmp(self.__dict__, other.__dict__) + +# attempt to load the windows registry module: +can_read_reg = 0 +try: + import _winreg + + can_read_reg = 1 + hkey_mod = _winreg + + RegOpenKeyEx = _winreg.OpenKeyEx + RegEnumKey = _winreg.EnumKey + RegEnumValue = _winreg.EnumValue + RegQueryValueEx = _winreg.QueryValueEx + RegError = _winreg.error + +except ImportError: + try: + import win32api + import win32con + can_read_reg = 1 + hkey_mod = win32con + + RegOpenKeyEx = win32api.RegOpenKeyEx + RegEnumKey = win32api.RegEnumKey + RegEnumValue = win32api.RegEnumValue + RegQueryValueEx = win32api.RegQueryValueEx + RegError = win32api.error + + except ImportError: + class _NoError(Exception): + pass + RegError = _NoError + +if can_read_reg: + HKEY_CLASSES_ROOT = hkey_mod.HKEY_CLASSES_ROOT + HKEY_LOCAL_MACHINE = hkey_mod.HKEY_LOCAL_MACHINE + HKEY_CURRENT_USER = hkey_mod.HKEY_CURRENT_USER + HKEY_USERS = hkey_mod.HKEY_USERS + + def RegGetValue(root, key): + """This utility function returns a value in the registry + without having to open the key first. Only available on + Windows platforms with a version of Python that can read the + registry. Returns the same thing as + SCons.Util.RegQueryValueEx, except you just specify the entire + path to the value, and don't have to bother opening the key + first. So: + + Instead of: + k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, + r'SOFTWARE\Microsoft\Windows\CurrentVersion') + out = SCons.Util.RegQueryValueEx(k, + 'ProgramFilesDir') + + You can write: + out = SCons.Util.RegGetValue(SCons.Util.HKEY_LOCAL_MACHINE, + r'SOFTWARE\Microsoft\Windows\CurrentVersion\ProgramFilesDir') + """ + # I would use os.path.split here, but it's not a filesystem + # path... + p = key.rfind('\\') + 1 + keyp = key[:p-1] # -1 to omit trailing slash + val = key[p:] + k = RegOpenKeyEx(root, keyp) + return RegQueryValueEx(k,val) +else: + try: + e = WindowsError + except NameError: + # Make sure we have a definition of WindowsError so we can + # run platform-independent tests of Windows functionality on + # platforms other than Windows. (WindowsError is, in fact, an + # OSError subclass on Windows.) 
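+        # A sketched consequence (illustrative): platform-independent code
+        # can write
+        #
+        #     try:
+        #         RegGetValue(HKEY_LOCAL_MACHINE, key)
+        #     except WindowsError:
+        #         pass
+        #
+        # and still run here, where RegGetValue() below simply raises the
+        # substitute exception.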
+ class WindowsError(OSError): + pass + import __builtin__ + __builtin__.WindowsError = WindowsError + else: + del e + + HKEY_CLASSES_ROOT = None + HKEY_LOCAL_MACHINE = None + HKEY_CURRENT_USER = None + HKEY_USERS = None + + def RegGetValue(root, key): + raise WindowsError + + def RegOpenKeyEx(root, key): + raise WindowsError + +if sys.platform == 'win32': + + def WhereIs(file, path=None, pathext=None, reject=[]): + if path is None: + try: + path = os.environ['PATH'] + except KeyError: + return None + if is_String(path): + path = string.split(path, os.pathsep) + if pathext is None: + try: + pathext = os.environ['PATHEXT'] + except KeyError: + pathext = '.COM;.EXE;.BAT;.CMD' + if is_String(pathext): + pathext = string.split(pathext, os.pathsep) + for ext in pathext: + if string.lower(ext) == string.lower(file[-len(ext):]): + pathext = [''] + break + if not is_List(reject) and not is_Tuple(reject): + reject = [reject] + for dir in path: + f = os.path.join(dir, file) + for ext in pathext: + fext = f + ext + if os.path.isfile(fext): + try: + reject.index(fext) + except ValueError: + return os.path.normpath(fext) + continue + return None + +elif os.name == 'os2': + + def WhereIs(file, path=None, pathext=None, reject=[]): + if path is None: + try: + path = os.environ['PATH'] + except KeyError: + return None + if is_String(path): + path = string.split(path, os.pathsep) + if pathext is None: + pathext = ['.exe', '.cmd'] + for ext in pathext: + if string.lower(ext) == string.lower(file[-len(ext):]): + pathext = [''] + break + if not is_List(reject) and not is_Tuple(reject): + reject = [reject] + for dir in path: + f = os.path.join(dir, file) + for ext in pathext: + fext = f + ext + if os.path.isfile(fext): + try: + reject.index(fext) + except ValueError: + return os.path.normpath(fext) + continue + return None + +else: + + def WhereIs(file, path=None, pathext=None, reject=[]): + import stat + if path is None: + try: + path = os.environ['PATH'] + except KeyError: + return None + if is_String(path): + path = string.split(path, os.pathsep) + if not is_List(reject) and not is_Tuple(reject): + reject = [reject] + for d in path: + f = os.path.join(d, file) + if os.path.isfile(f): + try: + st = os.stat(f) + except OSError: + # os.stat() raises OSError, not IOError if the file + # doesn't exist, so in this case we let IOError get + # raised so as to not mask possibly serious disk or + # network issues. + continue + if stat.S_IMODE(st[stat.ST_MODE]) & 0111: + try: + reject.index(f) + except ValueError: + return os.path.normpath(f) + continue + return None + +def PrependPath(oldpath, newpath, sep = os.pathsep, + delete_existing=1, canonicalize=None): + """This prepends newpath elements to the given oldpath. Will only + add any particular path once (leaving the first one it encounters + and ignoring the rest, to preserve path order), and will + os.path.normpath and os.path.normcase all paths to help assure + this. This can also handle the case where the given old path + variable is a list instead of a string, in which case a list will + be returned instead of a string. + + Example: + Old Path: "/foo/bar:/foo" + New Path: "/biz/boom:/foo" + Result: "/biz/boom:/foo:/foo/bar" + + If delete_existing is 0, then adding a path that exists will + not move it to the beginning; it will stay where it is in the + list. + + If canonicalize is not None, it is applied to each element of + newpath before use. 
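+
+    A sketched call (illustrative):
+        env['ENV']['PATH'] = PrependPath(env['ENV']['PATH'], '/usr/local/bin')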
+ """ + + orig = oldpath + is_list = 1 + paths = orig + if not is_List(orig) and not is_Tuple(orig): + paths = string.split(paths, sep) + is_list = 0 + + if is_String(newpath): + newpaths = string.split(newpath, sep) + elif not is_List(newpath) and not is_Tuple(newpath): + newpaths = [ newpath ] # might be a Dir + else: + newpaths = newpath + + if canonicalize: + newpaths=map(canonicalize, newpaths) + + if not delete_existing: + # First uniquify the old paths, making sure to + # preserve the first instance (in Unix/Linux, + # the first one wins), and remembering them in normpaths. + # Then insert the new paths at the head of the list + # if they're not already in the normpaths list. + result = [] + normpaths = [] + for path in paths: + if not path: + continue + normpath = os.path.normpath(os.path.normcase(path)) + if normpath not in normpaths: + result.append(path) + normpaths.append(normpath) + newpaths.reverse() # since we're inserting at the head + for path in newpaths: + if not path: + continue + normpath = os.path.normpath(os.path.normcase(path)) + if normpath not in normpaths: + result.insert(0, path) + normpaths.append(normpath) + paths = result + + else: + newpaths = newpaths + paths # prepend new paths + + normpaths = [] + paths = [] + # now we add them only if they are unique + for path in newpaths: + normpath = os.path.normpath(os.path.normcase(path)) + if path and not normpath in normpaths: + paths.append(path) + normpaths.append(normpath) + + if is_list: + return paths + else: + return string.join(paths, sep) + +def AppendPath(oldpath, newpath, sep = os.pathsep, + delete_existing=1, canonicalize=None): + """This appends new path elements to the given old path. Will + only add any particular path once (leaving the last one it + encounters and ignoring the rest, to preserve path order), and + will os.path.normpath and os.path.normcase all paths to help + assure this. This can also handle the case where the given old + path variable is a list instead of a string, in which case a list + will be returned instead of a string. + + Example: + Old Path: "/foo/bar:/foo" + New Path: "/biz/boom:/foo" + Result: "/foo/bar:/biz/boom:/foo" + + If delete_existing is 0, then adding a path that exists + will not move it to the end; it will stay where it is in the list. + + If canonicalize is not None, it is applied to each element of + newpath before use. + """ + + orig = oldpath + is_list = 1 + paths = orig + if not is_List(orig) and not is_Tuple(orig): + paths = string.split(paths, sep) + is_list = 0 + + if is_String(newpath): + newpaths = string.split(newpath, sep) + elif not is_List(newpath) and not is_Tuple(newpath): + newpaths = [ newpath ] # might be a Dir + else: + newpaths = newpath + + if canonicalize: + newpaths=map(canonicalize, newpaths) + + if not delete_existing: + # add old paths to result, then + # add new paths if not already present + # (I thought about using a dict for normpaths for speed, + # but it's not clear hashing the strings would be faster + # than linear searching these typically short lists.) + result = [] + normpaths = [] + for path in paths: + if not path: + continue + result.append(path) + normpaths.append(os.path.normpath(os.path.normcase(path))) + for path in newpaths: + if not path: + continue + normpath = os.path.normpath(os.path.normcase(path)) + if normpath not in normpaths: + result.append(path) + normpaths.append(normpath) + paths = result + else: + # start w/ new paths, add old ones if not present, + # then reverse. 
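+        # (Deduplicating the reversed list keeps the *last* occurrence of
+        # each path, matching the delete_existing behaviour described in
+        # the docstring, and the final reverse() restores the order.)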
+ newpaths = paths + newpaths # append new paths + newpaths.reverse() + + normpaths = [] + paths = [] + # now we add them only if they are unique + for path in newpaths: + normpath = os.path.normpath(os.path.normcase(path)) + if path and not normpath in normpaths: + paths.append(path) + normpaths.append(normpath) + paths.reverse() + + if is_list: + return paths + else: + return string.join(paths, sep) + +if sys.platform == 'cygwin': + def get_native_path(path): + """Transforms an absolute path into a native path for the system. In + Cygwin, this converts from a Cygwin path to a Windows one.""" + return string.replace(os.popen('cygpath -w ' + path).read(), '\n', '') +else: + def get_native_path(path): + """Transforms an absolute path into a native path for the system. + Non-Cygwin version, just leave the path alone.""" + return path + +display = DisplayEngine() + +def Split(arg): + if is_List(arg) or is_Tuple(arg): + return arg + elif is_String(arg): + return string.split(arg) + else: + return [arg] + +class CLVar(UserList): + """A class for command-line construction variables. + + This is a list that uses Split() to split an initial string along + white-space arguments, and similarly to split any strings that get + added. This allows us to Do the Right Thing with Append() and + Prepend() (as well as straight Python foo = env['VAR'] + 'arg1 + arg2') regardless of whether a user adds a list or a string to a + command-line construction variable. + """ + def __init__(self, seq = []): + UserList.__init__(self, Split(seq)) + def __add__(self, other): + return UserList.__add__(self, CLVar(other)) + def __radd__(self, other): + return UserList.__radd__(self, CLVar(other)) + def __coerce__(self, other): + return (self, CLVar(other)) + def __str__(self): + return string.join(self.data) + +# A dictionary that preserves the order in which items are added. +# Submitted by David Benjamin to ActiveState's Python Cookbook web site: +# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/107747 +# Including fixes/enhancements from the follow-on discussions. +class OrderedDict(UserDict): + def __init__(self, dict = None): + self._keys = [] + UserDict.__init__(self, dict) + + def __delitem__(self, key): + UserDict.__delitem__(self, key) + self._keys.remove(key) + + def __setitem__(self, key, item): + UserDict.__setitem__(self, key, item) + if key not in self._keys: self._keys.append(key) + + def clear(self): + UserDict.clear(self) + self._keys = [] + + def copy(self): + dict = OrderedDict() + dict.update(self) + return dict + + def items(self): + return zip(self._keys, self.values()) + + def keys(self): + return self._keys[:] + + def popitem(self): + try: + key = self._keys[-1] + except IndexError: + raise KeyError('dictionary is empty') + + val = self[key] + del self[key] + + return (key, val) + + def setdefault(self, key, failobj = None): + UserDict.setdefault(self, key, failobj) + if key not in self._keys: self._keys.append(key) + + def update(self, dict): + for (key, val) in dict.items(): + self.__setitem__(key, val) + + def values(self): + return map(self.get, self._keys) + +class Selector(OrderedDict): + """A callable ordered dictionary that maps file suffixes to + dictionary values. 
We preserve the order in which items are added + so that get_suffix() calls always return the first suffix added.""" + def __call__(self, env, source, ext=None): + if ext is None: + try: + ext = source[0].suffix + except IndexError: + ext = "" + try: + return self[ext] + except KeyError: + # Try to perform Environment substitution on the keys of + # the dictionary before giving up. + s_dict = {} + for (k,v) in self.items(): + if k is not None: + s_k = env.subst(k) + if s_dict.has_key(s_k): + # We only raise an error when variables point + # to the same suffix. If one suffix is literal + # and a variable suffix contains this literal, + # the literal wins and we don't raise an error. + raise KeyError, (s_dict[s_k][0], k, s_k) + s_dict[s_k] = (k,v) + try: + return s_dict[ext][1] + except KeyError: + try: + return self[None] + except KeyError: + return None + + +if sys.platform == 'cygwin': + # On Cygwin, os.path.normcase() lies, so just report back the + # fact that the underlying Windows OS is case-insensitive. + def case_sensitive_suffixes(s1, s2): + return 0 +else: + def case_sensitive_suffixes(s1, s2): + return (os.path.normcase(s1) != os.path.normcase(s2)) + +def adjustixes(fname, pre, suf, ensure_suffix=False): + if pre: + path, fn = os.path.split(os.path.normpath(fname)) + if fn[:len(pre)] != pre: + fname = os.path.join(path, pre + fn) + # Only append a suffix if the suffix we're going to add isn't already + # there, and if either we've been asked to ensure the specific suffix + # is present or there's no suffix on it at all. + if suf and fname[-len(suf):] != suf and \ + (ensure_suffix or not splitext(fname)[1]): + fname = fname + suf + return fname + + + +# From Tim Peters, +# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560 +# ASPN: Python Cookbook: Remove duplicates from a sequence +# (Also in the printed Python Cookbook.) + +def unique(s): + """Return a list of the elements in s, but without duplicates. + + For example, unique([1,2,3,1,2,3]) is some permutation of [1,2,3], + unique("abcabc") some permutation of ["a", "b", "c"], and + unique(([1, 2], [2, 3], [1, 2])) some permutation of + [[2, 3], [1, 2]]. + + For best speed, all sequence elements should be hashable. Then + unique() will usually work in linear time. + + If not possible, the sequence elements should enjoy a total + ordering, and if list(s).sort() doesn't raise TypeError it's + assumed that they do enjoy a total ordering. Then unique() will + usually work in O(N*log2(N)) time. + + If that's not possible either, the sequence elements must support + equality-testing. Then unique() will usually work in quadratic + time. + """ + + n = len(s) + if n == 0: + return [] + + # Try using a dict first, as that's the fastest and will usually + # work. If it doesn't work, it will usually fail quickly, so it + # usually doesn't cost much to *try* it. It requires that all the + # sequence elements be hashable, and support equality comparison. + u = {} + try: + for x in s: + u[x] = 1 + except TypeError: + pass # move on to the next method + else: + return u.keys() + del u + + # We can't hash all the elements. Second fastest is to sort, + # which brings the equal elements together; then duplicates are + # easy to weed out in a single pass. + # NOTE: Python's list.sort() was designed to be efficient in the + # presence of many duplicate elements. This isn't true of all + # sort functions in all languages or libraries, so this approach + # is more effective in Python than it may be elsewhere. 
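+    # (For the docstring's unhashable example, ([1, 2], [2, 3], [1, 2]),
+    # the dict attempt above raises TypeError and this sort-based pass is
+    # what removes the duplicate.)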
+ try: + t = list(s) + t.sort() + except TypeError: + pass # move on to the next method + else: + assert n > 0 + last = t[0] + lasti = i = 1 + while i < n: + if t[i] != last: + t[lasti] = last = t[i] + lasti = lasti + 1 + i = i + 1 + return t[:lasti] + del t + + # Brute force is all that's left. + u = [] + for x in s: + if x not in u: + u.append(x) + return u + + + +# From Alex Martelli, +# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560 +# ASPN: Python Cookbook: Remove duplicates from a sequence +# First comment, dated 2001/10/13. +# (Also in the printed Python Cookbook.) + +def uniquer(seq, idfun=None): + if idfun is None: + def idfun(x): return x + seen = {} + result = [] + for item in seq: + marker = idfun(item) + # in old Python versions: + # if seen.has_key(marker) + # but in new ones: + if marker in seen: continue + seen[marker] = 1 + result.append(item) + return result + +# A more efficient implementation of Alex's uniquer(), this avoids the +# idfun() argument and function-call overhead by assuming that all +# items in the sequence are hashable. + +def uniquer_hashables(seq): + seen = {} + result = [] + for item in seq: + #if not item in seen: + if not seen.has_key(item): + seen[item] = 1 + result.append(item) + return result + + + +# Much of the logic here was originally based on recipe 4.9 from the +# Python CookBook, but we had to dumb it way down for Python 1.5.2. +class LogicalLines: + + def __init__(self, fileobj): + self.fileobj = fileobj + + def readline(self): + result = [] + while 1: + line = self.fileobj.readline() + if not line: + break + if line[-2:] == '\\\n': + result.append(line[:-2]) + else: + result.append(line) + break + return string.join(result, '') + + def readlines(self): + result = [] + while 1: + line = self.readline() + if not line: + break + result.append(line) + return result + + + +class UniqueList(UserList): + def __init__(self, seq = []): + UserList.__init__(self, seq) + self.unique = True + def __make_unique(self): + if not self.unique: + self.data = uniquer_hashables(self.data) + self.unique = True + def __lt__(self, other): + self.__make_unique() + return UserList.__lt__(self, other) + def __le__(self, other): + self.__make_unique() + return UserList.__le__(self, other) + def __eq__(self, other): + self.__make_unique() + return UserList.__eq__(self, other) + def __ne__(self, other): + self.__make_unique() + return UserList.__ne__(self, other) + def __gt__(self, other): + self.__make_unique() + return UserList.__gt__(self, other) + def __ge__(self, other): + self.__make_unique() + return UserList.__ge__(self, other) + def __cmp__(self, other): + self.__make_unique() + return UserList.__cmp__(self, other) + def __len__(self): + self.__make_unique() + return UserList.__len__(self) + def __getitem__(self, i): + self.__make_unique() + return UserList.__getitem__(self, i) + def __setitem__(self, i, item): + UserList.__setitem__(self, i, item) + self.unique = False + def __getslice__(self, i, j): + self.__make_unique() + return UserList.__getslice__(self, i, j) + def __setslice__(self, i, j, other): + UserList.__setslice__(self, i, j, other) + self.unique = False + def __add__(self, other): + result = UserList.__add__(self, other) + result.unique = False + return result + def __radd__(self, other): + result = UserList.__radd__(self, other) + result.unique = False + return result + def __iadd__(self, other): + result = UserList.__iadd__(self, other) + result.unique = False + return result + def __mul__(self, other): + result = 
UserList.__mul__(self, other) + result.unique = False + return result + def __rmul__(self, other): + result = UserList.__rmul__(self, other) + result.unique = False + return result + def __imul__(self, other): + result = UserList.__imul__(self, other) + result.unique = False + return result + def append(self, item): + UserList.append(self, item) + self.unique = False + def insert(self, i): + UserList.insert(self, i) + self.unique = False + def count(self, item): + self.__make_unique() + return UserList.count(self, item) + def index(self, item): + self.__make_unique() + return UserList.index(self, item) + def reverse(self): + self.__make_unique() + UserList.reverse(self) + def sort(self, *args, **kwds): + self.__make_unique() + #return UserList.sort(self, *args, **kwds) + return apply(UserList.sort, (self,)+args, kwds) + def extend(self, other): + UserList.extend(self, other) + self.unique = False + + + +class Unbuffered: + """ + A proxy class that wraps a file object, flushing after every write, + and delegating everything else to the wrapped object. + """ + def __init__(self, file): + self.file = file + def write(self, arg): + try: + self.file.write(arg) + self.file.flush() + except IOError: + # Stdout might be connected to a pipe that has been closed + # by now. The most likely reason for the pipe being closed + # is that the user has press ctrl-c. It this is the case, + # then SCons is currently shutdown. We therefore ignore + # IOError's here so that SCons can continue and shutdown + # properly so that the .sconsign is correctly written + # before SCons exits. + pass + def __getattr__(self, attr): + return getattr(self.file, attr) + +def make_path_relative(path): + """ makes an absolute path name to a relative pathname. + """ + if os.path.isabs(path): + drive_s,path = os.path.splitdrive(path) + + import re + if not drive_s: + path=re.compile("/*(.*)").findall(path)[0] + else: + path=path[1:] + + assert( not os.path.isabs( path ) ), path + return path + + + +# The original idea for AddMethod() and RenameFunction() come from the +# following post to the ActiveState Python Cookbook: +# +# ASPN: Python Cookbook : Install bound methods in an instance +# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/223613 +# +# That code was a little fragile, though, so the following changes +# have been wrung on it: +# +# * Switched the installmethod() "object" and "function" arguments, +# so the order reflects that the left-hand side is the thing being +# "assigned to" and the right-hand side is the value being assigned. +# +# * Changed explicit type-checking to the "try: klass = object.__class__" +# block in installmethod() below so that it still works with the +# old-style classes that SCons uses. +# +# * Replaced the by-hand creation of methods and functions with use of +# the "new" module, as alluded to in Alex Martelli's response to the +# following Cookbook post: +# +# ASPN: Python Cookbook : Dynamically added methods to a class +# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/81732 + +def AddMethod(object, function, name = None): + """ + Adds either a bound method to an instance or an unbound method to + a class. If name is ommited the name of the specified function + is used by default. 
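+    (When a name is given, the function is first passed through
+    RenameFunction() below so that its __name__ matches the new name.)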
+ Example: + a = A() + def f(self, x, y): + self.z = x + y + AddMethod(f, A, "add") + a.add(2, 4) + print a.z + AddMethod(lambda self, i: self.l[i], a, "listIndex") + print a.listIndex(5) + """ + import new + + if name is None: + name = function.func_name + else: + function = RenameFunction(function, name) + + try: + klass = object.__class__ + except AttributeError: + # "object" is really a class, so it gets an unbound method. + object.__dict__[name] = new.instancemethod(function, None, object) + else: + # "object" is really an instance, so it gets a bound method. + object.__dict__[name] = new.instancemethod(function, object, klass) + +def RenameFunction(function, name): + """ + Returns a function identical to the specified function, but with + the specified name. + """ + import new + + # Compatibility for Python 1.5 and 2.1. Can be removed in favor of + # passing function.func_defaults directly to new.function() once + # we base on Python 2.2 or later. + func_defaults = function.func_defaults + if func_defaults is None: + func_defaults = () + + return new.function(function.func_code, + function.func_globals, + name, + func_defaults) + + +md5 = False +def MD5signature(s): + return str(s) + +def MD5filesignature(fname, chunksize=65536): + f = open(fname, "rb") + result = f.read() + f.close() + return result + +try: + import hashlib +except ImportError: + pass +else: + if hasattr(hashlib, 'md5'): + md5 = True + def MD5signature(s): + m = hashlib.md5() + m.update(str(s)) + return m.hexdigest() + + def MD5filesignature(fname, chunksize=65536): + m = hashlib.md5() + f = open(fname, "rb") + while 1: + blck = f.read(chunksize) + if not blck: + break + m.update(str(blck)) + f.close() + return m.hexdigest() + +def MD5collect(signatures): + """ + Collects a list of signatures into an aggregate signature. + + signatures - a list of signatures + returns - the aggregate signature + """ + if len(signatures) == 1: + return signatures[0] + else: + return MD5signature(string.join(signatures, ', ')) + + + +# Wrap the intern() function so it doesn't throw exceptions if ineligible +# arguments are passed. The intern() function was moved into the sys module in +# Python 3. +try: + intern +except NameError: + from sys import intern + +def silent_intern(x): + """ + Perform intern() on the passed argument and return the result. + If the input is ineligible (e.g. a unicode string) the original argument is + returned and no exception is thrown. + """ + try: + return intern(x) + except TypeError: + return x + + + +# From Dinu C. Gherman, +# Python Cookbook, second edition, recipe 6.17, p. 277. +# Also: +# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/68205 +# ASPN: Python Cookbook: Null Object Design Pattern + +# TODO(1.5): +#class Null(object): +class Null: + """ Null objects always and reliably "do nothing." 
""" + def __new__(cls, *args, **kwargs): + if not '_inst' in vars(cls): + #cls._inst = type.__new__(cls, *args, **kwargs) + cls._inst = apply(type.__new__, (cls,) + args, kwargs) + return cls._inst + def __init__(self, *args, **kwargs): + pass + def __call__(self, *args, **kwargs): + return self + def __repr__(self): + return "Null(0x%08X)" % id(self) + def __nonzero__(self): + return False + def __getattr__(self, name): + return self + def __setattr__(self, name, value): + return self + def __delattr__(self, name): + return self + +class NullSeq(Null): + def __len__(self): + return 0 + def __iter__(self): + return iter(()) + def __getitem__(self, i): + return self + def __delitem__(self, i): + return self + def __setitem__(self, i, v): + return self + + +del __revision__ + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Variables/BoolVariable.py b/engine/SCons/Variables/BoolVariable.py new file mode 100644 index 0000000..19e52bf --- /dev/null +++ b/engine/SCons/Variables/BoolVariable.py @@ -0,0 +1,91 @@ +"""engine.SCons.Variables.BoolVariable + +This file defines the option type for SCons implementing true/false values. + +Usage example: + + opts = Variables() + opts.Add(BoolVariable('embedded', 'build for an embedded system', 0)) + ... + if env['embedded'] == 1: + ... +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Variables/BoolVariable.py 4577 2009/12/27 19:43:56 scons" + +__all__ = ['BoolVariable',] + +import string + +import SCons.Errors + +__true_strings = ('y', 'yes', 'true', 't', '1', 'on' , 'all' ) +__false_strings = ('n', 'no', 'false', 'f', '0', 'off', 'none') + + +def _text2bool(val): + """ + Converts strings to True/False depending on the 'truth' expressed by + the string. If the string can't be converted, the original value + will be returned. + + See '__true_strings' and '__false_strings' for values considered + 'true' or 'false respectivly. + + This is usable as 'converter' for SCons' Variables. + """ + lval = string.lower(val) + if lval in __true_strings: return True + if lval in __false_strings: return False + raise ValueError("Invalid value for boolean option: %s" % val) + + +def _validator(key, val, env): + """ + Validates the given value to be either '0' or '1'. + + This is usable as 'validator' for SCons' Variables. 
+ """ + if not env[key] in (True, False): + raise SCons.Errors.UserError( + 'Invalid value for boolean option %s: %s' % (key, env[key])) + + +def BoolVariable(key, help, default): + """ + The input parameters describe a boolen option, thus they are + returned with the correct converter and validator appended. The + 'help' text will by appended by '(yes|no) to show the valid + valued. The result is usable for input to opts.Add(). + """ + return (key, '%s (yes|no)' % help, default, + _validator, _text2bool) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Variables/EnumVariable.py b/engine/SCons/Variables/EnumVariable.py new file mode 100644 index 0000000..0a01772 --- /dev/null +++ b/engine/SCons/Variables/EnumVariable.py @@ -0,0 +1,107 @@ +"""engine.SCons.Variables.EnumVariable + +This file defines the option type for SCons allowing only specified +input-values. + +Usage example: + + opts = Variables() + opts.Add(EnumVariable('debug', 'debug output and symbols', 'no', + allowed_values=('yes', 'no', 'full'), + map={}, ignorecase=2)) + ... + if env['debug'] == 'full': + ... +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Variables/EnumVariable.py 4577 2009/12/27 19:43:56 scons" + +__all__ = ['EnumVariable',] + +import string + +import SCons.Errors + +def _validator(key, val, env, vals): + if not val in vals: + raise SCons.Errors.UserError( + 'Invalid value for option %s: %s' % (key, val)) + + +def EnumVariable(key, help, default, allowed_values, map={}, ignorecase=0): + """ + The input parameters describe a option with only certain values + allowed. They are returned with an appropriate converter and + validator appended. The result is usable for input to + Variables.Add(). + + 'key' and 'default' are the values to be passed on to Variables.Add(). + + 'help' will be appended by the allowed values automatically + + 'allowed_values' is a list of strings, which are allowed as values + for this option. + + The 'map'-dictionary may be used for converting the input value + into canonical values (eg. for aliases). + + 'ignorecase' defines the behaviour of the validator: + + If ignorecase == 0, the validator/converter are case-sensitive. + If ignorecase == 1, the validator/converter are case-insensitive. 
+ If ignorecase == 2, the validator/converter is case-insensitive and + the converted value will always be lower-case. + + The 'validator' tests whether the value is in the list of allowed + values. The 'converter' converts input values according to the + given 'map'-dictionary (unmapped input values are returned + unchanged). + """ + help = '%s (%s)' % (help, string.join(allowed_values, '|')) + # define validator + if ignorecase >= 1: + validator = lambda key, val, env, vals=allowed_values: \ + _validator(key, string.lower(val), env, vals) + else: + validator = lambda key, val, env, vals=allowed_values: \ + _validator(key, val, env, vals) + # define converter + if ignorecase == 2: + converter = lambda val, map=map: \ + string.lower(map.get(string.lower(val), val)) + elif ignorecase == 1: + converter = lambda val, map=map: \ + map.get(string.lower(val), val) + else: + converter = lambda val, map=map: \ + map.get(val, val) + return (key, help, default, validator, converter) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Variables/ListVariable.py b/engine/SCons/Variables/ListVariable.py new file mode 100644 index 0000000..4eab726 --- /dev/null +++ b/engine/SCons/Variables/ListVariable.py @@ -0,0 +1,139 @@ +"""engine.SCons.Variables.ListVariable + +This file defines the option type for SCons implementing 'lists'. + +A 'list' option may either be 'all', 'none' or a list of names +separated by comma. After the option has been processed, the option +value holds either the named list elements, all list elemens or no +list elements at all. + +Usage example: + + list_of_libs = Split('x11 gl qt ical') + + opts = Variables() + opts.Add(ListVariable('shared', + 'libraries to build as shared libraries', + 'all', + elems = list_of_libs)) + ... + for lib in list_of_libs: + if lib in env['shared']: + env.SharedObject(...) + else: + env.Object(...) +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Variables/ListVariable.py 4577 2009/12/27 19:43:56 scons" + +# Know Bug: This should behave like a Set-Type, but does not really, +# since elements can occur twice. 
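+# (Illustrative consequence: a command-line setting such as
+# 'shared=x11,x11' is stored with 'x11' listed twice.)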
+ +__all__ = ['ListVariable',] + +import string +import UserList + +import SCons.Util + + +class _ListVariable(UserList.UserList): + def __init__(self, initlist=[], allowedElems=[]): + UserList.UserList.__init__(self, filter(None, initlist)) + self.allowedElems = allowedElems[:] + self.allowedElems.sort() + + def __cmp__(self, other): + raise NotImplementedError + def __eq__(self, other): + raise NotImplementedError + def __ge__(self, other): + raise NotImplementedError + def __gt__(self, other): + raise NotImplementedError + def __le__(self, other): + raise NotImplementedError + def __lt__(self, other): + raise NotImplementedError + def __str__(self): + if len(self) == 0: + return 'none' + self.data.sort() + if self.data == self.allowedElems: + return 'all' + else: + return string.join(self, ',') + def prepare_to_store(self): + return self.__str__() + +def _converter(val, allowedElems, mapdict): + """ + """ + if val == 'none': + val = [] + elif val == 'all': + val = allowedElems + else: + val = filter(None, string.split(val, ',')) + val = map(lambda v, m=mapdict: m.get(v, v), val) + notAllowed = filter(lambda v, aE=allowedElems: not v in aE, val) + if notAllowed: + raise ValueError("Invalid value(s) for option: %s" % + string.join(notAllowed, ',')) + return _ListVariable(val, allowedElems) + + +## def _validator(key, val, env): +## """ +## """ +## # todo: write validater for pgk list +## return 1 + + +def ListVariable(key, help, default, names, map={}): + """ + The input parameters describe a 'package list' option, thus they + are returned with the correct converter and validater appended. The + result is usable for input to opts.Add() . + + A 'package list' option may either be 'all', 'none' or a list of + package names (separated by space). + """ + names_str = 'allowed names: %s' % string.join(names, ' ') + if SCons.Util.is_List(default): + default = string.join(default, ',') + help = string.join( + (help, '(all|none|comma-separated list of names)', names_str), + '\n ') + return (key, help, default, + None, #_validator, + lambda val, elems=names, m=map: _converter(val, elems, m)) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Variables/PackageVariable.py b/engine/SCons/Variables/PackageVariable.py new file mode 100644 index 0000000..ef46f66 --- /dev/null +++ b/engine/SCons/Variables/PackageVariable.py @@ -0,0 +1,109 @@ +"""engine.SCons.Variables.PackageVariable + +This file defines the option type for SCons implementing 'package +activation'. + +To be used whenever a 'package' may be enabled/disabled and the +package path may be specified. + +Usage example: + + Examples: + x11=no (disables X11 support) + x11=yes (will search for the package installation dir) + x11=/usr/local/X11 (will check this path for existance) + + To replace autoconf's --with-xxx=yyy + + opts = Variables() + opts.Add(PackageVariable('x11', + 'use X11 installed here (yes = search some places', + 'yes')) + ... + if env['x11'] == True: + dir = ... search X11 in some standard places ... + env['x11'] = dir + if env['x11']: + ... build with x11 ... 
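Stepping back to the ListVariable converter defined above, here is an illustrative usage sketch; an SConstruct context is assumed and the library names are invented.

    # Hypothetical SConstruct fragment illustrating ListVariable above.
    opts = Variables()
    opts.Add(ListVariable('shared', 'libraries to build as shared', 'all',
                          names=['x11', 'gl', 'qt']))
    env = Environment(variables=opts)
    # shared=none converts to an empty list, shared=all to every allowed
    # name, and shared=x11,qt to just those two; a name outside the
    # allowed set makes the converter raise an error.
    for lib in ['x11', 'gl', 'qt']:
        if lib in env['shared']:
            print('building %s as a shared library' % lib)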
+""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Variables/PackageVariable.py 4577 2009/12/27 19:43:56 scons" + +__all__ = ['PackageVariable',] + +import string + +import SCons.Errors + +__enable_strings = ('1', 'yes', 'true', 'on', 'enable', 'search') +__disable_strings = ('0', 'no', 'false', 'off', 'disable') + +def _converter(val): + """ + """ + lval = string.lower(val) + if lval in __enable_strings: return True + if lval in __disable_strings: return False + #raise ValueError("Invalid value for boolean option: %s" % val) + return val + + +def _validator(key, val, env, searchfunc): + # NB: searchfunc is currenty undocumented and unsupported + """ + """ + # todo: write validator, check for path + import os + if env[key] is True: + if searchfunc: + env[key] = searchfunc(key, val) + elif env[key] and not os.path.exists(val): + raise SCons.Errors.UserError( + 'Path does not exist for option %s: %s' % (key, val)) + + +def PackageVariable(key, help, default, searchfunc=None): + # NB: searchfunc is currenty undocumented and unsupported + """ + The input parameters describe a 'package list' option, thus they + are returned with the correct converter and validator appended. The + result is usable for input to opts.Add() . + + A 'package list' option may either be 'all', 'none' or a list of + package names (seperated by space). + """ + help = string.join( + (help, '( yes | no | /path/to/%s )' % key), + '\n ') + return (key, help, default, + lambda k, v, e, f=searchfunc: _validator(k,v,e,f), + _converter) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Variables/PathVariable.py b/engine/SCons/Variables/PathVariable.py new file mode 100644 index 0000000..bbfffdf --- /dev/null +++ b/engine/SCons/Variables/PathVariable.py @@ -0,0 +1,147 @@ +"""SCons.Variables.PathVariable + +This file defines an option type for SCons implementing path settings. + +To be used whenever a a user-specified path override should be allowed. + +Arguments to PathVariable are: + option-name = name of this option on the command line (e.g. "prefix") + option-help = help string for option + option-dflt = default value for this option + validator = [optional] validator for option value. 
Predefined + validators are: + + PathAccept -- accepts any path setting; no validation + PathIsDir -- path must be an existing directory + PathIsDirCreate -- path must be a dir; will create + PathIsFile -- path must be a file + PathExists -- path must exist (any type) [default] + + The validator is a function that is called and which + should return True or False to indicate if the path + is valid. The arguments to the validator function + are: (key, val, env). The key is the name of the + option, the val is the path specified for the option, + and the env is the env to which the Otions have been + added. + +Usage example: + + Examples: + prefix=/usr/local + + opts = Variables() + + opts = Variables() + opts.Add(PathVariable('qtdir', + 'where the root of Qt is installed', + qtdir, PathIsDir)) + opts.Add(PathVariable('qt_includes', + 'where the Qt includes are installed', + '$qtdir/includes', PathIsDirCreate)) + opts.Add(PathVariable('qt_libraries', + 'where the Qt library is installed', + '$qtdir/lib')) + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
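Putting the PathVariable interface described above into practice, the sketch below pairs a predefined validator with a custom one following the (key, val, env) signature. The option names and the custom check are purely illustrative, and an SConstruct context is assumed.

    # Hypothetical SConstruct fragment.
    import os.path
    import SCons.Errors

    def check_has_include(key, val, env):
        # Custom validator, called as (key, val, env) as described above.
        if not os.path.isdir(os.path.join(val, 'include')):
            raise SCons.Errors.UserError(
                'Path for option %s has no include/ subdirectory: %s' % (key, val))

    opts = Variables()
    opts.Add(PathVariable('prefix', 'installation prefix', '/usr/local',
                          PathVariable.PathIsDir))
    opts.Add(PathVariable('sdkdir', 'root of a hypothetical SDK', '/opt/sdk',
                          check_has_include))
    env = Environment(variables=opts)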
+# + +__revision__ = "src/engine/SCons/Variables/PathVariable.py 4577 2009/12/27 19:43:56 scons" + +__all__ = ['PathVariable',] + +import os +import os.path + +import SCons.Errors + +class _PathVariableClass: + + def PathAccept(self, key, val, env): + """Accepts any path, no checking done.""" + pass + + def PathIsDir(self, key, val, env): + """Validator to check if Path is a directory.""" + if not os.path.isdir(val): + if os.path.isfile(val): + m = 'Directory path for option %s is a file: %s' + else: + m = 'Directory path for option %s does not exist: %s' + raise SCons.Errors.UserError(m % (key, val)) + + def PathIsDirCreate(self, key, val, env): + """Validator to check if Path is a directory, + creating it if it does not exist.""" + if os.path.isfile(val): + m = 'Path for option %s is a file, not a directory: %s' + raise SCons.Errors.UserError(m % (key, val)) + if not os.path.isdir(val): + os.makedirs(val) + + def PathIsFile(self, key, val, env): + """validator to check if Path is a file""" + if not os.path.isfile(val): + if os.path.isdir(val): + m = 'File path for option %s is a directory: %s' + else: + m = 'File path for option %s does not exist: %s' + raise SCons.Errors.UserError(m % (key, val)) + + def PathExists(self, key, val, env): + """validator to check if Path exists""" + if not os.path.exists(val): + m = 'Path for option %s does not exist: %s' + raise SCons.Errors.UserError(m % (key, val)) + + def __call__(self, key, help, default, validator=None): + # NB: searchfunc is currenty undocumented and unsupported + """ + The input parameters describe a 'path list' option, thus they + are returned with the correct converter and validator appended. The + result is usable for input to opts.Add() . + + The 'default' option specifies the default path to use if the + user does not specify an override with this option. + + validator is a validator, see this file for examples + """ + if validator is None: + validator = self.PathExists + + if SCons.Util.is_List(key) or SCons.Util.is_Tuple(key): + return (key, '%s ( /path/to/%s )' % (help, key[0]), default, + validator, None) + else: + return (key, '%s ( /path/to/%s )' % (help, key), default, + validator, None) + +PathVariable = _PathVariableClass() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Variables/__init__.py b/engine/SCons/Variables/__init__.py new file mode 100644 index 0000000..e4eba6c --- /dev/null +++ b/engine/SCons/Variables/__init__.py @@ -0,0 +1,317 @@ +"""engine.SCons.Variables + +This file defines the Variables class that is used to add user-friendly +customizable variables to an SCons build. +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/Variables/__init__.py 4577 2009/12/27 19:43:56 scons" + +import os.path +import string +import sys + +import SCons.Environment +import SCons.Errors +import SCons.Util +import SCons.Warnings + +from BoolVariable import BoolVariable # okay +from EnumVariable import EnumVariable # okay +from ListVariable import ListVariable # naja +from PackageVariable import PackageVariable # naja +from PathVariable import PathVariable # okay + + +class Variables: + instance=None + + """ + Holds all the options, updates the environment with the variables, + and renders the help text. + """ + def __init__(self, files=[], args={}, is_global=1): + """ + files - [optional] List of option configuration files to load + (backward compatibility) If a single string is passed it is + automatically placed in a file list + """ + self.options = [] + self.args = args + if not SCons.Util.is_List(files): + if files: + files = [ files ] + else: + files = [] + self.files = files + self.unknown = {} + + # create the singleton instance + if is_global: + self=Variables.instance + + if not Variables.instance: + Variables.instance=self + + def _do_add(self, key, help="", default=None, validator=None, converter=None): + class Variable: + pass + + option = Variable() + + # if we get a list or a tuple, we take the first element as the + # option key and store the remaining in aliases. + if SCons.Util.is_List(key) or SCons.Util.is_Tuple(key): + option.key = key[0] + option.aliases = key[1:] + else: + option.key = key + option.aliases = [ key ] + option.help = help + option.default = default + option.validator = validator + option.converter = converter + + self.options.append(option) + + # options might be added after the 'unknown' dict has been set up, + # so we remove the key and all its aliases from that dict + for alias in list(option.aliases) + [ option.key ]: + # TODO(1.5) + #if alias in self.unknown: + if alias in self.unknown.keys(): + del self.unknown[alias] + + def keys(self): + """ + Returns the keywords for the options + """ + return map(lambda o: o.key, self.options) + + def Add(self, key, help="", default=None, validator=None, converter=None, **kw): + """ + Add an option. + + key - the name of the variable, or a list or tuple of arguments + help - optional help text for the options + default - optional default value + validator - optional function that is called to validate the option's value + Called with (key, value, environment) + converter - optional function that is called to convert the option's value before + putting it in the environment. + """ + + if SCons.Util.is_List(key) or type(key) == type(()): + apply(self._do_add, key) + return + + if not SCons.Util.is_String(key) or \ + not SCons.Environment.is_valid_construction_var(key): + raise SCons.Errors.UserError, "Illegal Variables.Add() key `%s'" % str(key) + + self._do_add(key, help, default, validator, converter) + + def AddVariables(self, *optlist): + """ + Add a list of options. 
+ + Each list element is a tuple/list of arguments to be passed on + to the underlying method for adding options. + + Example: + opt.AddVariables( + ('debug', '', 0), + ('CC', 'The C compiler'), + ('VALIDATE', 'An option for testing validation', 'notset', + validator, None), + ) + """ + for o in optlist: + apply(self._do_add, o) + + + def Update(self, env, args=None): + """ + Update an environment with the option variables. + + env - the environment to update. + """ + + values = {} + + # first set the defaults: + for option in self.options: + if not option.default is None: + values[option.key] = option.default + + # next set the value specified in the options file + for filename in self.files: + if os.path.exists(filename): + dir = os.path.split(os.path.abspath(filename))[0] + if dir: + sys.path.insert(0, dir) + try: + values['__name__'] = filename + exec open(filename, 'rU').read() in {}, values + finally: + if dir: + del sys.path[0] + del values['__name__'] + + # set the values specified on the command line + if args is None: + args = self.args + + for arg, value in args.items(): + added = False + for option in self.options: + if arg in list(option.aliases) + [ option.key ]: + values[option.key] = value + added = True + if not added: + self.unknown[arg] = value + + # put the variables in the environment: + # (don't copy over variables that are not declared as options) + for option in self.options: + try: + env[option.key] = values[option.key] + except KeyError: + pass + + # Call the convert functions: + for option in self.options: + if option.converter and values.has_key(option.key): + value = env.subst('${%s}'%option.key) + try: + try: + env[option.key] = option.converter(value) + except TypeError: + env[option.key] = option.converter(value, env) + except ValueError, x: + raise SCons.Errors.UserError, 'Error converting option: %s\n%s'%(option.key, x) + + + # Finally validate the values: + for option in self.options: + if option.validator and values.has_key(option.key): + option.validator(option.key, env.subst('${%s}'%option.key), env) + + def UnknownVariables(self): + """ + Returns any options in the specified arguments lists that + were not known, declared options in this object. + """ + return self.unknown + + def Save(self, filename, env): + """ + Saves all the options in the given file. This file can + then be used to load the options next run. This can be used + to create an option cache file. + + filename - Name of the file to save into + env - the environment get the option values from + """ + + # Create the file and write out the header + try: + fh = open(filename, 'w') + + try: + # Make an assignment in the file for each option + # within the environment that was assigned a value + # other than the default. 
+ for option in self.options: + try: + value = env[option.key] + try: + prepare = value.prepare_to_store + except AttributeError: + try: + eval(repr(value)) + except KeyboardInterrupt: + raise + except: + # Convert stuff that has a repr() that + # cannot be evaluated into a string + value = SCons.Util.to_String(value) + else: + value = prepare() + + defaultVal = env.subst(SCons.Util.to_String(option.default)) + if option.converter: + defaultVal = option.converter(defaultVal) + + if str(env.subst('${%s}' % option.key)) != str(defaultVal): + fh.write('%s = %s\n' % (option.key, repr(value))) + except KeyError: + pass + finally: + fh.close() + + except IOError, x: + raise SCons.Errors.UserError, 'Error writing options to file: %s\n%s' % (filename, x) + + def GenerateHelpText(self, env, sort=None): + """ + Generate the help text for the options. + + env - an environment that is used to get the current values + of the options. + """ + + if sort: + options = self.options[:] + options.sort(lambda x,y,func=sort: func(x.key,y.key)) + else: + options = self.options + + def format(opt, self=self, env=env): + if env.has_key(opt.key): + actual = env.subst('${%s}' % opt.key) + else: + actual = None + return self.FormatVariableHelpText(env, opt.key, opt.help, opt.default, actual, opt.aliases) + lines = filter(None, map(format, options)) + + return string.join(lines, '') + + format = '\n%s: %s\n default: %s\n actual: %s\n' + format_ = '\n%s: %s\n default: %s\n actual: %s\n aliases: %s\n' + + def FormatVariableHelpText(self, env, key, help, default, actual, aliases=[]): + # Don't display the key name itself as an alias. + aliases = filter(lambda a, k=key: a != k, aliases) + if len(aliases)==0: + return self.format % (key, help, default, actual) + else: + return self.format_ % (key, help, default, actual, aliases) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/Warnings.py b/engine/SCons/Warnings.py new file mode 100644 index 0000000..b2d3659 --- /dev/null +++ b/engine/SCons/Warnings.py @@ -0,0 +1,228 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +"""SCons.Warnings + +This file implements the warnings framework for SCons. 
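Before going further, here is a sketch that ties together the Variables class shown above (Add, Update via the Environment, UnknownVariables, Save and GenerateHelpText). The file name and option names are invented, and an SConstruct context is assumed.

    # Hypothetical SConstruct fragment.
    opts = Variables('build_options.py', ARGUMENTS)
    opts.Add('CC', 'The C compiler')
    opts.Add(BoolVariable('warnings', 'enable compiler warnings', 'yes'))
    env = Environment(variables=opts)

    # Command-line settings that match no declared option end up here.
    unknown = opts.UnknownVariables()
    if unknown:
        print('Unknown variables: %s' % ' '.join(unknown.keys()))

    # Persist non-default values for the next run and emit help text.
    opts.Save('build_options.py', env)
    Help(opts.GenerateHelpText(env))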
+ +""" + +__revision__ = "src/engine/SCons/Warnings.py 4577 2009/12/27 19:43:56 scons" + +import string +import sys + +import SCons.Errors + +class Warning(SCons.Errors.UserError): + pass + +class MandatoryWarning(Warning): + pass + + + +class FutureDeprecatedWarning(Warning): + pass + +class DeprecatedWarning(Warning): + pass + +class MandatoryDeprecatedWarning(MandatoryWarning): + pass + + + +# NOTE: If you add a new warning class, add it to the man page, too! + +class CacheWriteErrorWarning(Warning): + pass + +class CorruptSConsignWarning(Warning): + pass + +class DependencyWarning(Warning): + pass + +class DeprecatedCopyWarning(DeprecatedWarning): + pass + +class DeprecatedOptionsWarning(DeprecatedWarning): + pass + +class DeprecatedSourceSignaturesWarning(DeprecatedWarning): + pass + +class DeprecatedTargetSignaturesWarning(DeprecatedWarning): + pass + +class DuplicateEnvironmentWarning(Warning): + pass + +class FutureReservedVariableWarning(Warning): + pass + +class LinkWarning(Warning): + pass + +class MisleadingKeywordsWarning(Warning): + pass + +class MissingSConscriptWarning(Warning): + pass + +class NoMD5ModuleWarning(Warning): + pass + +class NoMetaclassSupportWarning(Warning): + pass + +class NoObjectCountWarning(Warning): + pass + +class NoParallelSupportWarning(Warning): + pass + +class PythonVersionWarning(DeprecatedWarning): + pass + +class ReservedVariableWarning(Warning): + pass + +class StackSizeWarning(Warning): + pass + +class TaskmasterNeedsExecuteWarning(FutureDeprecatedWarning): + pass + +class VisualCMissingWarning(Warning): + pass + +# Used when MSVC_VERSION and MSVS_VERSION do not point to the +# same version (MSVS_VERSION is deprecated) +class VisualVersionMismatch(Warning): + pass + +class VisualStudioMissingWarning(Warning): + pass + +class FortranCxxMixWarning(LinkWarning): + pass + +_warningAsException = 0 + +# The below is a list of 2-tuples. The first element is a class object. +# The second element is true if that class is enabled, false if it is disabled. +_enabled = [] + +_warningOut = None + +def suppressWarningClass(clazz): + """Suppresses all warnings that are of type clazz or + derived from clazz.""" + _enabled.insert(0, (clazz, 0)) + +def enableWarningClass(clazz): + """Suppresses all warnings that are of type clazz or + derived from clazz.""" + _enabled.insert(0, (clazz, 1)) + +def warningAsException(flag=1): + """Turn warnings into exceptions. Returns the old value of the flag.""" + global _warningAsException + old = _warningAsException + _warningAsException = flag + return old + +def warn(clazz, *args): + global _enabled, _warningAsException, _warningOut + + warning = clazz(args) + for clazz, flag in _enabled: + if isinstance(warning, clazz): + if flag: + if _warningAsException: + raise warning + + if _warningOut: + _warningOut(warning) + break + +def process_warn_strings(arguments): + """Process string specifications of enabling/disabling warnings, + as passed to the --warn option or the SetOption('warn') function. + + + An argument to this option should be of the form <warning-class> + or no-<warning-class>. The warning class is munged in order + to get an actual class name from the classes above, which we + need to pass to the {enable,disable}WarningClass() functions. + The supplied <warning-class> is split on hyphens, each element + is capitalized, then smushed back together. Then the string + "Warning" is appended to get the class name. + + For example, 'deprecated' will enable the DeprecatedWarning + class. 
'no-dependency' will disable the .DependencyWarning + class. + + As a special case, --warn=all and --warn=no-all will enable or + disable (respectively) the base Warning class of all warnings. + + """ + + def _capitalize(s): + if s[:5] == "scons": + return "SCons" + s[5:] + else: + return string.capitalize(s) + + for arg in arguments: + + elems = string.split(string.lower(arg), '-') + enable = 1 + if elems[0] == 'no': + enable = 0 + del elems[0] + + if len(elems) == 1 and elems[0] == 'all': + class_name = "Warning" + else: + class_name = string.join(map(_capitalize, elems), '') + "Warning" + try: + clazz = globals()[class_name] + except KeyError: + sys.stderr.write("No warning type: '%s'\n" % arg) + else: + if enable: + enableWarningClass(clazz) + elif issubclass(clazz, MandatoryDeprecatedWarning): + fmt = "Can not disable mandataory warning: '%s'\n" + sys.stderr.write(fmt % arg) + else: + suppressWarningClass(clazz) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/__init__.py b/engine/SCons/__init__.py new file mode 100644 index 0000000..e599c08 --- /dev/null +++ b/engine/SCons/__init__.py @@ -0,0 +1,49 @@ +"""SCons + +The main package for the SCons software construction utility. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
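As an illustration of the warnings framework in SCons/Warnings.py above, the fragment below enables, emits and then suppresses one warning class. Routing output through the module-level _warningOut hook is an assumption made for the sake of the example; it is only a sketch, not the way the SCons front end necessarily wires things up.

    # Sketch only; assumes SCons is importable.
    import sys
    import SCons.Warnings

    # _warningOut is the module-level output hook called with the warning
    # instance; here it just writes the warning's arguments to stderr.
    SCons.Warnings._warningOut = lambda w: sys.stderr.write(
        'scons warning: %s\n' % (w.args,))

    SCons.Warnings.enableWarningClass(SCons.Warnings.DependencyWarning)
    SCons.Warnings.warn(SCons.Warnings.DependencyWarning,
                        'illustrative dependency problem')

    # The most recently registered (class, flag) pair wins, so suppressing
    # the class afterwards silences it again.
    SCons.Warnings.suppressWarningClass(SCons.Warnings.DependencyWarning)
    SCons.Warnings.warn(SCons.Warnings.DependencyWarning, 'now silent')

    # The same toggling by name, in the --warn=... string form handled by
    # process_warn_strings():
    SCons.Warnings.process_warn_strings(['dependency', 'no-dependency'])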
+# + +__revision__ = "src/engine/SCons/__init__.py 4577 2009/12/27 19:43:56 scons" + +__version__ = "1.2.0.d20091224" + +__build__ = "r4577[MODIFIED]" + +__buildsys__ = "scons-dev" + +__date__ = "2009/12/27 19:43:56" + +__developer__ = "scons" + +# make sure compatibility is always in place +import SCons.compat + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/compat/__init__.py b/engine/SCons/compat/__init__.py new file mode 100644 index 0000000..06a0de4 --- /dev/null +++ b/engine/SCons/compat/__init__.py @@ -0,0 +1,302 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__doc__ = """ +SCons compatibility package for old Python versions + +This subpackage holds modules that provide backwards-compatible +implementations of various things that we'd like to use in SCons but which +only show up in later versions of Python than the early, old version(s) +we still support. + +Other code will not generally reference things in this package through +the SCons.compat namespace. The modules included here add things to +the __builtin__ namespace or the global module list so that the rest +of our code can use the objects and names imported here regardless of +Python version. + +Simply enough, things that go in the __builtin__ name space come from +our builtins module. + +The rest of the things here will be in individual compatibility modules +that are either: 1) suitably modified copies of the future modules that +we want to use; or 2) backwards compatible re-implementations of the +specific portions of a future module's API that we want to use. + +GENERAL WARNINGS: Implementations of functions in the SCons.compat +modules are *NOT* guaranteed to be fully compliant with these functions in +later versions of Python. We are only concerned with adding functionality +that we actually use in SCons, so be wary if you lift this code for +other uses. (That said, making these more nearly the same as later, +official versions is still a desirable goal, we just don't need to be +obsessive about it.) + +We name the compatibility modules with an initial '_scons_' (for example, +_scons_subprocess.py is our compatibility module for subprocess) so +that we can still try to import the real module name and fall back to +our compatibility module if we get an ImportError. 
The import_as() +function defined below loads the module as the "real" name (without the +'_scons'), after which all of the "import {module}" statements in the +rest of our code will find our pre-loaded compatibility module. +""" + +__revision__ = "src/engine/SCons/compat/__init__.py 4577 2009/12/27 19:43:56 scons" + +def import_as(module, name): + """ + Imports the specified module (from our local directory) as the + specified name. + """ + import imp + import os.path + dir = os.path.split(__file__)[0] + file, filename, suffix_mode_type = imp.find_module(module, [dir]) + imp.load_module(name, file, filename, suffix_mode_type) + +import builtins + +try: + import hashlib +except ImportError: + # Pre-2.5 Python has no hashlib module. + try: + import_as('_scons_hashlib', 'hashlib') + except ImportError: + # If we failed importing our compatibility module, it probably + # means this version of Python has no md5 module. Don't do + # anything and let the higher layer discover this fact, so it + # can fall back to using timestamp. + pass + +try: + set +except NameError: + # Pre-2.4 Python has no native set type + try: + # Python 2.2 and 2.3 can use the copy of the 2.[45] sets module + # that we grabbed. + import_as('_scons_sets', 'sets') + except (ImportError, SyntaxError): + # Python 1.5 (ImportError, no __future_ module) and 2.1 + # (SyntaxError, no generators in __future__) will blow up + # trying to import the 2.[45] sets module, so back off to a + # custom sets module that can be discarded easily when we + # stop supporting those versions. + import_as('_scons_sets15', 'sets') + import __builtin__ + import sets + __builtin__.set = sets.Set + +import fnmatch +try: + fnmatch.filter +except AttributeError: + # Pre-2.2 Python has no fnmatch.filter() function. + def filter(names, pat): + """Return the subset of the list NAMES that match PAT""" + import os,posixpath + result=[] + pat = os.path.normcase(pat) + if not fnmatch._cache.has_key(pat): + import re + res = fnmatch.translate(pat) + fnmatch._cache[pat] = re.compile(res) + match = fnmatch._cache[pat].match + if os.path is posixpath: + # normcase on posix is NOP. Optimize it away from the loop. + for name in names: + if match(name): + result.append(name) + else: + for name in names: + if match(os.path.normcase(name)): + result.append(name) + return result + fnmatch.filter = filter + del filter + +try: + import itertools +except ImportError: + # Pre-2.3 Python has no itertools module. + import_as('_scons_itertools', 'itertools') + +# If we need the compatibility version of textwrap, it must be imported +# before optparse, which uses it. +try: + import textwrap +except ImportError: + # Pre-2.3 Python has no textwrap module. + import_as('_scons_textwrap', 'textwrap') + +try: + import optparse +except ImportError: + # Pre-2.3 Python has no optparse module. + import_as('_scons_optparse', 'optparse') + +import os +try: + os.devnull +except AttributeError: + # Pre-2.4 Python has no os.devnull attribute + import sys + _names = sys.builtin_module_names + if 'posix' in _names: + os.devnull = '/dev/null' + elif 'nt' in _names: + os.devnull = 'nul' + os.path.devnull = os.devnull +try: + os.path.lexists +except AttributeError: + # Pre-2.4 Python has no os.path.lexists function + def lexists(path): + return os.path.exists(path) or os.path.islink(path) + os.path.lexists = lexists + + +try: + import platform +except ImportError: + # Pre-2.3 Python has no platform module. 
+ import_as('_scons_platform', 'platform') + + +import shlex +try: + shlex.split +except AttributeError: + # Pre-2.3 Python has no shlex.split() function. + # + # The full white-space splitting semantics of shlex.split() are + # complicated to reproduce by hand, so just use a compatibility + # version of the shlex module cribbed from Python 2.5 with some + # minor modifications for older Python versions. + del shlex + import_as('_scons_shlex', 'shlex') + + +import shutil +try: + shutil.move +except AttributeError: + # Pre-2.3 Python has no shutil.move() function. + # + # Cribbed from Python 2.5. + import os + + def move(src, dst): + """Recursively move a file or directory to another location. + + If the destination is on our current filesystem, then simply use + rename. Otherwise, copy src to the dst and then remove src. + A lot more could be done here... A look at a mv.c shows a lot of + the issues this implementation glosses over. + + """ + try: + os.rename(src, dst) + except OSError: + if os.path.isdir(src): + if shutil.destinsrc(src, dst): + raise Error, "Cannot move a directory '%s' into itself '%s'." % (src, dst) + shutil.copytree(src, dst, symlinks=True) + shutil.rmtree(src) + else: + shutil.copy2(src,dst) + os.unlink(src) + shutil.move = move + del move + + def destinsrc(src, dst): + src = os.path.abspath(src) + return os.path.abspath(dst)[:len(src)] == src + shutil.destinsrc = destinsrc + del destinsrc + + +try: + import subprocess +except ImportError: + # Pre-2.4 Python has no subprocess module. + import_as('_scons_subprocess', 'subprocess') + +import sys +try: + sys.version_info +except AttributeError: + # Pre-1.6 Python has no sys.version_info + import string + version_string = string.split(sys.version)[0] + version_ints = map(int, string.split(version_string, '.')) + sys.version_info = tuple(version_ints + ['final', 0]) + +try: + import UserString +except ImportError: + # Pre-1.6 Python has no UserString module. + import_as('_scons_UserString', 'UserString') + +import tempfile +try: + tempfile.mkstemp +except AttributeError: + # Pre-2.3 Python has no tempfile.mkstemp function, so try to simulate it. + # adapted from the mkstemp implementation in python 3. 
+ import os + import errno + def mkstemp(*args, **kw): + text = False + # TODO (1.5) + #if 'text' in kw : + if 'text' in kw.keys() : + text = kw['text'] + del kw['text'] + elif len( args ) == 4 : + text = args[3] + args = args[:3] + flags = os.O_RDWR | os.O_CREAT | os.O_EXCL + if not text and hasattr( os, 'O_BINARY' ) : + flags = flags | os.O_BINARY + while True: + try : + name = apply(tempfile.mktemp, args, kw) + fd = os.open( name, flags, 0600 ) + return (fd, os.path.abspath(name)) + except OSError, e: + if e.errno == errno.EEXIST: + continue + raise + + tempfile.mkstemp = mkstemp + del mkstemp + + + + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/compat/_scons_UserString.py b/engine/SCons/compat/_scons_UserString.py new file mode 100644 index 0000000..a2656d8 --- /dev/null +++ b/engine/SCons/compat/_scons_UserString.py @@ -0,0 +1,98 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/compat/_scons_UserString.py 4577 2009/12/27 19:43:56 scons" + +__doc__ = """ +A user-defined wrapper around string objects + +This class is "borrowed" from the Python 2.2 UserString and modified +slightly for use with SCons. It is *NOT* guaranteed to be fully compliant +with the standard UserString class from all later versions of Python. +In particular, it does not necessarily contain all of the methods found +in later versions. 
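The tempfile shim above relies on the classic exclusive-create idiom; the sketch below restates that loop with an invented helper name, in the same Python 2 style as the code above, to show why O_CREAT | O_EXCL makes the retry safe.

    # Illustrative sketch only.
    import os, errno, tempfile

    def make_unique_file():
        while True:
            name = tempfile.mktemp()
            try:
                # O_EXCL makes open() fail if the name already exists, so a
                # name collision simply triggers another attempt.
                return os.open(name, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0600), name
            except OSError, e:
                if e.errno != errno.EEXIST:
                    raise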
+""" + +import types + +StringType = types.StringType + +if hasattr(types, 'UnicodeType'): + UnicodeType = types.UnicodeType + def is_String(obj): + return type(obj) in (StringType, UnicodeType) +else: + def is_String(obj): + return type(obj) is StringType + +class UserString: + def __init__(self, seq): + if is_String(seq): + self.data = seq + elif isinstance(seq, UserString): + self.data = seq.data[:] + else: + self.data = str(seq) + def __str__(self): return str(self.data) + def __repr__(self): return repr(self.data) + def __int__(self): return int(self.data) + def __long__(self): return long(self.data) + def __float__(self): return float(self.data) + def __complex__(self): return complex(self.data) + def __hash__(self): return hash(self.data) + + def __cmp__(self, string): + if isinstance(string, UserString): + return cmp(self.data, string.data) + else: + return cmp(self.data, string) + def __contains__(self, char): + return char in self.data + + def __len__(self): return len(self.data) + def __getitem__(self, index): return self.__class__(self.data[index]) + def __getslice__(self, start, end): + start = max(start, 0); end = max(end, 0) + return self.__class__(self.data[start:end]) + + def __add__(self, other): + if isinstance(other, UserString): + return self.__class__(self.data + other.data) + elif is_String(other): + return self.__class__(self.data + other) + else: + return self.__class__(self.data + str(other)) + def __radd__(self, other): + if is_String(other): + return self.__class__(other + self.data) + else: + return self.__class__(str(other) + self.data) + def __mul__(self, n): + return self.__class__(self.data*n) + __rmul__ = __mul__ + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/compat/_scons_hashlib.py b/engine/SCons/compat/_scons_hashlib.py new file mode 100644 index 0000000..1e02d61 --- /dev/null +++ b/engine/SCons/compat/_scons_hashlib.py @@ -0,0 +1,91 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__doc__ = """ +hashlib backwards-compatibility module for older (pre-2.5) Python versions + +This does not not NOT (repeat, *NOT*) provide complete hashlib +functionality. It only wraps the portions of MD5 functionality used +by SCons, in an interface that looks like hashlib (or enough for our +purposes, anyway). 
In fact, this module will raise an ImportError if +the underlying md5 module isn't available. +""" + +__revision__ = "src/engine/SCons/compat/_scons_hashlib.py 4577 2009/12/27 19:43:56 scons" + +import md5 +import string + +class md5obj: + + md5_module = md5 + + def __init__(self, name, string=''): + if not name in ('MD5', 'md5'): + raise ValueError, "unsupported hash type" + self.name = 'md5' + self.m = self.md5_module.md5() + + def __repr__(self): + return '<%s HASH object @ %#x>' % (self.name, id(self)) + + def copy(self): + import copy + result = copy.copy(self) + result.m = self.m.copy() + return result + + def digest(self): + return self.m.digest() + + def update(self, arg): + return self.m.update(arg) + + if hasattr(md5.md5(), 'hexdigest'): + + def hexdigest(self): + return self.m.hexdigest() + + else: + + # Objects created by the underlying md5 module have no native + # hexdigest() method (*cough* 1.5.2 *cough*), so provide an + # equivalent lifted from elsewhere. + def hexdigest(self): + h = string.hexdigits + r = '' + for c in self.digest(): + i = ord(c) + r = r + h[(i >> 4) & 0xF] + h[i & 0xF] + return r + +new = md5obj + +def md5(string=''): + return md5obj('md5', string) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/compat/_scons_itertools.py b/engine/SCons/compat/_scons_itertools.py new file mode 100644 index 0000000..b62da68 --- /dev/null +++ b/engine/SCons/compat/_scons_itertools.py @@ -0,0 +1,124 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/compat/_scons_itertools.py 4577 2009/12/27 19:43:56 scons" + +__doc__ = """ +Implementations of itertools functions for Python versions that don't +have iterators. + +These implement the functions by creating the entire list, not returning +it element-by-element as the real itertools functions do. This means +that early Python versions won't get the performance benefit of using +the itertools, but we can still use them so the later Python versions +do get the advantages of using iterators. + +Because we return the entire list, we intentionally do not implement the +itertools functions that "return" infinitely-long lists: the count(), +cycle() and repeat() functions. Other functions below have remained +unimplemented simply because they aren't being used (yet) and it wasn't +obvious how to do it. 
Or, conversely, we only implemented those functions +that *were* easy to implement (mostly because the Python documentation +contained examples of equivalent code). + +Note that these do not have independent unit tests, so it's possible +that there are bugs. +""" + +def chain(*iterables): + result = [] + for x in iterables: + result.extend(list(x)) + return result + +def count(n=0): + # returns infinite length, should not be supported + raise NotImplementedError + +def cycle(iterable): + # returns infinite length, should not be supported + raise NotImplementedError + +def dropwhile(predicate, iterable): + result = [] + for x in iterable: + if not predicate(x): + result.append(x) + break + result.extend(iterable) + return result + +def groupby(iterable, *args): + raise NotImplementedError + +def ifilter(predicate, iterable): + result = [] + if predicate is None: + predicate = bool + for x in iterable: + if predicate(x): + result.append(x) + return result + +def ifilterfalse(predicate, iterable): + result = [] + if predicate is None: + predicate = bool + for x in iterable: + if not predicate(x): + result.append(x) + return result + +def imap(function, *iterables): + return apply(map, (function,) + tuple(iterables)) + +def islice(*args, **kw): + raise NotImplementedError + +def izip(*iterables): + return apply(zip, iterables) + +def repeat(*args, **kw): + # returns infinite length, should not be supported + raise NotImplementedError + +def starmap(*args, **kw): + raise NotImplementedError + +def takewhile(predicate, iterable): + result = [] + for x in iterable: + if predicate(x): + result.append(x) + else: + break + return result + +def tee(*args, **kw): + raise NotImplementedError + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/compat/_scons_optparse.py b/engine/SCons/compat/_scons_optparse.py new file mode 100644 index 0000000..219adba --- /dev/null +++ b/engine/SCons/compat/_scons_optparse.py @@ -0,0 +1,1725 @@ +"""optparse - a powerful, extensible, and easy-to-use option parser. + +By Greg Ward <gward@python.net> + +Originally distributed as Optik; see http://optik.sourceforge.net/ . + +If you have problems with this module, please do not file bugs, +patches, or feature requests with Python; instead, use Optik's +SourceForge project page: + http://sourceforge.net/projects/optik + +For support, use the optik-users@lists.sourceforge.net mailing list +(http://lists.sourceforge.net/lists/listinfo/optik-users). +""" + +# Python developers: please do not make changes to this file, since +# it is automatically generated from the Optik source code. + +__version__ = "1.5.3" + +__all__ = ['Option', + 'SUPPRESS_HELP', + 'SUPPRESS_USAGE', + 'Values', + 'OptionContainer', + 'OptionGroup', + 'OptionParser', + 'HelpFormatter', + 'IndentedHelpFormatter', + 'TitledHelpFormatter', + 'OptParseError', + 'OptionError', + 'OptionConflictError', + 'OptionValueError', + 'BadOptionError'] + +__copyright__ = """ +Copyright (c) 2001-2006 Gregory P. Ward. All rights reserved. +Copyright (c) 2002-2006 Python Software Foundation. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+ + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + * Neither the name of the author nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +""" + +import string +import sys, os +import types +import textwrap + +def _repr(self): + return "<%s at 0x%x: %s>" % (self.__class__.__name__, id(self), self) + + +try: + sys.getdefaultencoding +except AttributeError: + def fake_getdefaultencoding(): + return None + sys.getdefaultencoding = fake_getdefaultencoding + +try: + ''.encode +except AttributeError: + def encode_wrapper(s, encoding, replacement): + return s +else: + def encode_wrapper(s, encoding, replacement): + return s.encode(encoding, replacement) + + +# This file was generated from: +# Id: option_parser.py 527 2006-07-23 15:21:30Z greg +# Id: option.py 522 2006-06-11 16:22:03Z gward +# Id: help.py 527 2006-07-23 15:21:30Z greg +# Id: errors.py 509 2006-04-20 00:58:24Z gward + +try: + from gettext import gettext +except ImportError: + def gettext(message): + return message +_ = gettext + + +class OptParseError (Exception): + def __init__(self, msg): + self.msg = msg + + def __str__(self): + return self.msg + + +class OptionError (OptParseError): + """ + Raised if an Option instance is created with invalid or + inconsistent arguments. + """ + + def __init__(self, msg, option): + self.msg = msg + self.option_id = str(option) + + def __str__(self): + if self.option_id: + return "option %s: %s" % (self.option_id, self.msg) + else: + return self.msg + +class OptionConflictError (OptionError): + """ + Raised if conflicting options are added to an OptionParser. + """ + +class OptionValueError (OptParseError): + """ + Raised if an invalid option value is encountered on the command + line. + """ + +class BadOptionError (OptParseError): + """ + Raised if an invalid option is seen on the command line. + """ + def __init__(self, opt_str): + self.opt_str = opt_str + + def __str__(self): + return _("no such option: %s") % self.opt_str + +class AmbiguousOptionError (BadOptionError): + """ + Raised if an ambiguous option is seen on the command line. + """ + def __init__(self, opt_str, possibilities): + BadOptionError.__init__(self, opt_str) + self.possibilities = possibilities + + def __str__(self): + return (_("ambiguous option: %s (%s?)") + % (self.opt_str, string.join(self.possibilities, ", "))) + + +class HelpFormatter: + + """ + Abstract base class for formatting option help. 
OptionParser + instances should use one of the HelpFormatter subclasses for + formatting help; by default IndentedHelpFormatter is used. + + Instance attributes: + parser : OptionParser + the controlling OptionParser instance + indent_increment : int + the number of columns to indent per nesting level + max_help_position : int + the maximum starting column for option help text + help_position : int + the calculated starting column for option help text; + initially the same as the maximum + width : int + total number of columns for output (pass None to constructor for + this value to be taken from the $COLUMNS environment variable) + level : int + current indentation level + current_indent : int + current indentation level (in columns) + help_width : int + number of columns available for option help text (calculated) + default_tag : str + text to replace with each option's default value, "%default" + by default. Set to false value to disable default value expansion. + option_strings : { Option : str } + maps Option instances to the snippet of help text explaining + the syntax of that option, e.g. "-h, --help" or + "-fFILE, --file=FILE" + _short_opt_fmt : str + format string controlling how short options with values are + printed in help text. Must be either "%s%s" ("-fFILE") or + "%s %s" ("-f FILE"), because those are the two syntaxes that + Optik supports. + _long_opt_fmt : str + similar but for long options; must be either "%s %s" ("--file FILE") + or "%s=%s" ("--file=FILE"). + """ + + NO_DEFAULT_VALUE = "none" + + def __init__(self, + indent_increment, + max_help_position, + width, + short_first): + self.parser = None + self.indent_increment = indent_increment + self.help_position = self.max_help_position = max_help_position + if width is None: + try: + width = int(os.environ['COLUMNS']) + except (KeyError, ValueError): + width = 80 + width = width - 2 + self.width = width + self.current_indent = 0 + self.level = 0 + self.help_width = None # computed later + self.short_first = short_first + self.default_tag = "%default" + self.option_strings = {} + self._short_opt_fmt = "%s %s" + self._long_opt_fmt = "%s=%s" + + def set_parser(self, parser): + self.parser = parser + + def set_short_opt_delimiter(self, delim): + if delim not in ("", " "): + raise ValueError( + "invalid metavar delimiter for short options: %r" % delim) + self._short_opt_fmt = "%s" + delim + "%s" + + def set_long_opt_delimiter(self, delim): + if delim not in ("=", " "): + raise ValueError( + "invalid metavar delimiter for long options: %r" % delim) + self._long_opt_fmt = "%s" + delim + "%s" + + def indent(self): + self.current_indent = self.current_indent + self.indent_increment + self.level = self.level + 1 + + def dedent(self): + self.current_indent = self.current_indent - self.indent_increment + assert self.current_indent >= 0, "Indent decreased below 0." + self.level = self.level - 1 + + def format_usage(self, usage): + raise NotImplementedError, "subclasses must implement" + + def format_heading(self, heading): + raise NotImplementedError, "subclasses must implement" + + def _format_text(self, text): + """ + Format a paragraph of free-form text for inclusion in the + help output at the current indentation level. 
+ """ + text_width = self.width - self.current_indent + indent = " "*self.current_indent + return textwrap.fill(text, + text_width, + initial_indent=indent, + subsequent_indent=indent) + + def format_description(self, description): + if description: + return self._format_text(description) + "\n" + else: + return "" + + def format_epilog(self, epilog): + if epilog: + return "\n" + self._format_text(epilog) + "\n" + else: + return "" + + + def expand_default(self, option): + if self.parser is None or not self.default_tag: + return option.help + + default_value = self.parser.defaults.get(option.dest) + if default_value is NO_DEFAULT or default_value is None: + default_value = self.NO_DEFAULT_VALUE + + return string.replace(option.help, self.default_tag, str(default_value)) + + def format_option(self, option): + # The help for each option consists of two parts: + # * the opt strings and metavars + # eg. ("-x", or "-fFILENAME, --file=FILENAME") + # * the user-supplied help string + # eg. ("turn on expert mode", "read data from FILENAME") + # + # If possible, we write both of these on the same line: + # -x turn on expert mode + # + # But if the opt string list is too long, we put the help + # string on a second line, indented to the same column it would + # start in if it fit on the first line. + # -fFILENAME, --file=FILENAME + # read data from FILENAME + result = [] + opts = self.option_strings[option] + opt_width = self.help_position - self.current_indent - 2 + if len(opts) > opt_width: + opts = "%*s%s\n" % (self.current_indent, "", opts) + indent_first = self.help_position + else: # start help on same line as opts + opts = "%*s%-*s " % (self.current_indent, "", opt_width, opts) + indent_first = 0 + result.append(opts) + if option.help: + help_text = self.expand_default(option) + help_lines = textwrap.wrap(help_text, self.help_width) + result.append("%*s%s\n" % (indent_first, "", help_lines[0])) + for line in help_lines[1:]: + result.append("%*s%s\n" % (self.help_position, "", line)) + elif opts[-1] != "\n": + result.append("\n") + return string.join(result, "") + + def store_option_strings(self, parser): + self.indent() + max_len = 0 + for opt in parser.option_list: + strings = self.format_option_strings(opt) + self.option_strings[opt] = strings + max_len = max(max_len, len(strings) + self.current_indent) + self.indent() + for group in parser.option_groups: + for opt in group.option_list: + strings = self.format_option_strings(opt) + self.option_strings[opt] = strings + max_len = max(max_len, len(strings) + self.current_indent) + self.dedent() + self.dedent() + self.help_position = min(max_len + 2, self.max_help_position) + self.help_width = self.width - self.help_position + + def format_option_strings(self, option): + """Return a comma-separated list of option strings & metavariables.""" + if option.takes_value(): + metavar = option.metavar or string.upper(option.dest) + short_opts = [] + for sopt in option._short_opts: + short_opts.append(self._short_opt_fmt % (sopt, metavar)) + long_opts = [] + for lopt in option._long_opts: + long_opts.append(self._long_opt_fmt % (lopt, metavar)) + else: + short_opts = option._short_opts + long_opts = option._long_opts + + if self.short_first: + opts = short_opts + long_opts + else: + opts = long_opts + short_opts + + return string.join(opts, ", ") + +class IndentedHelpFormatter (HelpFormatter): + """Format help with indented section bodies. 
+ """ + + def __init__(self, + indent_increment=2, + max_help_position=24, + width=None, + short_first=1): + HelpFormatter.__init__( + self, indent_increment, max_help_position, width, short_first) + + def format_usage(self, usage): + return _("Usage: %s\n") % usage + + def format_heading(self, heading): + return "%*s%s:\n" % (self.current_indent, "", heading) + + +class TitledHelpFormatter (HelpFormatter): + """Format help with underlined section headers. + """ + + def __init__(self, + indent_increment=0, + max_help_position=24, + width=None, + short_first=0): + HelpFormatter.__init__ ( + self, indent_increment, max_help_position, width, short_first) + + def format_usage(self, usage): + return "%s %s\n" % (self.format_heading(_("Usage")), usage) + + def format_heading(self, heading): + return "%s\n%s\n" % (heading, "=-"[self.level] * len(heading)) + + +def _parse_num(val, type): + if string.lower(val[:2]) == "0x": # hexadecimal + radix = 16 + elif string.lower(val[:2]) == "0b": # binary + radix = 2 + val = val[2:] or "0" # have to remove "0b" prefix + elif val[:1] == "0": # octal + radix = 8 + else: # decimal + radix = 10 + + return type(val, radix) + +def _parse_int(val): + return _parse_num(val, int) + +def _parse_long(val): + return _parse_num(val, long) + +try: + int('0', 10) +except TypeError: + # Python 1.5.2 doesn't allow a radix value to be passed to int(). + _parse_int = int + +try: + long('0', 10) +except TypeError: + # Python 1.5.2 doesn't allow a radix value to be passed to long(). + _parse_long = long + +_builtin_cvt = { "int" : (_parse_int, _("integer")), + "long" : (_parse_long, _("long integer")), + "float" : (float, _("floating-point")), + "complex" : (complex, _("complex")) } + +def check_builtin(option, opt, value): + (cvt, what) = _builtin_cvt[option.type] + try: + return cvt(value) + except ValueError: + raise OptionValueError( + _("option %s: invalid %s value: %r") % (opt, what, value)) + +def check_choice(option, opt, value): + if value in option.choices: + return value + else: + choices = string.join(map(repr, option.choices), ", ") + raise OptionValueError( + _("option %s: invalid choice: %r (choose from %s)") + % (opt, value, choices)) + +# Not supplying a default is different from a default of None, +# so we need an explicit "not supplied" value. +NO_DEFAULT = ("NO", "DEFAULT") + + +class Option: + """ + Instance attributes: + _short_opts : [string] + _long_opts : [string] + + action : string + type : string + dest : string + default : any + nargs : int + const : any + choices : [string] + callback : function + callback_args : (any*) + callback_kwargs : { string : any } + help : string + metavar : string + """ + + # The list of instance attributes that may be set through + # keyword args to the constructor. + ATTRS = ['action', + 'type', + 'dest', + 'default', + 'nargs', + 'const', + 'choices', + 'callback', + 'callback_args', + 'callback_kwargs', + 'help', + 'metavar'] + + # The set of actions allowed by option parsers. Explicitly listed + # here so the constructor can validate its arguments. + ACTIONS = ("store", + "store_const", + "store_true", + "store_false", + "append", + "append_const", + "count", + "callback", + "help", + "version") + + # The set of actions that involve storing a value somewhere; + # also listed just for constructor argument validation. (If + # the action is one of these, there must be a destination.) 
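+    # For illustration only: a subclass that wanted to add a
+    # hypothetical "store_upper" action would list it in ACTIONS and
+    # in the relevant tuples below, then override take_action(),
+    # roughly like this:
+    #
+    #     class UpperOption(Option):
+    #         ACTIONS = Option.ACTIONS + ("store_upper",)
+    #         STORE_ACTIONS = Option.STORE_ACTIONS + ("store_upper",)
+    #         TYPED_ACTIONS = Option.TYPED_ACTIONS + ("store_upper",)
+    #         ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("store_upper",)
+    #
+    #         def take_action(self, action, dest, opt, value, values, parser):
+    #             if action == "store_upper":
+    #                 setattr(values, dest, string.upper(value))
+    #                 return 1
+    #             return Option.take_action(self, action, dest, opt,
+    #                                       value, values, parser)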
+ STORE_ACTIONS = ("store", + "store_const", + "store_true", + "store_false", + "append", + "append_const", + "count") + + # The set of actions for which it makes sense to supply a value + # type, ie. which may consume an argument from the command line. + TYPED_ACTIONS = ("store", + "append", + "callback") + + # The set of actions which *require* a value type, ie. that + # always consume an argument from the command line. + ALWAYS_TYPED_ACTIONS = ("store", + "append") + + # The set of actions which take a 'const' attribute. + CONST_ACTIONS = ("store_const", + "append_const") + + # The set of known types for option parsers. Again, listed here for + # constructor argument validation. + TYPES = ("string", "int", "long", "float", "complex", "choice") + + # Dictionary of argument checking functions, which convert and + # validate option arguments according to the option type. + # + # Signature of checking functions is: + # check(option : Option, opt : string, value : string) -> any + # where + # option is the Option instance calling the checker + # opt is the actual option seen on the command-line + # (eg. "-a", "--file") + # value is the option argument seen on the command-line + # + # The return value should be in the appropriate Python type + # for option.type -- eg. an integer if option.type == "int". + # + # If no checker is defined for a type, arguments will be + # unchecked and remain strings. + TYPE_CHECKER = { "int" : check_builtin, + "long" : check_builtin, + "float" : check_builtin, + "complex": check_builtin, + "choice" : check_choice, + } + + + # CHECK_METHODS is a list of unbound method objects; they are called + # by the constructor, in order, after all attributes are + # initialized. The list is created and filled in later, after all + # the methods are actually defined. (I just put it here because I + # like to define and document all class attributes in the same + # place.) Subclasses that add another _check_*() method should + # define their own CHECK_METHODS list that adds their check method + # to those from this class. + CHECK_METHODS = None + + + # -- Constructor/initialization methods ---------------------------- + + def __init__(self, *opts, **attrs): + # Set _short_opts, _long_opts attrs from 'opts' tuple. + # Have to be set now, in case no option strings are supplied. + self._short_opts = [] + self._long_opts = [] + opts = self._check_opt_strings(opts) + self._set_opt_strings(opts) + + # Set all other attrs (action, type, etc.) from 'attrs' dict + self._set_attrs(attrs) + + # Check all the attributes we just set. There are lots of + # complicated interdependencies, but luckily they can be farmed + # out to the _check_*() methods listed in CHECK_METHODS -- which + # could be handy for subclasses! The one thing these all share + # is that they raise OptionError if they discover a problem. + for checker in self.CHECK_METHODS: + checker(self) + + def _check_opt_strings(self, opts): + # Filter out None because early versions of Optik had exactly + # one short option and one long option, either of which + # could be None. 
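+        # (So, for instance, Option(None, "--quiet") is still accepted:
+        # the None entry is simply dropped here before the remaining
+        # strings are validated.)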
+ opts = filter(None, opts) + if not opts: + raise TypeError("at least one option string must be supplied") + return opts + + def _set_opt_strings(self, opts): + for opt in opts: + if len(opt) < 2: + raise OptionError( + "invalid option string %r: " + "must be at least two characters long" % opt, self) + elif len(opt) == 2: + if not (opt[0] == "-" and opt[1] != "-"): + raise OptionError( + "invalid short option string %r: " + "must be of the form -x, (x any non-dash char)" % opt, + self) + self._short_opts.append(opt) + else: + if not (opt[0:2] == "--" and opt[2] != "-"): + raise OptionError( + "invalid long option string %r: " + "must start with --, followed by non-dash" % opt, + self) + self._long_opts.append(opt) + + def _set_attrs(self, attrs): + for attr in self.ATTRS: + if attrs.has_key(attr): + setattr(self, attr, attrs[attr]) + del attrs[attr] + else: + if attr == 'default': + setattr(self, attr, NO_DEFAULT) + else: + setattr(self, attr, None) + if attrs: + attrs = attrs.keys() + attrs.sort() + raise OptionError( + "invalid keyword arguments: %s" % string.join(attrs, ", "), + self) + + + # -- Constructor validation methods -------------------------------- + + def _check_action(self): + if self.action is None: + self.action = "store" + elif self.action not in self.ACTIONS: + raise OptionError("invalid action: %r" % self.action, self) + + def _check_type(self): + if self.type is None: + if self.action in self.ALWAYS_TYPED_ACTIONS: + if self.choices is not None: + # The "choices" attribute implies "choice" type. + self.type = "choice" + else: + # No type given? "string" is the most sensible default. + self.type = "string" + else: + # Allow type objects or builtin type conversion functions + # (int, str, etc.) as an alternative to their names. (The + # complicated check of __builtin__ is only necessary for + # Python 2.1 and earlier, and is short-circuited by the + # first check on modern Pythons.) + import __builtin__ + if ( type(self.type) is types.TypeType or + (hasattr(self.type, "__name__") and + getattr(__builtin__, self.type.__name__, None) is self.type) ): + self.type = self.type.__name__ + + if self.type == "str": + self.type = "string" + + if self.type not in self.TYPES: + raise OptionError("invalid option type: %r" % self.type, self) + if self.action not in self.TYPED_ACTIONS: + raise OptionError( + "must not supply a type for action %r" % self.action, self) + + def _check_choice(self): + if self.type == "choice": + if self.choices is None: + raise OptionError( + "must supply a list of choices for type 'choice'", self) + elif type(self.choices) not in (types.TupleType, types.ListType): + raise OptionError( + "choices must be a list of strings ('%s' supplied)" + % string.split(str(type(self.choices)), "'")[1], self) + elif self.choices is not None: + raise OptionError( + "must not supply choices for type %r" % self.type, self) + + def _check_dest(self): + # No destination given, and we need one for this action. The + # self.type check is for callbacks that take a value. + takes_value = (self.action in self.STORE_ACTIONS or + self.type is not None) + if self.dest is None and takes_value: + + # Glean a destination from the first long option string, + # or from the first short option string if no long options. + if self._long_opts: + # eg. 
"--foo-bar" -> "foo_bar" + self.dest = string.replace(self._long_opts[0][2:], '-', '_') + else: + self.dest = self._short_opts[0][1] + + def _check_const(self): + if self.action not in self.CONST_ACTIONS and self.const is not None: + raise OptionError( + "'const' must not be supplied for action %r" % self.action, + self) + + def _check_nargs(self): + if self.action in self.TYPED_ACTIONS: + if self.nargs is None: + self.nargs = 1 + elif self.nargs is not None: + raise OptionError( + "'nargs' must not be supplied for action %r" % self.action, + self) + + def _check_callback(self): + if self.action == "callback": + if not callable(self.callback): + raise OptionError( + "callback not callable: %r" % self.callback, self) + if (self.callback_args is not None and + type(self.callback_args) is not types.TupleType): + raise OptionError( + "callback_args, if supplied, must be a tuple: not %r" + % self.callback_args, self) + if (self.callback_kwargs is not None and + type(self.callback_kwargs) is not types.DictType): + raise OptionError( + "callback_kwargs, if supplied, must be a dict: not %r" + % self.callback_kwargs, self) + else: + if self.callback is not None: + raise OptionError( + "callback supplied (%r) for non-callback option" + % self.callback, self) + if self.callback_args is not None: + raise OptionError( + "callback_args supplied for non-callback option", self) + if self.callback_kwargs is not None: + raise OptionError( + "callback_kwargs supplied for non-callback option", self) + + + CHECK_METHODS = [_check_action, + _check_type, + _check_choice, + _check_dest, + _check_const, + _check_nargs, + _check_callback] + + + # -- Miscellaneous methods ----------------------------------------- + + def __str__(self): + return string.join(self._short_opts + self._long_opts, "/") + + __repr__ = _repr + + def takes_value(self): + return self.type is not None + + def get_opt_string(self): + if self._long_opts: + return self._long_opts[0] + else: + return self._short_opts[0] + + + # -- Processing methods -------------------------------------------- + + def check_value(self, opt, value): + checker = self.TYPE_CHECKER.get(self.type) + if checker is None: + return value + else: + return checker(self, opt, value) + + def convert_value(self, opt, value): + if value is not None: + if self.nargs == 1: + return self.check_value(opt, value) + else: + return tuple(map(lambda v, o=opt, s=self: s.check_value(o, v), value)) + + def process(self, opt, value, values, parser): + + # First, convert the value(s) to the right type. Howl if any + # value(s) are bogus. + value = self.convert_value(opt, value) + + # And then take whatever action is expected of us. + # This is a separate method to make life easier for + # subclasses to add new actions. 
+ return self.take_action( + self.action, self.dest, opt, value, values, parser) + + def take_action(self, action, dest, opt, value, values, parser): + if action == "store": + setattr(values, dest, value) + elif action == "store_const": + setattr(values, dest, self.const) + elif action == "store_true": + setattr(values, dest, True) + elif action == "store_false": + setattr(values, dest, False) + elif action == "append": + values.ensure_value(dest, []).append(value) + elif action == "append_const": + values.ensure_value(dest, []).append(self.const) + elif action == "count": + setattr(values, dest, values.ensure_value(dest, 0) + 1) + elif action == "callback": + args = self.callback_args or () + kwargs = self.callback_kwargs or {} + apply(self.callback, (self, opt, value, parser,) + args, kwargs) + elif action == "help": + parser.print_help() + parser.exit() + elif action == "version": + parser.print_version() + parser.exit() + else: + raise RuntimeError, "unknown action %r" % self.action + + return 1 + +# class Option + + +SUPPRESS_HELP = "SUPPRESS"+"HELP" +SUPPRESS_USAGE = "SUPPRESS"+"USAGE" + +# For compatibility with Python 2.2 +try: + True, False +except NameError: + (True, False) = (1, 0) + +try: + types.UnicodeType +except AttributeError: + def isbasestring(x): + return isinstance(x, types.StringType) +else: + def isbasestring(x): + return isinstance(x, types.StringType) or isinstance(x, types.UnicodeType) + +class Values: + + def __init__(self, defaults=None): + if defaults: + for (attr, val) in defaults.items(): + setattr(self, attr, val) + + def __str__(self): + return str(self.__dict__) + + __repr__ = _repr + + def __cmp__(self, other): + if isinstance(other, Values): + return cmp(self.__dict__, other.__dict__) + elif isinstance(other, types.DictType): + return cmp(self.__dict__, other) + else: + return -1 + + def _update_careful(self, dict): + """ + Update the option values from an arbitrary dictionary, but only + use keys from dict that already have a corresponding attribute + in self. Any keys in dict without a corresponding attribute + are silently ignored. + """ + for attr in dir(self): + if dict.has_key(attr): + dval = dict[attr] + if dval is not None: + setattr(self, attr, dval) + + def _update_loose(self, dict): + """ + Update the option values from an arbitrary dictionary, + using all keys from the dictionary regardless of whether + they have a corresponding attribute in self or not. + """ + self.__dict__.update(dict) + + def _update(self, dict, mode): + if mode == "careful": + self._update_careful(dict) + elif mode == "loose": + self._update_loose(dict) + else: + raise ValueError, "invalid update mode: %r" % mode + + def read_module(self, modname, mode="careful"): + __import__(modname) + mod = sys.modules[modname] + self._update(vars(mod), mode) + + def read_file(self, filename, mode="careful"): + vars = {} + exec open(filename, 'rU').read() in vars + self._update(vars, mode) + + def ensure_value(self, attr, value): + if not hasattr(self, attr) or getattr(self, attr) is None: + setattr(self, attr, value) + return getattr(self, attr) + + +class OptionContainer: + + """ + Abstract base class. + + Class attributes: + standard_option_list : [Option] + list of standard options that will be accepted by all instances + of this parser class (intended to be overridden by subclasses). + + Instance attributes: + option_list : [Option] + the list of Option objects contained by this OptionContainer + _short_opt : { string : Option } + dictionary mapping short option strings, eg. 
"-f" or "-X", + to the Option instances that implement them. If an Option + has multiple short option strings, it will appears in this + dictionary multiple times. [1] + _long_opt : { string : Option } + dictionary mapping long option strings, eg. "--file" or + "--exclude", to the Option instances that implement them. + Again, a given Option can occur multiple times in this + dictionary. [1] + defaults : { string : any } + dictionary mapping option destination names to default + values for each destination [1] + + [1] These mappings are common to (shared by) all components of the + controlling OptionParser, where they are initially created. + + """ + + def __init__(self, option_class, conflict_handler, description): + # Initialize the option list and related data structures. + # This method must be provided by subclasses, and it must + # initialize at least the following instance attributes: + # option_list, _short_opt, _long_opt, defaults. + self._create_option_list() + + self.option_class = option_class + self.set_conflict_handler(conflict_handler) + self.set_description(description) + + def _create_option_mappings(self): + # For use by OptionParser constructor -- create the master + # option mappings used by this OptionParser and all + # OptionGroups that it owns. + self._short_opt = {} # single letter -> Option instance + self._long_opt = {} # long option -> Option instance + self.defaults = {} # maps option dest -> default value + + + def _share_option_mappings(self, parser): + # For use by OptionGroup constructor -- use shared option + # mappings from the OptionParser that owns this OptionGroup. + self._short_opt = parser._short_opt + self._long_opt = parser._long_opt + self.defaults = parser.defaults + + def set_conflict_handler(self, handler): + if handler not in ("error", "resolve"): + raise ValueError, "invalid conflict_resolution value %r" % handler + self.conflict_handler = handler + + def set_description(self, description): + self.description = description + + def get_description(self): + return self.description + + + def destroy(self): + """see OptionParser.destroy().""" + del self._short_opt + del self._long_opt + del self.defaults + + + # -- Option-adding methods ----------------------------------------- + + def _check_conflict(self, option): + conflict_opts = [] + for opt in option._short_opts: + if self._short_opt.has_key(opt): + conflict_opts.append((opt, self._short_opt[opt])) + for opt in option._long_opts: + if self._long_opt.has_key(opt): + conflict_opts.append((opt, self._long_opt[opt])) + + if conflict_opts: + handler = self.conflict_handler + if handler == "error": + raise OptionConflictError( + "conflicting option string(s): %s" + % string.join(map(lambda co: co[0], conflict_opts), ", "), + option) + elif handler == "resolve": + for (opt, c_option) in conflict_opts: + if opt[:2] == "--": + c_option._long_opts.remove(opt) + del self._long_opt[opt] + else: + c_option._short_opts.remove(opt) + del self._short_opt[opt] + if not (c_option._short_opts or c_option._long_opts): + c_option.container.option_list.remove(c_option) + + def add_option(self, *args, **kwargs): + """add_option(Option) + add_option(opt_str, ..., kwarg=val, ...) 
+ """ + if type(args[0]) is types.StringType: + option = apply(self.option_class, args, kwargs) + elif len(args) == 1 and not kwargs: + option = args[0] + if not isinstance(option, Option): + raise TypeError, "not an Option instance: %r" % option + else: + raise TypeError, "invalid arguments" + + self._check_conflict(option) + + self.option_list.append(option) + option.container = self + for opt in option._short_opts: + self._short_opt[opt] = option + for opt in option._long_opts: + self._long_opt[opt] = option + + if option.dest is not None: # option has a dest, we need a default + if option.default is not NO_DEFAULT: + self.defaults[option.dest] = option.default + elif not self.defaults.has_key(option.dest): + self.defaults[option.dest] = None + + return option + + def add_options(self, option_list): + for option in option_list: + self.add_option(option) + + # -- Option query/removal methods ---------------------------------- + + def get_option(self, opt_str): + return (self._short_opt.get(opt_str) or + self._long_opt.get(opt_str)) + + def has_option(self, opt_str): + return (self._short_opt.has_key(opt_str) or + self._long_opt.has_key(opt_str)) + + def remove_option(self, opt_str): + option = self._short_opt.get(opt_str) + if option is None: + option = self._long_opt.get(opt_str) + if option is None: + raise ValueError("no such option %r" % opt_str) + + for opt in option._short_opts: + del self._short_opt[opt] + for opt in option._long_opts: + del self._long_opt[opt] + option.container.option_list.remove(option) + + + # -- Help-formatting methods --------------------------------------- + + def format_option_help(self, formatter): + if not self.option_list: + return "" + result = [] + for option in self.option_list: + if not option.help is SUPPRESS_HELP: + result.append(formatter.format_option(option)) + return string.join(result, "") + + def format_description(self, formatter): + return formatter.format_description(self.get_description()) + + def format_help(self, formatter): + result = [] + if self.description: + result.append(self.format_description(formatter)) + if self.option_list: + result.append(self.format_option_help(formatter)) + return string.join(result, "\n") + + +class OptionGroup (OptionContainer): + + def __init__(self, parser, title, description=None): + self.parser = parser + OptionContainer.__init__( + self, parser.option_class, parser.conflict_handler, description) + self.title = title + + def _create_option_list(self): + self.option_list = [] + self._share_option_mappings(self.parser) + + def set_title(self, title): + self.title = title + + def destroy(self): + """see OptionParser.destroy().""" + OptionContainer.destroy(self) + del self.option_list + + # -- Help-formatting methods --------------------------------------- + + def format_help(self, formatter): + result = formatter.format_heading(self.title) + formatter.indent() + result = result + OptionContainer.format_help(self, formatter) + formatter.dedent() + return result + + +class OptionParser (OptionContainer): + + """ + Class attributes: + standard_option_list : [Option] + list of standard options that will be accepted by all instances + of this parser class (intended to be overridden by subclasses). + + Instance attributes: + usage : string + a usage string for your program. Before it is displayed + to the user, "%prog" will be expanded to the name of + your program (self.prog or os.path.basename(sys.argv[0])). + prog : string + the name of the current program (to override + os.path.basename(sys.argv[0])). 
+ epilog : string + paragraph of help text to print after option help + + option_groups : [OptionGroup] + list of option groups in this parser (option groups are + irrelevant for parsing the command-line, but very useful + for generating help) + + allow_interspersed_args : bool = true + if true, positional arguments may be interspersed with options. + Assuming -a and -b each take a single argument, the command-line + -ablah foo bar -bboo baz + will be interpreted the same as + -ablah -bboo -- foo bar baz + If this flag were false, that command line would be interpreted as + -ablah -- foo bar -bboo baz + -- ie. we stop processing options as soon as we see the first + non-option argument. (This is the tradition followed by + Python's getopt module, Perl's Getopt::Std, and other argument- + parsing libraries, but it is generally annoying to users.) + + process_default_values : bool = true + if true, option default values are processed similarly to option + values from the command line: that is, they are passed to the + type-checking function for the option's type (as long as the + default value is a string). (This really only matters if you + have defined custom types; see SF bug #955889.) Set it to false + to restore the behaviour of Optik 1.4.1 and earlier. + + rargs : [string] + the argument list currently being parsed. Only set when + parse_args() is active, and continually trimmed down as + we consume arguments. Mainly there for the benefit of + callback options. + largs : [string] + the list of leftover arguments that we have skipped while + parsing options. If allow_interspersed_args is false, this + list is always empty. + values : Values + the set of option values currently being accumulated. Only + set when parse_args() is active. Also mainly for callbacks. + + Because of the 'rargs', 'largs', and 'values' attributes, + OptionParser is not thread-safe. If, for some perverse reason, you + need to parse command-line arguments simultaneously in different + threads, use different OptionParser instances. + + """ + + standard_option_list = [] + + def __init__(self, + usage=None, + option_list=None, + option_class=Option, + version=None, + conflict_handler="error", + description=None, + formatter=None, + add_help_option=True, + prog=None, + epilog=None): + OptionContainer.__init__( + self, option_class, conflict_handler, description) + self.set_usage(usage) + self.prog = prog + self.version = version + self.allow_interspersed_args = True + self.process_default_values = True + if formatter is None: + formatter = IndentedHelpFormatter() + self.formatter = formatter + self.formatter.set_parser(self) + self.epilog = epilog + + # Populate the option list; initial sources are the + # standard_option_list class attribute, the 'option_list' + # argument, and (if applicable) the _add_version_option() and + # _add_help_option() methods. + self._populate_option_list(option_list, + add_help=add_help_option) + + self._init_parsing_state() + + + def destroy(self): + """ + Declare that you are done with this OptionParser. This cleans up + reference cycles so the OptionParser (and all objects referenced by + it) can be garbage-collected promptly. After calling destroy(), the + OptionParser is unusable. 
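+
+        A typical (purely illustrative) use is code that builds many
+        short-lived parsers, e.g.:
+
+            parser = OptionParser(usage="%prog [options] target")
+            parser.add_option("-q", "--quiet",
+                              action="store_false", dest="verbose")
+            (options, args) = parser.parse_args()
+            parser.destroy()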
+ """ + OptionContainer.destroy(self) + for group in self.option_groups: + group.destroy() + del self.option_list + del self.option_groups + del self.formatter + + + # -- Private methods ----------------------------------------------- + # (used by our or OptionContainer's constructor) + + def _create_option_list(self): + self.option_list = [] + self.option_groups = [] + self._create_option_mappings() + + def _add_help_option(self): + self.add_option("-h", "--help", + action="help", + help=_("show this help message and exit")) + + def _add_version_option(self): + self.add_option("--version", + action="version", + help=_("show program's version number and exit")) + + def _populate_option_list(self, option_list, add_help=True): + if self.standard_option_list: + self.add_options(self.standard_option_list) + if option_list: + self.add_options(option_list) + if self.version: + self._add_version_option() + if add_help: + self._add_help_option() + + def _init_parsing_state(self): + # These are set in parse_args() for the convenience of callbacks. + self.rargs = None + self.largs = None + self.values = None + + + # -- Simple modifier methods --------------------------------------- + + def set_usage(self, usage): + if usage is None: + self.usage = _("%prog [options]") + elif usage is SUPPRESS_USAGE: + self.usage = None + # For backwards compatibility with Optik 1.3 and earlier. + elif string.lower(usage)[:7] == "usage: ": + self.usage = usage[7:] + else: + self.usage = usage + + def enable_interspersed_args(self): + self.allow_interspersed_args = True + + def disable_interspersed_args(self): + self.allow_interspersed_args = False + + def set_process_default_values(self, process): + self.process_default_values = process + + def set_default(self, dest, value): + self.defaults[dest] = value + + def set_defaults(self, **kwargs): + self.defaults.update(kwargs) + + def _get_all_options(self): + options = self.option_list[:] + for group in self.option_groups: + options.extend(group.option_list) + return options + + def get_default_values(self): + if not self.process_default_values: + # Old, pre-Optik 1.5 behaviour. 
+ return Values(self.defaults) + + defaults = self.defaults.copy() + for option in self._get_all_options(): + default = defaults.get(option.dest) + if isbasestring(default): + opt_str = option.get_opt_string() + defaults[option.dest] = option.check_value(opt_str, default) + + return Values(defaults) + + + # -- OptionGroup methods ------------------------------------------- + + def add_option_group(self, *args, **kwargs): + # XXX lots of overlap with OptionContainer.add_option() + if type(args[0]) is types.StringType: + group = apply(OptionGroup, (self,) + args, kwargs) + elif len(args) == 1 and not kwargs: + group = args[0] + if not isinstance(group, OptionGroup): + raise TypeError, "not an OptionGroup instance: %r" % group + if group.parser is not self: + raise ValueError, "invalid OptionGroup (wrong parser)" + else: + raise TypeError, "invalid arguments" + + self.option_groups.append(group) + return group + + def get_option_group(self, opt_str): + option = (self._short_opt.get(opt_str) or + self._long_opt.get(opt_str)) + if option and option.container is not self: + return option.container + return None + + + # -- Option-parsing methods ---------------------------------------- + + def _get_args(self, args): + if args is None: + return sys.argv[1:] + else: + return args[:] # don't modify caller's list + + def parse_args(self, args=None, values=None): + """ + parse_args(args : [string] = sys.argv[1:], + values : Values = None) + -> (values : Values, args : [string]) + + Parse the command-line options found in 'args' (default: + sys.argv[1:]). Any errors result in a call to 'error()', which + by default prints the usage message to stderr and calls + sys.exit() with an error message. On success returns a pair + (values, args) where 'values' is an Values instance (with all + your option values) and 'args' is the list of arguments left + over after parsing options. + """ + rargs = self._get_args(args) + if values is None: + values = self.get_default_values() + + # Store the halves of the argument list as attributes for the + # convenience of callbacks: + # rargs + # the rest of the command-line (the "r" stands for + # "remaining" or "right-hand") + # largs + # the leftover arguments -- ie. what's left after removing + # options and their arguments (the "l" stands for "leftover" + # or "left-hand") + self.rargs = rargs + self.largs = largs = [] + self.values = values + + try: + stop = self._process_args(largs, rargs, values) + except (BadOptionError, OptionValueError), err: + self.error(str(err)) + + args = largs + rargs + return self.check_values(values, args) + + def check_values(self, values, args): + """ + check_values(values : Values, args : [string]) + -> (values : Values, args : [string]) + + Check that the supplied option values and leftover arguments are + valid. Returns the option values and leftover arguments + (possibly adjusted, possibly completely new -- whatever you + like). Default implementation just returns the passed-in + values; subclasses may override as desired. + """ + return (values, args) + + def _process_args(self, largs, rargs, values): + """_process_args(largs : [string], + rargs : [string], + values : Values) + + Process command-line arguments and populate 'values', consuming + options and arguments from 'rargs'. If 'allow_interspersed_args' is + false, stop at the first non-option argument. If true, accumulate any + interspersed non-option arguments in 'largs'. 
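+
+        For example (illustrative), if -a takes no value and -f takes
+        one, then with allow_interspersed_args true the argument list
+
+            ["-a", "input.c", "-fout.o", "lib.c"]
+
+        leaves "input.c" and "lib.c" in 'largs' while "-a" and "-fout.o"
+        are consumed as options; with the flag false, processing stops
+        at "input.c" and it, together with everything after it, is left
+        in 'rargs'.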
+ """ + while rargs: + arg = rargs[0] + # We handle bare "--" explicitly, and bare "-" is handled by the + # standard arg handler since the short arg case ensures that the + # len of the opt string is greater than 1. + if arg == "--": + del rargs[0] + return + elif arg[0:2] == "--": + # process a single long option (possibly with value(s)) + self._process_long_opt(rargs, values) + elif arg[:1] == "-" and len(arg) > 1: + # process a cluster of short options (possibly with + # value(s) for the last one only) + self._process_short_opts(rargs, values) + elif self.allow_interspersed_args: + largs.append(arg) + del rargs[0] + else: + return # stop now, leave this arg in rargs + + # Say this is the original argument list: + # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] + # ^ + # (we are about to process arg(i)). + # + # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of + # [arg0, ..., arg(i-1)] (any options and their arguments will have + # been removed from largs). + # + # The while loop will usually consume 1 or more arguments per pass. + # If it consumes 1 (eg. arg is an option that takes no arguments), + # then after _process_arg() is done the situation is: + # + # largs = subset of [arg0, ..., arg(i)] + # rargs = [arg(i+1), ..., arg(N-1)] + # + # If allow_interspersed_args is false, largs will always be + # *empty* -- still a subset of [arg0, ..., arg(i-1)], but + # not a very interesting subset! + + def _match_long_opt(self, opt): + """_match_long_opt(opt : string) -> string + + Determine which long option string 'opt' matches, ie. which one + it is an unambiguous abbrevation for. Raises BadOptionError if + 'opt' doesn't unambiguously match any long option string. + """ + return _match_abbrev(opt, self._long_opt) + + def _process_long_opt(self, rargs, values): + arg = rargs.pop(0) + + # Value explicitly attached to arg? Pretend it's the next + # argument. + if "=" in arg: + (opt, next_arg) = string.split(arg, "=", 1) + rargs.insert(0, next_arg) + had_explicit_value = True + else: + opt = arg + had_explicit_value = False + + opt = self._match_long_opt(opt) + option = self._long_opt[opt] + if option.takes_value(): + nargs = option.nargs + if len(rargs) < nargs: + if nargs == 1: + self.error(_("%s option requires an argument") % opt) + else: + self.error(_("%s option requires %d arguments") + % (opt, nargs)) + elif nargs == 1: + value = rargs.pop(0) + else: + value = tuple(rargs[0:nargs]) + del rargs[0:nargs] + + elif had_explicit_value: + self.error(_("%s option does not take a value") % opt) + + else: + value = None + + option.process(opt, value, values, self) + + def _process_short_opts(self, rargs, values): + arg = rargs.pop(0) + stop = False + i = 1 + for ch in arg[1:]: + opt = "-" + ch + option = self._short_opt.get(opt) + i = i + 1 # we have consumed a character + + if not option: + raise BadOptionError(opt) + if option.takes_value(): + # Any characters left in arg? Pretend they're the + # next arg, and stop consuming characters of arg. 
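+                # (For example, given "-fout.txt" we consume "-f", push
+                # "out.txt" back onto rargs so it becomes the option's
+                # value, and stop treating the remaining characters as
+                # further short options.)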
+ if i < len(arg): + rargs.insert(0, arg[i:]) + stop = True + + nargs = option.nargs + if len(rargs) < nargs: + if nargs == 1: + self.error(_("%s option requires an argument") % opt) + else: + self.error(_("%s option requires %d arguments") + % (opt, nargs)) + elif nargs == 1: + value = rargs.pop(0) + else: + value = tuple(rargs[0:nargs]) + del rargs[0:nargs] + + else: # option doesn't take a value + value = None + + option.process(opt, value, values, self) + + if stop: + break + + + # -- Feedback methods ---------------------------------------------- + + def get_prog_name(self): + if self.prog is None: + return os.path.basename(sys.argv[0]) + else: + return self.prog + + def expand_prog_name(self, s): + return string.replace(s, "%prog", self.get_prog_name()) + + def get_description(self): + return self.expand_prog_name(self.description) + + def exit(self, status=0, msg=None): + if msg: + sys.stderr.write(msg) + sys.exit(status) + + def error(self, msg): + """error(msg : string) + + Print a usage message incorporating 'msg' to stderr and exit. + If you override this in a subclass, it should not return -- it + should either exit or raise an exception. + """ + self.print_usage(sys.stderr) + self.exit(2, "%s: error: %s\n" % (self.get_prog_name(), msg)) + + def get_usage(self): + if self.usage: + return self.formatter.format_usage( + self.expand_prog_name(self.usage)) + else: + return "" + + def print_usage(self, file=None): + """print_usage(file : file = stdout) + + Print the usage message for the current program (self.usage) to + 'file' (default stdout). Any occurence of the string "%prog" in + self.usage is replaced with the name of the current program + (basename of sys.argv[0]). Does nothing if self.usage is empty + or not defined. + """ + if self.usage: + file.write(self.get_usage() + '\n') + + def get_version(self): + if self.version: + return self.expand_prog_name(self.version) + else: + return "" + + def print_version(self, file=None): + """print_version(file : file = stdout) + + Print the version message for this program (self.version) to + 'file' (default stdout). As with print_usage(), any occurence + of "%prog" in self.version is replaced by the current program's + name. Does nothing if self.version is empty or undefined. 
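+
+        For example, if the parser was created with version="%prog 1.0",
+        the text written is the program's name (see get_prog_name())
+        followed by " 1.0".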
+ """ + if self.version: + file.write(self.get_version() + '\n') + + def format_option_help(self, formatter=None): + if formatter is None: + formatter = self.formatter + formatter.store_option_strings(self) + result = [] + result.append(formatter.format_heading(_("Options"))) + formatter.indent() + if self.option_list: + result.append(OptionContainer.format_option_help(self, formatter)) + result.append("\n") + for group in self.option_groups: + result.append(group.format_help(formatter)) + result.append("\n") + formatter.dedent() + # Drop the last "\n", or the header if no options or option groups: + return string.join(result[:-1], "") + + def format_epilog(self, formatter): + return formatter.format_epilog(self.epilog) + + def format_help(self, formatter=None): + if formatter is None: + formatter = self.formatter + result = [] + if self.usage: + result.append(self.get_usage() + "\n") + if self.description: + result.append(self.format_description(formatter) + "\n") + result.append(self.format_option_help(formatter)) + result.append(self.format_epilog(formatter)) + return string.join(result, "") + + # used by test suite + def _get_encoding(self, file): + encoding = getattr(file, "encoding", None) + if not encoding: + encoding = sys.getdefaultencoding() + return encoding + + def print_help(self, file=None): + """print_help(file : file = stdout) + + Print an extended help message, listing all options and any + help text provided with them, to 'file' (default stdout). + """ + if file is None: + file = sys.stdout + encoding = self._get_encoding(file) + file.write(encode_wrapper(self.format_help(), encoding, "replace")) + +# class OptionParser + + +def _match_abbrev(s, wordmap): + """_match_abbrev(s : string, wordmap : {string : Option}) -> string + + Return the string key in 'wordmap' for which 's' is an unambiguous + abbreviation. If 's' is found to be ambiguous or doesn't match any of + 'words', raise BadOptionError. + """ + # Is there an exact match? + if wordmap.has_key(s): + return s + else: + # Isolate all words with s as a prefix. + possibilities = filter(lambda w, s=s: w[:len(s)] == s, wordmap.keys()) + # No exact match, so there had better be just one possibility. + if len(possibilities) == 1: + return possibilities[0] + elif not possibilities: + raise BadOptionError(s) + else: + # More than one possible completion: ambiguous prefix. + possibilities.sort() + raise AmbiguousOptionError(s, possibilities) + + +# Some day, there might be many Option classes. As of Optik 1.3, the +# preferred way to instantiate Options is indirectly, via make_option(), +# which will become a factory function when there are many Option +# classes. +make_option = Option + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/compat/_scons_sets.py b/engine/SCons/compat/_scons_sets.py new file mode 100644 index 0000000..12dbead --- /dev/null +++ b/engine/SCons/compat/_scons_sets.py @@ -0,0 +1,583 @@ +"""Classes to represent arbitrary sets (including sets of sets). + +This module implements sets using dictionaries whose values are +ignored. The usual operations (union, intersection, deletion, etc.) +are provided as both methods and operators. + +Important: sets are not sequences! While they support 'x in s', +'len(s)', and 'for x in s', none of those operations are unique for +sequences; for example, mappings support all three as well. 
The +characteristic operation for sequences is subscripting with small +integers: s[i], for i in range(len(s)). Sets don't support +subscripting at all. Also, sequences allow multiple occurrences and +their elements have a definite order; sets on the other hand don't +record multiple occurrences and don't remember the order of element +insertion (which is why they don't support s[i]). + +The following classes are provided: + +BaseSet -- All the operations common to both mutable and immutable + sets. This is an abstract class, not meant to be directly + instantiated. + +Set -- Mutable sets, subclass of BaseSet; not hashable. + +ImmutableSet -- Immutable sets, subclass of BaseSet; hashable. + An iterable argument is mandatory to create an ImmutableSet. + +_TemporarilyImmutableSet -- A wrapper around a Set, hashable, + giving the same hash value as the immutable set equivalent + would have. Do not use this class directly. + +Only hashable objects can be added to a Set. In particular, you cannot +really add a Set as an element to another Set; if you try, what is +actually added is an ImmutableSet built from it (it compares equal to +the one you tried adding). + +When you ask if `x in y' where x is a Set and y is a Set or +ImmutableSet, x is wrapped into a _TemporarilyImmutableSet z, and +what's tested is actually `z in y'. + +""" + +# Code history: +# +# - Greg V. Wilson wrote the first version, using a different approach +# to the mutable/immutable problem, and inheriting from dict. +# +# - Alex Martelli modified Greg's version to implement the current +# Set/ImmutableSet approach, and make the data an attribute. +# +# - Guido van Rossum rewrote much of the code, made some API changes, +# and cleaned up the docstrings. +# +# - Raymond Hettinger added a number of speedups and other +# improvements. + +from __future__ import generators +try: + from itertools import ifilter, ifilterfalse +except ImportError: + # Code to make the module run under Py2.2 + def ifilter(predicate, iterable): + if predicate is None: + def predicate(x): + return x + for x in iterable: + if predicate(x): + yield x + def ifilterfalse(predicate, iterable): + if predicate is None: + def predicate(x): + return x + for x in iterable: + if not predicate(x): + yield x + try: + True, False + except NameError: + True, False = (0==0, 0!=0) + +__all__ = ['BaseSet', 'Set', 'ImmutableSet'] + +class BaseSet(object): + """Common base class for mutable and immutable sets.""" + + __slots__ = ['_data'] + + # Constructor + + def __init__(self): + """This is an abstract class.""" + # Don't call this from a concrete subclass! + if self.__class__ is BaseSet: + raise TypeError, ("BaseSet is an abstract class. " + "Use Set or ImmutableSet.") + + # Standard protocols: __len__, __repr__, __str__, __iter__ + + def __len__(self): + """Return the number of elements of a set.""" + return len(self._data) + + def __repr__(self): + """Return string representation of a set. + + This looks like 'Set([<list of elements>])'. + """ + return self._repr() + + # __str__ is the same as __repr__ + __str__ = __repr__ + + def _repr(self, sorted=False): + elements = self._data.keys() + if sorted: + elements.sort() + return '%s(%r)' % (self.__class__.__name__, elements) + + def __iter__(self): + """Return an iterator over the elements or a set. + + This is the keys iterator for the underlying dict. + """ + return self._data.iterkeys() + + # Three-way comparison is not supported. 
However, because __eq__ is + # tried before __cmp__, if Set x == Set y, x.__eq__(y) returns True and + # then cmp(x, y) returns 0 (Python doesn't actually call __cmp__ in this + # case). + + def __cmp__(self, other): + raise TypeError, "can't compare sets using cmp()" + + # Equality comparisons using the underlying dicts. Mixed-type comparisons + # are allowed here, where Set == z for non-Set z always returns False, + # and Set != z always True. This allows expressions like "x in y" to + # give the expected result when y is a sequence of mixed types, not + # raising a pointless TypeError just because y contains a Set, or x is + # a Set and y contain's a non-set ("in" invokes only __eq__). + # Subtle: it would be nicer if __eq__ and __ne__ could return + # NotImplemented instead of True or False. Then the other comparand + # would get a chance to determine the result, and if the other comparand + # also returned NotImplemented then it would fall back to object address + # comparison (which would always return False for __eq__ and always + # True for __ne__). However, that doesn't work, because this type + # *also* implements __cmp__: if, e.g., __eq__ returns NotImplemented, + # Python tries __cmp__ next, and the __cmp__ here then raises TypeError. + + def __eq__(self, other): + if isinstance(other, BaseSet): + return self._data == other._data + else: + return False + + def __ne__(self, other): + if isinstance(other, BaseSet): + return self._data != other._data + else: + return True + + # Copying operations + + def copy(self): + """Return a shallow copy of a set.""" + result = self.__class__() + result._data.update(self._data) + return result + + __copy__ = copy # For the copy module + + def __deepcopy__(self, memo): + """Return a deep copy of a set; used by copy module.""" + # This pre-creates the result and inserts it in the memo + # early, in case the deep copy recurses into another reference + # to this same set. A set can't be an element of itself, but + # it can certainly contain an object that has a reference to + # itself. + from copy import deepcopy + result = self.__class__() + memo[id(self)] = result + data = result._data + value = True + for elt in self: + data[deepcopy(elt, memo)] = value + return result + + # Standard set operations: union, intersection, both differences. + # Each has an operator version (e.g. __or__, invoked with |) and a + # method version (e.g. union). + # Subtle: Each pair requires distinct code so that the outcome is + # correct when the type of other isn't suitable. For example, if + # we did "union = __or__" instead, then Set().union(3) would return + # NotImplemented instead of raising TypeError (albeit that *why* it + # raises TypeError as-is is also a bit subtle). + + def __or__(self, other): + """Return the union of two sets as a new set. + + (I.e. all elements that are in either set.) + """ + if not isinstance(other, BaseSet): + return NotImplemented + return self.union(other) + + def union(self, other): + """Return the union of two sets as a new set. + + (I.e. all elements that are in either set.) + """ + result = self.__class__(self) + result._update(other) + return result + + def __and__(self, other): + """Return the intersection of two sets as a new set. + + (I.e. all elements that are in both sets.) + """ + if not isinstance(other, BaseSet): + return NotImplemented + return self.intersection(other) + + def intersection(self, other): + """Return the intersection of two sets as a new set. + + (I.e. all elements that are in both sets.) 
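+
+        For example (element order in the result is arbitrary):
+
+            Set([1, 2, 3]).intersection([2, 3, 4])  =>  Set([2, 3])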
+ """ + if not isinstance(other, BaseSet): + other = Set(other) + if len(self) <= len(other): + little, big = self, other + else: + little, big = other, self + common = ifilter(big._data.has_key, little) + return self.__class__(common) + + def __xor__(self, other): + """Return the symmetric difference of two sets as a new set. + + (I.e. all elements that are in exactly one of the sets.) + """ + if not isinstance(other, BaseSet): + return NotImplemented + return self.symmetric_difference(other) + + def symmetric_difference(self, other): + """Return the symmetric difference of two sets as a new set. + + (I.e. all elements that are in exactly one of the sets.) + """ + result = self.__class__() + data = result._data + value = True + selfdata = self._data + try: + otherdata = other._data + except AttributeError: + otherdata = Set(other)._data + for elt in ifilterfalse(otherdata.has_key, selfdata): + data[elt] = value + for elt in ifilterfalse(selfdata.has_key, otherdata): + data[elt] = value + return result + + def __sub__(self, other): + """Return the difference of two sets as a new Set. + + (I.e. all elements that are in this set and not in the other.) + """ + if not isinstance(other, BaseSet): + return NotImplemented + return self.difference(other) + + def difference(self, other): + """Return the difference of two sets as a new Set. + + (I.e. all elements that are in this set and not in the other.) + """ + result = self.__class__() + data = result._data + try: + otherdata = other._data + except AttributeError: + otherdata = Set(other)._data + value = True + for elt in ifilterfalse(otherdata.has_key, self): + data[elt] = value + return result + + # Membership test + + def __contains__(self, element): + """Report whether an element is a member of a set. + + (Called in response to the expression `element in self'.) + """ + try: + return element in self._data + except TypeError: + transform = getattr(element, "__as_temporarily_immutable__", None) + if transform is None: + raise # re-raise the TypeError exception we caught + return transform() in self._data + + # Subset and superset test + + def issubset(self, other): + """Report whether another set contains this set.""" + self._binary_sanity_check(other) + if len(self) > len(other): # Fast check for obvious cases + return False + for elt in ifilterfalse(other._data.has_key, self): + return False + return True + + def issuperset(self, other): + """Report whether this set contains another set.""" + self._binary_sanity_check(other) + if len(self) < len(other): # Fast check for obvious cases + return False + for elt in ifilterfalse(self._data.has_key, other): + return False + return True + + # Inequality comparisons using the is-subset relation. + __le__ = issubset + __ge__ = issuperset + + def __lt__(self, other): + self._binary_sanity_check(other) + return len(self) < len(other) and self.issubset(other) + + def __gt__(self, other): + self._binary_sanity_check(other) + return len(self) > len(other) and self.issuperset(other) + + # Assorted helpers + + def _binary_sanity_check(self, other): + # Check that the other argument to a binary operation is also + # a set, raising a TypeError otherwise. + if not isinstance(other, BaseSet): + raise TypeError, "Binary operation only permitted between sets" + + def _compute_hash(self): + # Calculate hash code for a set by xor'ing the hash codes of + # the elements. This ensures that the hash code does not depend + # on the order in which elements are added to the set. 
This is + # not called __hash__ because a BaseSet should not be hashable; + # only an ImmutableSet is hashable. + result = 0 + for elt in self: + result ^= hash(elt) + return result + + def _update(self, iterable): + # The main loop for update() and the subclass __init__() methods. + data = self._data + + # Use the fast update() method when a dictionary is available. + if isinstance(iterable, BaseSet): + data.update(iterable._data) + return + + value = True + + if type(iterable) in (list, tuple, xrange): + # Optimized: we know that __iter__() and next() can't + # raise TypeError, so we can move 'try:' out of the loop. + it = iter(iterable) + while True: + try: + for element in it: + data[element] = value + return + except TypeError: + transform = getattr(element, "__as_immutable__", None) + if transform is None: + raise # re-raise the TypeError exception we caught + data[transform()] = value + else: + # Safe: only catch TypeError where intended + for element in iterable: + try: + data[element] = value + except TypeError: + transform = getattr(element, "__as_immutable__", None) + if transform is None: + raise # re-raise the TypeError exception we caught + data[transform()] = value + + +class ImmutableSet(BaseSet): + """Immutable set class.""" + + __slots__ = ['_hashcode'] + + # BaseSet + hashing + + def __init__(self, iterable=None): + """Construct an immutable set from an optional iterable.""" + self._hashcode = None + self._data = {} + if iterable is not None: + self._update(iterable) + + def __hash__(self): + if self._hashcode is None: + self._hashcode = self._compute_hash() + return self._hashcode + + def __getstate__(self): + return self._data, self._hashcode + + def __setstate__(self, state): + self._data, self._hashcode = state + +class Set(BaseSet): + """ Mutable set class.""" + + __slots__ = [] + + # BaseSet + operations requiring mutability; no hashing + + def __init__(self, iterable=None): + """Construct a set from an optional iterable.""" + self._data = {} + if iterable is not None: + self._update(iterable) + + def __getstate__(self): + # getstate's results are ignored if it is not + return self._data, + + def __setstate__(self, data): + self._data, = data + + def __hash__(self): + """A Set cannot be hashed.""" + # We inherit object.__hash__, so we must deny this explicitly + raise TypeError, "Can't hash a Set, only an ImmutableSet." + + # In-place union, intersection, differences. + # Subtle: The xyz_update() functions deliberately return None, + # as do all mutating operations on built-in container types. + # The __xyz__ spellings have to return self, though. 
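+    # For example (illustrative):
+    #
+    #     s = Set([1, 2])
+    #     s |= Set([2, 3])         # __ior__ mutates s and returns it
+    #     s.union_update([3, 4])   # returns None, like list.sort()
+    #     # s now contains 1, 2, 3 and 4 (iteration order is arbitrary)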
+ + def __ior__(self, other): + """Update a set with the union of itself and another.""" + self._binary_sanity_check(other) + self._data.update(other._data) + return self + + def union_update(self, other): + """Update a set with the union of itself and another.""" + self._update(other) + + def __iand__(self, other): + """Update a set with the intersection of itself and another.""" + self._binary_sanity_check(other) + self._data = (self & other)._data + return self + + def intersection_update(self, other): + """Update a set with the intersection of itself and another.""" + if isinstance(other, BaseSet): + self &= other + else: + self._data = (self.intersection(other))._data + + def __ixor__(self, other): + """Update a set with the symmetric difference of itself and another.""" + self._binary_sanity_check(other) + self.symmetric_difference_update(other) + return self + + def symmetric_difference_update(self, other): + """Update a set with the symmetric difference of itself and another.""" + data = self._data + value = True + if not isinstance(other, BaseSet): + other = Set(other) + if self is other: + self.clear() + for elt in other: + if elt in data: + del data[elt] + else: + data[elt] = value + + def __isub__(self, other): + """Remove all elements of another set from this set.""" + self._binary_sanity_check(other) + self.difference_update(other) + return self + + def difference_update(self, other): + """Remove all elements of another set from this set.""" + data = self._data + if not isinstance(other, BaseSet): + other = Set(other) + if self is other: + self.clear() + for elt in ifilter(data.has_key, other): + del data[elt] + + # Python dict-like mass mutations: update, clear + + def update(self, iterable): + """Add all values from an iterable (such as a list or file).""" + self._update(iterable) + + def clear(self): + """Remove all elements from this set.""" + self._data.clear() + + # Single-element mutations: add, remove, discard + + def add(self, element): + """Add an element to a set. + + This has no effect if the element is already present. + """ + try: + self._data[element] = True + except TypeError: + transform = getattr(element, "__as_immutable__", None) + if transform is None: + raise # re-raise the TypeError exception we caught + self._data[transform()] = True + + def remove(self, element): + """Remove an element from a set; it must be a member. + + If the element is not a member, raise a KeyError. + """ + try: + del self._data[element] + except TypeError: + transform = getattr(element, "__as_temporarily_immutable__", None) + if transform is None: + raise # re-raise the TypeError exception we caught + del self._data[transform()] + + def discard(self, element): + """Remove an element from a set if it is a member. + + If the element is not a member, do nothing. + """ + try: + self.remove(element) + except KeyError: + pass + + def pop(self): + """Remove and return an arbitrary set element.""" + return self._data.popitem()[0] + + def __as_immutable__(self): + # Return a copy of self as an immutable set + return ImmutableSet(self) + + def __as_temporarily_immutable__(self): + # Return self wrapped in a temporarily immutable set + return _TemporarilyImmutableSet(self) + + +class _TemporarilyImmutableSet(BaseSet): + # Wrap a mutable set as if it was temporarily immutable. + # This only supplies hashing and equality comparisons. 
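+    # (This is what makes a test like "Set([1]) in Set([Set([1])])"
+    # work: hashing the inner mutable Set raises TypeError, so
+    # __contains__() wraps it on the fly and retries the lookup.)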
+ + def __init__(self, set): + self._set = set + self._data = set._data # Needed by ImmutableSet.__eq__() + + def __hash__(self): + return self._set._compute_hash() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/compat/_scons_sets15.py b/engine/SCons/compat/_scons_sets15.py new file mode 100644 index 0000000..bafa009 --- /dev/null +++ b/engine/SCons/compat/_scons_sets15.py @@ -0,0 +1,176 @@ +# +# A Set class that works all the way back to Python 1.5. From: +# +# Python Cookbook: Yet another Set class for Python +# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/106469 +# Goncalo Rodriques +# +# This is a pure Pythonic implementation of a set class. The syntax +# and methods implemented are, for the most part, borrowed from +# PEP 218 by Greg Wilson. +# +# Note that this class violates the formal definition of a set() by adding +# a __getitem__() method so we can iterate over a set's elements under +# Python 1.5 and 2.1, which don't support __iter__() and iterator types. +# + +import string + +class Set: + """The set class. It can contain mutable objects.""" + + def __init__(self, seq = None): + """The constructor. It can take any object giving an iterator as an optional + argument to populate the new set.""" + self.elems = [] + if seq: + for elem in seq: + if elem not in self.elems: + hash(elem) + self.elems.append(elem) + + def __str__(self): + return "set([%s])" % string.join(map(str, self.elems), ", ") + + + def copy(self): + """Shallow copy of a set object.""" + return Set(self.elems) + + def __contains__(self, elem): + return elem in self.elems + + def __len__(self): + return len(self.elems) + + def __getitem__(self, index): + # Added so that Python 1.5 can iterate over the elements. + # The cookbook recipe's author didn't like this because there + # really isn't any order in a set object, but this is necessary + # to make the class work well enough for our purposes. + return self.elems[index] + + def items(self): + """Returns a list of the elements in the set.""" + return self.elems + + def add(self, elem): + """Add one element to the set.""" + if elem not in self.elems: + hash(elem) + self.elems.append(elem) + + def remove(self, elem): + """Remove an element from the set. Return an error if elem is not in the set.""" + try: + self.elems.remove(elem) + except ValueError: + raise LookupError, "Object %s is not a member of the set." % str(elem) + + def discard(self, elem): + """Remove an element from the set. Do nothing if elem is not in the set.""" + try: + self.elems.remove(elem) + except ValueError: + pass + + def sort(self, func=cmp): + self.elems.sort(func) + + #Define an iterator for a set. + def __iter__(self): + return iter(self.elems) + + #The basic binary operations with sets. 
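Illustrative sketch (not part of the diff): in the Python 1.5-compatible Set above, the elements live in a plain list, remove() reports a missing element with LookupError rather than KeyError, and __getitem__() exists only so pre-iterator interpreters can loop over the elements by index. The SCons.compat._scons_sets15 import path is an assumption.

    # Editorial illustration only; the import path is an assumption.
    from SCons.compat._scons_sets15 import Set

    s = Set(["a", "b"])
    s.add("c")
    assert len(s) == 3 and "c" in s
    assert s[0] == "a"              # index access, kept for Python 1.5-style iteration
    try:
        s.remove("zzz")             # missing element raises LookupError here
    except LookupError:
        pass
    s.discard("zzz")                # missing element is silently ignored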
+ def __or__(self, other): + """Union of two sets.""" + ret = self.copy() + for elem in other.elems: + if elem not in ret: + ret.elems.append(elem) + return ret + + def __sub__(self, other): + """Difference of two sets.""" + ret = self.copy() + for elem in other.elems: + ret.discard(elem) + return ret + + def __and__(self, other): + """Intersection of two sets.""" + ret = Set() + for elem in self.elems: + if elem in other.elems: + ret.elems.append(elem) + return ret + + def __add__(self, other): + """Symmetric difference of two sets.""" + ret = Set() + temp = other.copy() + for elem in self.elems: + if elem in temp.elems: + temp.elems.remove(elem) + else: + ret.elems.append(elem) + #Add remaining elements. + for elem in temp.elems: + ret.elems.append(elem) + return ret + + def __mul__(self, other): + """Cartesian product of two sets.""" + ret = Set() + for elemself in self.elems: + x = map(lambda other, s=elemself: (s, other), other.elems) + ret.elems.extend(x) + return ret + + #Some of the binary comparisons. + def __lt__(self, other): + """Returns 1 if the lhs set is contained but not equal to the rhs set.""" + if len(self.elems) < len(other.elems): + temp = other.copy() + for elem in self.elems: + if elem in temp.elems: + temp.remove(elem) + else: + return 0 + return len(temp.elems) == 0 + else: + return 0 + + def __le__(self, other): + """Returns 1 if the lhs set is contained in the rhs set.""" + if len(self.elems) <= len(other.elems): + ret = 1 + for elem in self.elems: + if elem not in other.elems: + ret = 0 + break + return ret + else: + return 0 + + def __eq__(self, other): + """Returns 1 if the sets are equal.""" + if len(self.elems) != len(other.elems): + return 0 + else: + return len(self - other) == 0 + + def __cmp__(self, other): + """Returns 1 if the sets are equal.""" + if self.__lt__(other): + return -1 + elif other.__lt__(self): + return 1 + else: + return 0 + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/compat/_scons_shlex.py b/engine/SCons/compat/_scons_shlex.py new file mode 100644 index 0000000..9e30a01 --- /dev/null +++ b/engine/SCons/compat/_scons_shlex.py @@ -0,0 +1,325 @@ +# -*- coding: iso-8859-1 -*- +"""A lexical analyzer class for simple shell-like syntaxes.""" + +# Module and documentation by Eric S. Raymond, 21 Dec 1998 +# Input stacking and error message cleanup added by ESR, March 2000 +# push_source() and pop_source() made explicit by ESR, January 2001. +# Posix compliance, split(), string arguments, and +# iterator interface by Gustavo Niemeyer, April 2003. + +import os.path +import sys +#from collections import deque + +class deque: + def __init__(self): + self.data = [] + def __len__(self): + return len(self.data) + def appendleft(self, item): + self.data.insert(0, item) + def popleft(self): + return self.data.pop(0) + +try: + basestring +except NameError: + import types + def is_basestring(s): + return type(s) is types.StringType +else: + def is_basestring(s): + return isinstance(s, basestring) + +try: + from cStringIO import StringIO +except ImportError: + from StringIO import StringIO + +__all__ = ["shlex", "split"] + +class shlex: + "A lexical analyzer class for simple shell-like syntaxes." 
+ def __init__(self, instream=None, infile=None, posix=False): + if is_basestring(instream): + instream = StringIO(instream) + if instream is not None: + self.instream = instream + self.infile = infile + else: + self.instream = sys.stdin + self.infile = None + self.posix = posix + if posix: + self.eof = None + else: + self.eof = '' + self.commenters = '#' + self.wordchars = ('abcdfeghijklmnopqrstuvwxyz' + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_') + if self.posix: + self.wordchars = self.wordchars + ('ßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ' + 'ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ') + self.whitespace = ' \t\r\n' + self.whitespace_split = False + self.quotes = '\'"' + self.escape = '\\' + self.escapedquotes = '"' + self.state = ' ' + self.pushback = deque() + self.lineno = 1 + self.debug = 0 + self.token = '' + self.filestack = deque() + self.source = None + if self.debug: + print 'shlex: reading from %s, line %d' \ + % (self.instream, self.lineno) + + def push_token(self, tok): + "Push a token onto the stack popped by the get_token method" + if self.debug >= 1: + print "shlex: pushing token " + repr(tok) + self.pushback.appendleft(tok) + + def push_source(self, newstream, newfile=None): + "Push an input source onto the lexer's input source stack." + if is_basestring(newstream): + newstream = StringIO(newstream) + self.filestack.appendleft((self.infile, self.instream, self.lineno)) + self.infile = newfile + self.instream = newstream + self.lineno = 1 + if self.debug: + if newfile is not None: + print 'shlex: pushing to file %s' % (self.infile,) + else: + print 'shlex: pushing to stream %s' % (self.instream,) + + def pop_source(self): + "Pop the input source stack." + self.instream.close() + (self.infile, self.instream, self.lineno) = self.filestack.popleft() + if self.debug: + print 'shlex: popping to %s, line %d' \ + % (self.instream, self.lineno) + self.state = ' ' + + def get_token(self): + "Get a token from the input stream (or from stack if it's nonempty)" + if self.pushback: + tok = self.pushback.popleft() + if self.debug >= 1: + print "shlex: popping token " + repr(tok) + return tok + # No pushback. Get a token. + raw = self.read_token() + # Handle inclusions + if self.source is not None: + while raw == self.source: + spec = self.sourcehook(self.read_token()) + if spec: + (newfile, newstream) = spec + self.push_source(newstream, newfile) + raw = self.get_token() + # Maybe we got EOF instead? 
+ while raw == self.eof: + if not self.filestack: + return self.eof + else: + self.pop_source() + raw = self.get_token() + # Neither inclusion nor EOF + if self.debug >= 1: + if raw != self.eof: + print "shlex: token=" + repr(raw) + else: + print "shlex: token=EOF" + return raw + + def read_token(self): + quoted = False + escapedstate = ' ' + while True: + nextchar = self.instream.read(1) + if nextchar == '\n': + self.lineno = self.lineno + 1 + if self.debug >= 3: + print "shlex: in state", repr(self.state), \ + "I see character:", repr(nextchar) + if self.state is None: + self.token = '' # past end of file + break + elif self.state == ' ': + if not nextchar: + self.state = None # end of file + break + elif nextchar in self.whitespace: + if self.debug >= 2: + print "shlex: I see whitespace in whitespace state" + if self.token or (self.posix and quoted): + break # emit current token + else: + continue + elif nextchar in self.commenters: + self.instream.readline() + self.lineno = self.lineno + 1 + elif self.posix and nextchar in self.escape: + escapedstate = 'a' + self.state = nextchar + elif nextchar in self.wordchars: + self.token = nextchar + self.state = 'a' + elif nextchar in self.quotes: + if not self.posix: + self.token = nextchar + self.state = nextchar + elif self.whitespace_split: + self.token = nextchar + self.state = 'a' + else: + self.token = nextchar + if self.token or (self.posix and quoted): + break # emit current token + else: + continue + elif self.state in self.quotes: + quoted = True + if not nextchar: # end of file + if self.debug >= 2: + print "shlex: I see EOF in quotes state" + # XXX what error should be raised here? + raise ValueError, "No closing quotation" + if nextchar == self.state: + if not self.posix: + self.token = self.token + nextchar + self.state = ' ' + break + else: + self.state = 'a' + elif self.posix and nextchar in self.escape and \ + self.state in self.escapedquotes: + escapedstate = self.state + self.state = nextchar + else: + self.token = self.token + nextchar + elif self.state in self.escape: + if not nextchar: # end of file + if self.debug >= 2: + print "shlex: I see EOF in escape state" + # XXX what error should be raised here? + raise ValueError, "No escaped character" + # In posix shells, only the quote itself or the escape + # character may be escaped within quotes. 
+ if escapedstate in self.quotes and \ + nextchar != self.state and nextchar != escapedstate: + self.token = self.token + self.state + self.token = self.token + nextchar + self.state = escapedstate + elif self.state == 'a': + if not nextchar: + self.state = None # end of file + break + elif nextchar in self.whitespace: + if self.debug >= 2: + print "shlex: I see whitespace in word state" + self.state = ' ' + if self.token or (self.posix and quoted): + break # emit current token + else: + continue + elif nextchar in self.commenters: + self.instream.readline() + self.lineno = self.lineno + 1 + if self.posix: + self.state = ' ' + if self.token or (self.posix and quoted): + break # emit current token + else: + continue + elif self.posix and nextchar in self.quotes: + self.state = nextchar + elif self.posix and nextchar in self.escape: + escapedstate = 'a' + self.state = nextchar + elif nextchar in self.wordchars or nextchar in self.quotes \ + or self.whitespace_split: + self.token = self.token + nextchar + else: + self.pushback.appendleft(nextchar) + if self.debug >= 2: + print "shlex: I see punctuation in word state" + self.state = ' ' + if self.token: + break # emit current token + else: + continue + result = self.token + self.token = '' + if self.posix and not quoted and result == '': + result = None + if self.debug > 1: + if result: + print "shlex: raw token=" + repr(result) + else: + print "shlex: raw token=EOF" + return result + + def sourcehook(self, newfile): + "Hook called on a filename to be sourced." + if newfile[0] == '"': + newfile = newfile[1:-1] + # This implements cpp-like semantics for relative-path inclusion. + if is_basestring(self.infile) and not os.path.isabs(newfile): + newfile = os.path.join(os.path.dirname(self.infile), newfile) + return (newfile, open(newfile, "r")) + + def error_leader(self, infile=None, lineno=None): + "Emit a C-compiler-like, Emacs-friendly error-message leader." + if infile is None: + infile = self.infile + if lineno is None: + lineno = self.lineno + return "\"%s\", line %d: " % (infile, lineno) + + def __iter__(self): + return self + + def next(self): + token = self.get_token() + if token == self.eof: + raise StopIteration + return token + +def split(s, comments=False): + lex = shlex(s, posix=True) + lex.whitespace_split = True + if not comments: + lex.commenters = '' + #return list(lex) + result = [] + while True: + token = lex.get_token() + if token == lex.eof: + break + result.append(token) + return result + +if __name__ == '__main__': + if len(sys.argv) == 1: + lexer = shlex() + else: + file = sys.argv[1] + lexer = shlex(open(file), file) + while 1: + tt = lexer.get_token() + if tt: + print "Token: " + repr(tt) + else: + break + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/compat/_scons_subprocess.py b/engine/SCons/compat/_scons_subprocess.py new file mode 100644 index 0000000..4968825 --- /dev/null +++ b/engine/SCons/compat/_scons_subprocess.py @@ -0,0 +1,1296 @@ +# subprocess - Subprocesses with accessible I/O streams +# +# For more information about this module, see PEP 324. +# +# This module should remain compatible with Python 2.2, see PEP 291. +# +# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se> +# +# Licensed to PSF under a Contributor Agreement. +# See http://www.python.org/2.4/license for licensing details. 
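Illustrative sketch (not part of the diff): the split() helper at the bottom of the _scons_shlex module above drives the lexer in POSIX mode with whitespace_split enabled, so double quotes group words and a backslash escapes an embedded quote inside them. A small example of the resulting tokenisation; the SCons.compat._scons_shlex import path is an assumption.

    # Editorial illustration only; the import path is an assumption.
    from SCons.compat._scons_shlex import split

    # Quotes keep 'my prog' together as a single token.
    assert split('gcc -o "my prog" main.c') == ['gcc', '-o', 'my prog', 'main.c']

    # Inside double quotes, a backslash escapes an embedded quote.
    assert split('echo "a \\"quoted\\" word"') == ['echo', 'a "quoted" word']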
+ +r"""subprocess - Subprocesses with accessible I/O streams + +This module allows you to spawn processes, connect to their +input/output/error pipes, and obtain their return codes. This module +intends to replace several other, older modules and functions, like: + +os.system +os.spawn* +os.popen* +popen2.* +commands.* + +Information about how the subprocess module can be used to replace these +modules and functions can be found below. + + + +Using the subprocess module +=========================== +This module defines one class called Popen: + +class Popen(args, bufsize=0, executable=None, + stdin=None, stdout=None, stderr=None, + preexec_fn=None, close_fds=False, shell=False, + cwd=None, env=None, universal_newlines=False, + startupinfo=None, creationflags=0): + + +Arguments are: + +args should be a string, or a sequence of program arguments. The +program to execute is normally the first item in the args sequence or +string, but can be explicitly set by using the executable argument. + +On UNIX, with shell=False (default): In this case, the Popen class +uses os.execvp() to execute the child program. args should normally +be a sequence. A string will be treated as a sequence with the string +as the only item (the program to execute). + +On UNIX, with shell=True: If args is a string, it specifies the +command string to execute through the shell. If args is a sequence, +the first item specifies the command string, and any additional items +will be treated as additional shell arguments. + +On Windows: the Popen class uses CreateProcess() to execute the child +program, which operates on strings. If args is a sequence, it will be +converted to a string using the list2cmdline method. Please note that +not all MS Windows applications interpret the command line the same +way: The list2cmdline is designed for applications using the same +rules as the MS C runtime. + +bufsize, if given, has the same meaning as the corresponding argument +to the built-in open() function: 0 means unbuffered, 1 means line +buffered, any other positive value means use a buffer of +(approximately) that size. A negative bufsize means to use the system +default, which usually means fully buffered. The default value for +bufsize is 0 (unbuffered). + +stdin, stdout and stderr specify the executed programs' standard +input, standard output and standard error file handles, respectively. +Valid values are PIPE, an existing file descriptor (a positive +integer), an existing file object, and None. PIPE indicates that a +new pipe to the child should be created. With None, no redirection +will occur; the child's file handles will be inherited from the +parent. Additionally, stderr can be STDOUT, which indicates that the +stderr data from the applications should be captured into the same +file handle as for stdout. + +If preexec_fn is set to a callable object, this object will be called +in the child process just before the child is executed. + +If close_fds is true, all file descriptors except 0, 1 and 2 will be +closed before the child process is executed. + +if shell is true, the specified command will be executed through the +shell. + +If cwd is not None, the current directory will be changed to cwd +before the child is executed. + +If env is not None, it defines the environment variables for the new +process. 
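Illustrative sketch (not part of the diff): the two argument-passing styles described above behave quite differently on POSIX, since shell=False executes the program directly via os.execvp() while shell=True hands a single command string to /bin/sh. A minimal example; the import path and the ls/wc commands are placeholders, not part of this module.

    # Editorial illustration only; the import path is an assumption.
    from SCons.compat._scons_subprocess import Popen, PIPE

    # shell=False (default): args is a sequence, no shell is involved, so
    # metacharacters such as '*' reach the program literally.
    p = Popen(["ls", "-l"], stdout=PIPE)
    print p.communicate()[0]

    # shell=True: args is one string interpreted by /bin/sh, so pipes,
    # globbing and redirection are available.
    q = Popen("ls -l | wc -l", shell=True, stdout=PIPE)
    print q.communicate()[0]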
+ +If universal_newlines is true, the file objects stdout and stderr are +opened as a text files, but lines may be terminated by any of '\n', +the Unix end-of-line convention, '\r', the Macintosh convention or +'\r\n', the Windows convention. All of these external representations +are seen as '\n' by the Python program. Note: This feature is only +available if Python is built with universal newline support (the +default). Also, the newlines attribute of the file objects stdout, +stdin and stderr are not updated by the communicate() method. + +The startupinfo and creationflags, if given, will be passed to the +underlying CreateProcess() function. They can specify things such as +appearance of the main window and priority for the new process. +(Windows only) + + +This module also defines two shortcut functions: + +call(*popenargs, **kwargs): + Run command with arguments. Wait for command to complete, then + return the returncode attribute. + + The arguments are the same as for the Popen constructor. Example: + + retcode = call(["ls", "-l"]) + +check_call(*popenargs, **kwargs): + Run command with arguments. Wait for command to complete. If the + exit code was zero then return, otherwise raise + CalledProcessError. The CalledProcessError object will have the + return code in the returncode attribute. + + The arguments are the same as for the Popen constructor. Example: + + check_call(["ls", "-l"]) + +Exceptions +---------- +Exceptions raised in the child process, before the new program has +started to execute, will be re-raised in the parent. Additionally, +the exception object will have one extra attribute called +'child_traceback', which is a string containing traceback information +from the childs point of view. + +The most common exception raised is OSError. This occurs, for +example, when trying to execute a non-existent file. Applications +should prepare for OSErrors. + +A ValueError will be raised if Popen is called with invalid arguments. + +check_call() will raise CalledProcessError, if the called process +returns a non-zero return code. + + +Security +-------- +Unlike some other popen functions, this implementation will never call +/bin/sh implicitly. This means that all characters, including shell +metacharacters, can safely be passed to child processes. + + +Popen objects +============= +Instances of the Popen class have the following methods: + +poll() + Check if child process has terminated. Returns returncode + attribute. + +wait() + Wait for child process to terminate. Returns returncode attribute. + +communicate(input=None) + Interact with process: Send data to stdin. Read data from stdout + and stderr, until end-of-file is reached. Wait for process to + terminate. The optional stdin argument should be a string to be + sent to the child process, or None, if no data should be sent to + the child. + + communicate() returns a tuple (stdout, stderr). + + Note: The data read is buffered in memory, so do not use this + method if the data size is large or unlimited. + +The following attributes are also available: + +stdin + If the stdin argument is PIPE, this attribute is a file object + that provides input to the child process. Otherwise, it is None. + +stdout + If the stdout argument is PIPE, this attribute is a file object + that provides output from the child process. Otherwise, it is + None. + +stderr + If the stderr argument is PIPE, this attribute is file object that + provides error output from the child process. Otherwise, it is + None. 
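Illustrative sketch (not part of the diff): when stdout and stderr are both set to PIPE as described above, the corresponding attributes become readable file objects and communicate() drains them before waiting; check_call() wraps the same machinery and raises CalledProcessError on a non-zero exit. A short example; the import path and the ls/false commands are placeholders.

    # Editorial illustration only; the import path is an assumption.
    from SCons.compat._scons_subprocess import Popen, PIPE, check_call, CalledProcessError

    p = Popen(["ls", "-l"], stdout=PIPE, stderr=PIPE)
    out, err = p.communicate()          # reads both pipes, then waits
    print "exit status:", p.returncode

    try:
        check_call(["false"])           # non-zero exit raises CalledProcessError
    except CalledProcessError, e:
        print "command %s failed with status %d" % (e.cmd, e.returncode)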
+ +pid + The process ID of the child process. + +returncode + The child return code. A None value indicates that the process + hasn't terminated yet. A negative value -N indicates that the + child was terminated by signal N (UNIX only). + + +Replacing older functions with the subprocess module +==================================================== +In this section, "a ==> b" means that b can be used as a replacement +for a. + +Note: All functions in this section fail (more or less) silently if +the executed program cannot be found; this module raises an OSError +exception. + +In the following examples, we assume that the subprocess module is +imported with "from subprocess import *". + + +Replacing /bin/sh shell backquote +--------------------------------- +output=`mycmd myarg` +==> +output = Popen(["mycmd", "myarg"], stdout=PIPE).communicate()[0] + + +Replacing shell pipe line +------------------------- +output=`dmesg | grep hda` +==> +p1 = Popen(["dmesg"], stdout=PIPE) +p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE) +output = p2.communicate()[0] + + +Replacing os.system() +--------------------- +sts = os.system("mycmd" + " myarg") +==> +p = Popen("mycmd" + " myarg", shell=True) +pid, sts = os.waitpid(p.pid, 0) + +Note: + +* Calling the program through the shell is usually not required. + +* It's easier to look at the returncode attribute than the + exitstatus. + +A more real-world example would look like this: + +try: + retcode = call("mycmd" + " myarg", shell=True) + if retcode < 0: + print >>sys.stderr, "Child was terminated by signal", -retcode + else: + print >>sys.stderr, "Child returned", retcode +except OSError, e: + print >>sys.stderr, "Execution failed:", e + + +Replacing os.spawn* +------------------- +P_NOWAIT example: + +pid = os.spawnlp(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg") +==> +pid = Popen(["/bin/mycmd", "myarg"]).pid + + +P_WAIT example: + +retcode = os.spawnlp(os.P_WAIT, "/bin/mycmd", "mycmd", "myarg") +==> +retcode = call(["/bin/mycmd", "myarg"]) + + +Vector example: + +os.spawnvp(os.P_NOWAIT, path, args) +==> +Popen([path] + args[1:]) + + +Environment example: + +os.spawnlpe(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg", env) +==> +Popen(["/bin/mycmd", "myarg"], env={"PATH": "/usr/bin"}) + + +Replacing os.popen* +------------------- +pipe = os.popen(cmd, mode='r', bufsize) +==> +pipe = Popen(cmd, shell=True, bufsize=bufsize, stdout=PIPE).stdout + +pipe = os.popen(cmd, mode='w', bufsize) +==> +pipe = Popen(cmd, shell=True, bufsize=bufsize, stdin=PIPE).stdin + + +(child_stdin, child_stdout) = os.popen2(cmd, mode, bufsize) +==> +p = Popen(cmd, shell=True, bufsize=bufsize, + stdin=PIPE, stdout=PIPE, close_fds=True) +(child_stdin, child_stdout) = (p.stdin, p.stdout) + + +(child_stdin, + child_stdout, + child_stderr) = os.popen3(cmd, mode, bufsize) +==> +p = Popen(cmd, shell=True, bufsize=bufsize, + stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True) +(child_stdin, + child_stdout, + child_stderr) = (p.stdin, p.stdout, p.stderr) + + +(child_stdin, child_stdout_and_stderr) = os.popen4(cmd, mode, bufsize) +==> +p = Popen(cmd, shell=True, bufsize=bufsize, + stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True) +(child_stdin, child_stdout_and_stderr) = (p.stdin, p.stdout) + + +Replacing popen2.* +------------------ +Note: If the cmd argument to popen2 functions is a string, the command +is executed through /bin/sh. If it is a list, the command is directly +executed. 
+ +(child_stdout, child_stdin) = popen2.popen2("somestring", bufsize, mode) +==> +p = Popen(["somestring"], shell=True, bufsize=bufsize + stdin=PIPE, stdout=PIPE, close_fds=True) +(child_stdout, child_stdin) = (p.stdout, p.stdin) + + +(child_stdout, child_stdin) = popen2.popen2(["mycmd", "myarg"], bufsize, mode) +==> +p = Popen(["mycmd", "myarg"], bufsize=bufsize, + stdin=PIPE, stdout=PIPE, close_fds=True) +(child_stdout, child_stdin) = (p.stdout, p.stdin) + +The popen2.Popen3 and popen3.Popen4 basically works as subprocess.Popen, +except that: + +* subprocess.Popen raises an exception if the execution fails +* the capturestderr argument is replaced with the stderr argument. +* stdin=PIPE and stdout=PIPE must be specified. +* popen2 closes all filedescriptors by default, but you have to specify + close_fds=True with subprocess.Popen. + + +""" + +import sys +mswindows = (sys.platform == "win32") + +import os +import string +import types +import traceback + +# Exception classes used by this module. +class CalledProcessError(Exception): + """This exception is raised when a process run by check_call() returns + a non-zero exit status. The exit status will be stored in the + returncode attribute.""" + def __init__(self, returncode, cmd): + self.returncode = returncode + self.cmd = cmd + def __str__(self): + return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode) + + +if mswindows: + try: + import threading + except ImportError: + # SCons: the threading module is only used by the communicate() + # method, which we don't actually use, so don't worry if we + # can't import it. + pass + import msvcrt + if 0: # <-- change this to use pywin32 instead of the _subprocess driver + import pywintypes + from win32api import GetStdHandle, STD_INPUT_HANDLE, \ + STD_OUTPUT_HANDLE, STD_ERROR_HANDLE + from win32api import GetCurrentProcess, DuplicateHandle, \ + GetModuleFileName, GetVersion + from win32con import DUPLICATE_SAME_ACCESS, SW_HIDE + from win32pipe import CreatePipe + from win32process import CreateProcess, STARTUPINFO, \ + GetExitCodeProcess, STARTF_USESTDHANDLES, \ + STARTF_USESHOWWINDOW, CREATE_NEW_CONSOLE + from win32event import WaitForSingleObject, INFINITE, WAIT_OBJECT_0 + else: + # SCons: don't die on Python versions that don't have _subprocess. 
+ try: + from _subprocess import * + except ImportError: + pass + class STARTUPINFO: + dwFlags = 0 + hStdInput = None + hStdOutput = None + hStdError = None + wShowWindow = 0 + class pywintypes: + error = IOError +else: + import select + import errno + import fcntl + import pickle + + try: + fcntl.F_GETFD + except AttributeError: + fcntl.F_GETFD = 1 + + try: + fcntl.F_SETFD + except AttributeError: + fcntl.F_SETFD = 2 + +__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "CalledProcessError"] + +try: + MAXFD = os.sysconf("SC_OPEN_MAX") +except KeyboardInterrupt: + raise # SCons: don't swallow keyboard interrupts +except: + MAXFD = 256 + +# True/False does not exist on 2.2.0 +try: + False +except NameError: + False = 0 + True = 1 + +try: + isinstance(1, int) +except TypeError: + def is_int(obj): + return type(obj) == type(1) + def is_int_or_long(obj): + return type(obj) in (type(1), type(1L)) +else: + def is_int(obj): + return isinstance(obj, int) + def is_int_or_long(obj): + return isinstance(obj, (int, long)) + +try: + types.StringTypes +except AttributeError: + try: + types.StringTypes = (types.StringType, types.UnicodeType) + except AttributeError: + types.StringTypes = (types.StringType,) + def is_string(obj): + return type(obj) in types.StringTypes +else: + def is_string(obj): + return isinstance(obj, types.StringTypes) + +_active = [] + +def _cleanup(): + for inst in _active[:]: + if inst.poll(_deadstate=sys.maxint) >= 0: + try: + _active.remove(inst) + except ValueError: + # This can happen if two threads create a new Popen instance. + # It's harmless that it was already removed, so ignore. + pass + +PIPE = -1 +STDOUT = -2 + + +def call(*popenargs, **kwargs): + """Run command with arguments. Wait for command to complete, then + return the returncode attribute. + + The arguments are the same as for the Popen constructor. Example: + + retcode = call(["ls", "-l"]) + """ + return apply(Popen, popenargs, kwargs).wait() + + +def check_call(*popenargs, **kwargs): + """Run command with arguments. Wait for command to complete. If + the exit code was zero then return, otherwise raise + CalledProcessError. The CalledProcessError object will have the + return code in the returncode attribute. + + The arguments are the same as for the Popen constructor. Example: + + check_call(["ls", "-l"]) + """ + retcode = apply(call, popenargs, kwargs) + cmd = kwargs.get("args") + if cmd is None: + cmd = popenargs[0] + if retcode: + raise CalledProcessError(retcode, cmd) + return retcode + + +def list2cmdline(seq): + """ + Translate a sequence of arguments into a command line + string, using the same rules as the MS C runtime: + + 1) Arguments are delimited by white space, which is either a + space or a tab. + + 2) A string surrounded by double quotation marks is + interpreted as a single argument, regardless of white space + contained within. A quoted string can be embedded in an + argument. + + 3) A double quotation mark preceded by a backslash is + interpreted as a literal double quotation mark. + + 4) Backslashes are interpreted literally, unless they + immediately precede a double quotation mark. + + 5) If backslashes immediately precede a double quotation mark, + every pair of backslashes is interpreted as a literal + backslash. If the number of backslashes is odd, the last + backslash escapes the next double quotation mark as + described in rule 3. 
+ """ + + # See + # http://msdn.microsoft.com/library/en-us/vccelng/htm/progs_12.asp + result = [] + needquote = False + for arg in seq: + bs_buf = [] + + # Add a space to separate this argument from the others + if result: + result.append(' ') + + needquote = (" " in arg) or ("\t" in arg) + if needquote: + result.append('"') + + for c in arg: + if c == '\\': + # Don't know if we need to double yet. + bs_buf.append(c) + elif c == '"': + # Double backspaces. + result.append('\\' * len(bs_buf)*2) + bs_buf = [] + result.append('\\"') + else: + # Normal char + if bs_buf: + result.extend(bs_buf) + bs_buf = [] + result.append(c) + + # Add remaining backspaces, if any. + if bs_buf: + result.extend(bs_buf) + + if needquote: + result.extend(bs_buf) + result.append('"') + + return string.join(result, '') + + +try: + object +except NameError: + class object: + pass + +class Popen(object): + def __init__(self, args, bufsize=0, executable=None, + stdin=None, stdout=None, stderr=None, + preexec_fn=None, close_fds=False, shell=False, + cwd=None, env=None, universal_newlines=False, + startupinfo=None, creationflags=0): + """Create new Popen instance.""" + _cleanup() + + self._child_created = False + if not is_int_or_long(bufsize): + raise TypeError("bufsize must be an integer") + + if mswindows: + if preexec_fn is not None: + raise ValueError("preexec_fn is not supported on Windows " + "platforms") + if close_fds: + raise ValueError("close_fds is not supported on Windows " + "platforms") + else: + # POSIX + if startupinfo is not None: + raise ValueError("startupinfo is only supported on Windows " + "platforms") + if creationflags != 0: + raise ValueError("creationflags is only supported on Windows " + "platforms") + + self.stdin = None + self.stdout = None + self.stderr = None + self.pid = None + self.returncode = None + self.universal_newlines = universal_newlines + + # Input and output objects. The general principle is like + # this: + # + # Parent Child + # ------ ----- + # p2cwrite ---stdin---> p2cread + # c2pread <--stdout--- c2pwrite + # errread <--stderr--- errwrite + # + # On POSIX, the child objects are file descriptors. On + # Windows, these are Windows file handles. The parent objects + # are file descriptors on both platforms. The parent objects + # are None when not using PIPEs. The child objects are None + # when not redirecting. + + (p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite) = self._get_handles(stdin, stdout, stderr) + + self._execute_child(args, executable, preexec_fn, close_fds, + cwd, env, universal_newlines, + startupinfo, creationflags, shell, + p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite) + + if p2cwrite: + self.stdin = os.fdopen(p2cwrite, 'wb', bufsize) + if c2pread: + if universal_newlines: + self.stdout = os.fdopen(c2pread, 'rU', bufsize) + else: + self.stdout = os.fdopen(c2pread, 'rb', bufsize) + if errread: + if universal_newlines: + self.stderr = os.fdopen(errread, 'rU', bufsize) + else: + self.stderr = os.fdopen(errread, 'rb', bufsize) + + + def _translate_newlines(self, data): + data = data.replace("\r\n", "\n") + data = data.replace("\r", "\n") + return data + + + def __del__(self): + if not self._child_created: + # We didn't get to successfully create a child process. + return + # In case the child hasn't been waited on, check if it's done. + self.poll(_deadstate=sys.maxint) + if self.returncode is None and _active is not None: + # Child is still running, keep us alive until we can wait on it. 
+ _active.append(self) + + + def communicate(self, input=None): + """Interact with process: Send data to stdin. Read data from + stdout and stderr, until end-of-file is reached. Wait for + process to terminate. The optional input argument should be a + string to be sent to the child process, or None, if no data + should be sent to the child. + + communicate() returns a tuple (stdout, stderr).""" + + # Optimization: If we are only using one pipe, or no pipe at + # all, using select() or threads is unnecessary. + if [self.stdin, self.stdout, self.stderr].count(None) >= 2: + stdout = None + stderr = None + if self.stdin: + if input: + self.stdin.write(input) + self.stdin.close() + elif self.stdout: + stdout = self.stdout.read() + elif self.stderr: + stderr = self.stderr.read() + self.wait() + return (stdout, stderr) + + return self._communicate(input) + + + if mswindows: + # + # Windows methods + # + def _get_handles(self, stdin, stdout, stderr): + """Construct and return tupel with IO objects: + p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite + """ + if stdin is None and stdout is None and stderr is None: + return (None, None, None, None, None, None) + + p2cread, p2cwrite = None, None + c2pread, c2pwrite = None, None + errread, errwrite = None, None + + if stdin is None: + p2cread = GetStdHandle(STD_INPUT_HANDLE) + elif stdin == PIPE: + p2cread, p2cwrite = CreatePipe(None, 0) + # Detach and turn into fd + p2cwrite = p2cwrite.Detach() + p2cwrite = msvcrt.open_osfhandle(p2cwrite, 0) + elif is_int(stdin): + p2cread = msvcrt.get_osfhandle(stdin) + else: + # Assuming file-like object + p2cread = msvcrt.get_osfhandle(stdin.fileno()) + p2cread = self._make_inheritable(p2cread) + + if stdout is None: + c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE) + elif stdout == PIPE: + c2pread, c2pwrite = CreatePipe(None, 0) + # Detach and turn into fd + c2pread = c2pread.Detach() + c2pread = msvcrt.open_osfhandle(c2pread, 0) + elif is_int(stdout): + c2pwrite = msvcrt.get_osfhandle(stdout) + else: + # Assuming file-like object + c2pwrite = msvcrt.get_osfhandle(stdout.fileno()) + c2pwrite = self._make_inheritable(c2pwrite) + + if stderr is None: + errwrite = GetStdHandle(STD_ERROR_HANDLE) + elif stderr == PIPE: + errread, errwrite = CreatePipe(None, 0) + # Detach and turn into fd + errread = errread.Detach() + errread = msvcrt.open_osfhandle(errread, 0) + elif stderr == STDOUT: + errwrite = c2pwrite + elif is_int(stderr): + errwrite = msvcrt.get_osfhandle(stderr) + else: + # Assuming file-like object + errwrite = msvcrt.get_osfhandle(stderr.fileno()) + errwrite = self._make_inheritable(errwrite) + + return (p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite) + + + def _make_inheritable(self, handle): + """Return a duplicate of handle, which is inheritable""" + return DuplicateHandle(GetCurrentProcess(), handle, + GetCurrentProcess(), 0, 1, + DUPLICATE_SAME_ACCESS) + + + def _find_w9xpopen(self): + """Find and return absolut path to w9xpopen.exe""" + w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)), + "w9xpopen.exe") + if not os.path.exists(w9xpopen): + # Eeek - file-not-found - possibly an embedding + # situation - see if we can locate it in sys.exec_prefix + w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix), + "w9xpopen.exe") + if not os.path.exists(w9xpopen): + raise RuntimeError("Cannot locate w9xpopen.exe, which is " + "needed for Popen to work with your " + "shell or platform.") + return w9xpopen + + + def _execute_child(self, args, executable, preexec_fn, close_fds, + cwd, 
env, universal_newlines, + startupinfo, creationflags, shell, + p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite): + """Execute program (MS Windows version)""" + + if not isinstance(args, types.StringTypes): + args = list2cmdline(args) + + # Process startup details + if startupinfo is None: + startupinfo = STARTUPINFO() + if None not in (p2cread, c2pwrite, errwrite): + startupinfo.dwFlags = startupinfo.dwFlags | STARTF_USESTDHANDLES + startupinfo.hStdInput = p2cread + startupinfo.hStdOutput = c2pwrite + startupinfo.hStdError = errwrite + + if shell: + startupinfo.dwFlags = startupinfo.dwFlags | STARTF_USESHOWWINDOW + startupinfo.wShowWindow = SW_HIDE + comspec = os.environ.get("COMSPEC", "cmd.exe") + args = comspec + " /c " + args + if (GetVersion() >= 0x80000000L or + os.path.basename(comspec).lower() == "command.com"): + # Win9x, or using command.com on NT. We need to + # use the w9xpopen intermediate program. For more + # information, see KB Q150956 + # (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp) + w9xpopen = self._find_w9xpopen() + args = '"%s" %s' % (w9xpopen, args) + # Not passing CREATE_NEW_CONSOLE has been known to + # cause random failures on win9x. Specifically a + # dialog: "Your program accessed mem currently in + # use at xxx" and a hopeful warning about the + # stability of your system. Cost is Ctrl+C wont + # kill children. + creationflags = creationflags | CREATE_NEW_CONSOLE + + # Start the process + try: + hp, ht, pid, tid = CreateProcess(executable, args, + # no special security + None, None, + # must inherit handles to pass std + # handles + 1, + creationflags, + env, + cwd, + startupinfo) + except pywintypes.error, e: + # Translate pywintypes.error to WindowsError, which is + # a subclass of OSError. FIXME: We should really + # translate errno using _sys_errlist (or simliar), but + # how can this be done from Python? + raise apply(WindowsError, e.args) + + # Retain the process handle, but close the thread handle + self._child_created = True + self._handle = hp + self.pid = pid + ht.Close() + + # Child is launched. Close the parent's copy of those pipe + # handles that only the child should have open. You need + # to make sure that no handles to the write end of the + # output pipe are maintained in this process or else the + # pipe will not close when the child process exits and the + # ReadFile will hang. + if p2cread is not None: + p2cread.Close() + if c2pwrite is not None: + c2pwrite.Close() + if errwrite is not None: + errwrite.Close() + + + def poll(self, _deadstate=None): + """Check if child process has terminated. Returns returncode + attribute.""" + if self.returncode is None: + if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0: + self.returncode = GetExitCodeProcess(self._handle) + return self.returncode + + + def wait(self): + """Wait for child process to terminate. 
Returns returncode + attribute.""" + if self.returncode is None: + obj = WaitForSingleObject(self._handle, INFINITE) + self.returncode = GetExitCodeProcess(self._handle) + return self.returncode + + + def _readerthread(self, fh, buffer): + buffer.append(fh.read()) + + + def _communicate(self, input): + stdout = None # Return + stderr = None # Return + + if self.stdout: + stdout = [] + stdout_thread = threading.Thread(target=self._readerthread, + args=(self.stdout, stdout)) + stdout_thread.setDaemon(True) + stdout_thread.start() + if self.stderr: + stderr = [] + stderr_thread = threading.Thread(target=self._readerthread, + args=(self.stderr, stderr)) + stderr_thread.setDaemon(True) + stderr_thread.start() + + if self.stdin: + if input is not None: + self.stdin.write(input) + self.stdin.close() + + if self.stdout: + stdout_thread.join() + if self.stderr: + stderr_thread.join() + + # All data exchanged. Translate lists into strings. + if stdout is not None: + stdout = stdout[0] + if stderr is not None: + stderr = stderr[0] + + # Translate newlines, if requested. We cannot let the file + # object do the translation: It is based on stdio, which is + # impossible to combine with select (unless forcing no + # buffering). + if self.universal_newlines and hasattr(file, 'newlines'): + if stdout: + stdout = self._translate_newlines(stdout) + if stderr: + stderr = self._translate_newlines(stderr) + + self.wait() + return (stdout, stderr) + + else: + # + # POSIX methods + # + def _get_handles(self, stdin, stdout, stderr): + """Construct and return tupel with IO objects: + p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite + """ + p2cread, p2cwrite = None, None + c2pread, c2pwrite = None, None + errread, errwrite = None, None + + if stdin is None: + pass + elif stdin == PIPE: + p2cread, p2cwrite = os.pipe() + elif is_int(stdin): + p2cread = stdin + else: + # Assuming file-like object + p2cread = stdin.fileno() + + if stdout is None: + pass + elif stdout == PIPE: + c2pread, c2pwrite = os.pipe() + elif is_int(stdout): + c2pwrite = stdout + else: + # Assuming file-like object + c2pwrite = stdout.fileno() + + if stderr is None: + pass + elif stderr == PIPE: + errread, errwrite = os.pipe() + elif stderr == STDOUT: + errwrite = c2pwrite + elif is_int(stderr): + errwrite = stderr + else: + # Assuming file-like object + errwrite = stderr.fileno() + + return (p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite) + + + def _set_cloexec_flag(self, fd): + try: + cloexec_flag = fcntl.FD_CLOEXEC + except AttributeError: + cloexec_flag = 1 + + old = fcntl.fcntl(fd, fcntl.F_GETFD) + fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag) + + + def _close_fds(self, but): + for i in xrange(3, MAXFD): + if i == but: + continue + try: + os.close(i) + except KeyboardInterrupt: + raise # SCons: don't swallow keyboard interrupts + except: + pass + + + def _execute_child(self, args, executable, preexec_fn, close_fds, + cwd, env, universal_newlines, + startupinfo, creationflags, shell, + p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite): + """Execute program (POSIX version)""" + + if is_string(args): + args = [args] + + if shell: + args = ["/bin/sh", "-c"] + args + + if executable is None: + executable = args[0] + + # For transferring possible exec failure from child to parent + # The first char specifies the exception type: 0 means + # OSError, 1 means some other error. 
+ errpipe_read, errpipe_write = os.pipe() + self._set_cloexec_flag(errpipe_write) + + self.pid = os.fork() + self._child_created = True + if self.pid == 0: + # Child + try: + # Close parent's pipe ends + if p2cwrite: + os.close(p2cwrite) + if c2pread: + os.close(c2pread) + if errread: + os.close(errread) + os.close(errpipe_read) + + # Dup fds for child + if p2cread: + os.dup2(p2cread, 0) + if c2pwrite: + os.dup2(c2pwrite, 1) + if errwrite: + os.dup2(errwrite, 2) + + # Close pipe fds. Make sure we don't close the same + # fd more than once, or standard fds. + try: + set + except NameError: + # Fall-back for earlier Python versions, so epydoc + # can use this module directly to execute things. + if p2cread: + os.close(p2cread) + if c2pwrite and c2pwrite not in (p2cread,): + os.close(c2pwrite) + if errwrite and errwrite not in (p2cread, c2pwrite): + os.close(errwrite) + else: + for fd in set((p2cread, c2pwrite, errwrite))-set((0,1,2)): + if fd: os.close(fd) + + # Close all other fds, if asked for + if close_fds: + self._close_fds(but=errpipe_write) + + if cwd is not None: + os.chdir(cwd) + + if preexec_fn: + apply(preexec_fn) + + if env is None: + os.execvp(executable, args) + else: + os.execvpe(executable, args, env) + + except KeyboardInterrupt: + raise # SCons: don't swallow keyboard interrupts + + except: + exc_type, exc_value, tb = sys.exc_info() + # Save the traceback and attach it to the exception object + exc_lines = traceback.format_exception(exc_type, + exc_value, + tb) + exc_value.child_traceback = string.join(exc_lines, '') + os.write(errpipe_write, pickle.dumps(exc_value)) + + # This exitcode won't be reported to applications, so it + # really doesn't matter what we return. + os._exit(255) + + # Parent + os.close(errpipe_write) + if p2cread and p2cwrite: + os.close(p2cread) + if c2pwrite and c2pread: + os.close(c2pwrite) + if errwrite and errread: + os.close(errwrite) + + # Wait for exec to fail or succeed; possibly raising exception + data = os.read(errpipe_read, 1048576) # Exceptions limited to 1 MB + os.close(errpipe_read) + if data != "": + os.waitpid(self.pid, 0) + child_exception = pickle.loads(data) + raise child_exception + + + def _handle_exitstatus(self, sts): + if os.WIFSIGNALED(sts): + self.returncode = -os.WTERMSIG(sts) + elif os.WIFEXITED(sts): + self.returncode = os.WEXITSTATUS(sts) + else: + # Should never happen + raise RuntimeError("Unknown child exit status!") + + + def poll(self, _deadstate=None): + """Check if child process has terminated. Returns returncode + attribute.""" + if self.returncode is None: + try: + pid, sts = os.waitpid(self.pid, os.WNOHANG) + if pid == self.pid: + self._handle_exitstatus(sts) + except os.error: + if _deadstate is not None: + self.returncode = _deadstate + return self.returncode + + + def wait(self): + """Wait for child process to terminate. Returns returncode + attribute.""" + if self.returncode is None: + pid, sts = os.waitpid(self.pid, 0) + self._handle_exitstatus(sts) + return self.returncode + + + def _communicate(self, input): + read_set = [] + write_set = [] + stdout = None # Return + stderr = None # Return + + if self.stdin: + # Flush stdio buffer. This might block, if the user has + # been writing to .stdin in an uncontrolled fashion. 
+ self.stdin.flush() + if input: + write_set.append(self.stdin) + else: + self.stdin.close() + if self.stdout: + read_set.append(self.stdout) + stdout = [] + if self.stderr: + read_set.append(self.stderr) + stderr = [] + + input_offset = 0 + while read_set or write_set: + rlist, wlist, xlist = select.select(read_set, write_set, []) + + if self.stdin in wlist: + # When select has indicated that the file is writable, + # we can write up to PIPE_BUF bytes without risk + # blocking. POSIX defines PIPE_BUF >= 512 + bytes_written = os.write(self.stdin.fileno(), buffer(input, input_offset, 512)) + input_offset = input_offset + bytes_written + if input_offset >= len(input): + self.stdin.close() + write_set.remove(self.stdin) + + if self.stdout in rlist: + data = os.read(self.stdout.fileno(), 1024) + if data == "": + self.stdout.close() + read_set.remove(self.stdout) + stdout.append(data) + + if self.stderr in rlist: + data = os.read(self.stderr.fileno(), 1024) + if data == "": + self.stderr.close() + read_set.remove(self.stderr) + stderr.append(data) + + # All data exchanged. Translate lists into strings. + if stdout is not None: + stdout = string.join(stdout, '') + if stderr is not None: + stderr = string.join(stderr, '') + + # Translate newlines, if requested. We cannot let the file + # object do the translation: It is based on stdio, which is + # impossible to combine with select (unless forcing no + # buffering). + if self.universal_newlines and hasattr(file, 'newlines'): + if stdout: + stdout = self._translate_newlines(stdout) + if stderr: + stderr = self._translate_newlines(stderr) + + self.wait() + return (stdout, stderr) + + +def _demo_posix(): + # + # Example 1: Simple redirection: Get process list + # + plist = Popen(["ps"], stdout=PIPE).communicate()[0] + print "Process list:" + print plist + + # + # Example 2: Change uid before executing child + # + if os.getuid() == 0: + p = Popen(["id"], preexec_fn=lambda: os.setuid(100)) + p.wait() + + # + # Example 3: Connecting several subprocesses + # + print "Looking for 'hda'..." + p1 = Popen(["dmesg"], stdout=PIPE) + p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE) + print repr(p2.communicate()[0]) + + # + # Example 4: Catch execution error + # + print + print "Trying a weird file..." + try: + print Popen(["/this/path/does/not/exist"]).communicate() + except OSError, e: + if e.errno == errno.ENOENT: + print "The file didn't exist. I thought so..." + print "Child traceback:" + print e.child_traceback + else: + print "Error", e.errno + else: + sys.stderr.write( "Gosh. No error.\n" ) + + +def _demo_windows(): + # + # Example 1: Connecting several subprocesses + # + print "Looking for 'PROMPT' in set output..." + p1 = Popen("set", stdout=PIPE, shell=True) + p2 = Popen('find "PROMPT"', stdin=p1.stdout, stdout=PIPE) + print repr(p2.communicate()[0]) + + # + # Example 2: Simple execution of program + # + print "Executing calc..." + p = Popen("calc") + p.wait() + + +if __name__ == "__main__": + if mswindows: + _demo_windows() + else: + _demo_posix() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/compat/_scons_textwrap.py b/engine/SCons/compat/_scons_textwrap.py new file mode 100644 index 0000000..81781af --- /dev/null +++ b/engine/SCons/compat/_scons_textwrap.py @@ -0,0 +1,382 @@ +"""Text wrapping and filling. +""" + +# Copyright (C) 1999-2001 Gregory P. Ward. +# Copyright (C) 2002, 2003 Python Software Foundation. 
+# Written by Greg Ward <gward@python.net> + +__revision__ = "$Id: textwrap.py,v 1.32.8.2 2004/05/13 01:48:15 gward Exp $" + +import string, re + +try: + unicode +except NameError: + class unicode: + pass + +# Do the right thing with boolean values for all known Python versions +# (so this module can be copied to projects that don't depend on Python +# 2.3, e.g. Optik and Docutils). +try: + True, False +except NameError: + (True, False) = (1, 0) + +__all__ = ['TextWrapper', 'wrap', 'fill'] + +# Hardcode the recognized whitespace characters to the US-ASCII +# whitespace characters. The main reason for doing this is that in +# ISO-8859-1, 0xa0 is non-breaking whitespace, so in certain locales +# that character winds up in string.whitespace. Respecting +# string.whitespace in those cases would 1) make textwrap treat 0xa0 the +# same as any other whitespace char, which is clearly wrong (it's a +# *non-breaking* space), 2) possibly cause problems with Unicode, +# since 0xa0 is not in range(128). +_whitespace = '\t\n\x0b\x0c\r ' + +class TextWrapper: + """ + Object for wrapping/filling text. The public interface consists of + the wrap() and fill() methods; the other methods are just there for + subclasses to override in order to tweak the default behaviour. + If you want to completely replace the main wrapping algorithm, + you'll probably have to override _wrap_chunks(). + + Several instance attributes control various aspects of wrapping: + width (default: 70) + the maximum width of wrapped lines (unless break_long_words + is false) + initial_indent (default: "") + string that will be prepended to the first line of wrapped + output. Counts towards the line's width. + subsequent_indent (default: "") + string that will be prepended to all lines save the first + of wrapped output; also counts towards each line's width. + expand_tabs (default: true) + Expand tabs in input text to spaces before further processing. + Each tab will become 1 .. 8 spaces, depending on its position in + its line. If false, each tab is treated as a single character. + replace_whitespace (default: true) + Replace all whitespace characters in the input text by spaces + after tab expansion. Note that if expand_tabs is false and + replace_whitespace is true, every tab will be converted to a + single space! + fix_sentence_endings (default: false) + Ensure that sentence-ending punctuation is always followed + by two spaces. Off by default because the algorithm is + (unavoidably) imperfect. + break_long_words (default: true) + Break words longer than 'width'. If false, those words will not + be broken, and some lines might be longer than 'width'. + """ + + whitespace_trans = string.maketrans(_whitespace, ' ' * len(_whitespace)) + + unicode_whitespace_trans = {} + try: + uspace = eval("ord(u' ')") + except SyntaxError: + # Python1.5 doesn't understand u'' syntax, in which case we + # won't actually use the unicode translation below, so it + # doesn't matter what value we put in the table. + uspace = ord(' ') + for x in map(ord, _whitespace): + unicode_whitespace_trans[x] = uspace + + # This funky little regex is just the trick for splitting + # text up into word-wrappable chunks. E.g. + # "Hello there -- you goof-ball, use the -b option!" + # splits into + # Hello/ /there/ /--/ /you/ /goof-/ball,/ /use/ /the/ /-b/ /option! + # (after stripping out empty strings). 
+ try: + wordsep_re = re.compile(r'(\s+|' # any whitespace + r'[^\s\w]*\w{2,}-(?=\w{2,})|' # hyphenated words + r'(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))') # em-dash + except re.error: + # Pre-2.0 Python versions don't have the (?<= negative look-behind + # assertion. It mostly doesn't matter for the simple input + # SCons is going to give it, so just leave it out. + wordsep_re = re.compile(r'(\s+|' # any whitespace + r'-*\w{2,}-(?=\w{2,}))') # hyphenated words + + # XXX will there be a locale-or-charset-aware version of + # string.lowercase in 2.3? + sentence_end_re = re.compile(r'[%s]' # lowercase letter + r'[\.\!\?]' # sentence-ending punct. + r'[\"\']?' # optional end-of-quote + % string.lowercase) + + + def __init__(self, + width=70, + initial_indent="", + subsequent_indent="", + expand_tabs=True, + replace_whitespace=True, + fix_sentence_endings=False, + break_long_words=True): + self.width = width + self.initial_indent = initial_indent + self.subsequent_indent = subsequent_indent + self.expand_tabs = expand_tabs + self.replace_whitespace = replace_whitespace + self.fix_sentence_endings = fix_sentence_endings + self.break_long_words = break_long_words + + + # -- Private methods ----------------------------------------------- + # (possibly useful for subclasses to override) + + def _munge_whitespace(self, text): + """_munge_whitespace(text : string) -> string + + Munge whitespace in text: expand tabs and convert all other + whitespace characters to spaces. Eg. " foo\tbar\n\nbaz" + becomes " foo bar baz". + """ + if self.expand_tabs: + text = string.expandtabs(text) + if self.replace_whitespace: + if type(text) == type(''): + text = string.translate(text, self.whitespace_trans) + elif isinstance(text, unicode): + text = string.translate(text, self.unicode_whitespace_trans) + return text + + + def _split(self, text): + """_split(text : string) -> [string] + + Split the text to wrap into indivisible chunks. Chunks are + not quite the same as words; see wrap_chunks() for full + details. As an example, the text + Look, goof-ball -- use the -b option! + breaks into the following chunks: + 'Look,', ' ', 'goof-', 'ball', ' ', '--', ' ', + 'use', ' ', 'the', ' ', '-b', ' ', 'option!' + """ + chunks = self.wordsep_re.split(text) + chunks = filter(None, chunks) + return chunks + + def _fix_sentence_endings(self, chunks): + """_fix_sentence_endings(chunks : [string]) + + Correct for sentence endings buried in 'chunks'. Eg. when the + original text contains "... foo.\nBar ...", munge_whitespace() + and split() will convert that to [..., "foo.", " ", "Bar", ...] + which has one too few spaces; this method simply changes the one + space to two. + """ + i = 0 + pat = self.sentence_end_re + while i < len(chunks)-1: + if chunks[i+1] == " " and pat.search(chunks[i]): + chunks[i+1] = " " + i = i + 2 + else: + i = i + 1 + + def _handle_long_word(self, chunks, cur_line, cur_len, width): + """_handle_long_word(chunks : [string], + cur_line : [string], + cur_len : int, width : int) + + Handle a chunk of text (most likely a word, not whitespace) that + is too long to fit in any line. + """ + space_left = max(width - cur_len, 1) + + # If we're allowed to break long words, then do so: put as much + # of the next chunk onto the current line as will fit. + if self.break_long_words: + cur_line.append(chunks[0][0:space_left]) + chunks[0] = chunks[0][space_left:] + + # Otherwise, we have to preserve the long word intact. 
Only add + # it to the current line if there's nothing already there -- + # that minimizes how much we violate the width constraint. + elif not cur_line: + cur_line.append(chunks.pop(0)) + + # If we're not allowed to break long words, and there's already + # text on the current line, do nothing. Next time through the + # main loop of _wrap_chunks(), we'll wind up here again, but + # cur_len will be zero, so the next line will be entirely + # devoted to the long word that we can't handle right now. + + def _wrap_chunks(self, chunks): + """_wrap_chunks(chunks : [string]) -> [string] + + Wrap a sequence of text chunks and return a list of lines of + length 'self.width' or less. (If 'break_long_words' is false, + some lines may be longer than this.) Chunks correspond roughly + to words and the whitespace between them: each chunk is + indivisible (modulo 'break_long_words'), but a line break can + come between any two chunks. Chunks should not have internal + whitespace; ie. a chunk is either all whitespace or a "word". + Whitespace chunks will be removed from the beginning and end of + lines, but apart from that whitespace is preserved. + """ + lines = [] + if self.width <= 0: + raise ValueError("invalid width %r (must be > 0)" % self.width) + + while chunks: + + # Start the list of chunks that will make up the current line. + # cur_len is just the length of all the chunks in cur_line. + cur_line = [] + cur_len = 0 + + # Figure out which static string will prefix this line. + if lines: + indent = self.subsequent_indent + else: + indent = self.initial_indent + + # Maximum width for this line. + width = self.width - len(indent) + + # First chunk on line is whitespace -- drop it, unless this + # is the very beginning of the text (ie. no lines started yet). + if string.strip(chunks[0]) == '' and lines: + del chunks[0] + + while chunks: + l = len(chunks[0]) + + # Can at least squeeze this chunk onto the current line. + if cur_len + l <= width: + cur_line.append(chunks.pop(0)) + cur_len = cur_len + l + + # Nope, this line is full. + else: + break + + # The current line is full, and the next chunk is too big to + # fit on *any* line (not just this one). + if chunks and len(chunks[0]) > width: + self._handle_long_word(chunks, cur_line, cur_len, width) + + # If the last chunk on this line is all whitespace, drop it. + if cur_line and string.strip(cur_line[-1]) == '': + del cur_line[-1] + + # Convert current line back to a string and store it in list + # of all lines (return value). + if cur_line: + lines.append(indent + string.join(cur_line, '')) + + return lines + + + # -- Public interface ---------------------------------------------- + + def wrap(self, text): + """wrap(text : string) -> [string] + + Reformat the single paragraph in 'text' so it fits in lines of + no more than 'self.width' columns, and return a list of wrapped + lines. Tabs in 'text' are expanded with string.expandtabs(), + and all other whitespace characters (including newline) are + converted to space. + """ + text = self._munge_whitespace(text) + indent = self.initial_indent + chunks = self._split(text) + if self.fix_sentence_endings: + self._fix_sentence_endings(chunks) + return self._wrap_chunks(chunks) + + def fill(self, text): + """fill(text : string) -> string + + Reformat the single paragraph in 'text' to fit in lines of no + more than 'self.width' columns, and return a new string + containing the entire wrapped paragraph. 
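+
+        A small illustration, using the default settings except 'width':
+
+            TextWrapper(width=20).fill("The quick brown fox jumps over the lazy dog")
+            # returns 'The quick brown fox\njumps over the lazy\ndog'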
+ """ + return string.join(self.wrap(text), "\n") + + +# -- Convenience interface --------------------------------------------- + +def wrap(text, width=70, **kwargs): + """Wrap a single paragraph of text, returning a list of wrapped lines. + + Reformat the single paragraph in 'text' so it fits in lines of no + more than 'width' columns, and return a list of wrapped lines. By + default, tabs in 'text' are expanded with string.expandtabs(), and + all other whitespace characters (including newline) are converted to + space. See TextWrapper class for available keyword args to customize + wrapping behaviour. + """ + kw = kwargs.copy() + kw['width'] = width + w = apply(TextWrapper, (), kw) + return w.wrap(text) + +def fill(text, width=70, **kwargs): + """Fill a single paragraph of text, returning a new string. + + Reformat the single paragraph in 'text' to fit in lines of no more + than 'width' columns, and return a new string containing the entire + wrapped paragraph. As with wrap(), tabs are expanded and other + whitespace characters converted to space. See TextWrapper class for + available keyword args to customize wrapping behaviour. + """ + kw = kwargs.copy() + kw['width'] = width + w = apply(TextWrapper, (), kw) + return w.fill(text) + + +# -- Loosely related functionality ------------------------------------- + +def dedent(text): + """dedent(text : string) -> string + + Remove any whitespace than can be uniformly removed from the left + of every line in `text`. + + This can be used e.g. to make triple-quoted strings line up with + the left edge of screen/whatever, while still presenting it in the + source code in indented form. + + For example: + + def test(): + # end first line with \ to avoid the empty line! + s = '''\ + hello + world + ''' + print repr(s) # prints ' hello\n world\n ' + print repr(dedent(s)) # prints 'hello\n world\n' + """ + lines = text.expandtabs().split('\n') + margin = None + for line in lines: + content = line.lstrip() + if not content: + continue + indent = len(line) - len(content) + if margin is None: + margin = indent + else: + margin = min(margin, indent) + + if margin is not None and margin > 0: + for i in range(len(lines)): + lines[i] = lines[i][margin:] + + return string.join(lines, '\n') + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/compat/builtins.py b/engine/SCons/compat/builtins.py new file mode 100644 index 0000000..7ce626d --- /dev/null +++ b/engine/SCons/compat/builtins.py @@ -0,0 +1,187 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +# Portions of the following are derived from the compat.py file in +# Twisted, under the following copyright: +# +# Copyright (c) 2001-2004 Twisted Matrix Laboratories + +__doc__ = """ +Compatibility idioms for __builtin__ names + +This module adds names to the __builtin__ module for things that we want +to use in SCons but which don't show up until later Python versions than +the earliest ones we support. + +This module checks for the following __builtin__ names: + + all() + any() + bool() + dict() + True + False + zip() + +Implementations of functions are *NOT* guaranteed to be fully compliant +with these functions in later versions of Python. We are only concerned +with adding functionality that we actually use in SCons, so be wary +if you lift this code for other uses. (That said, making these more +nearly the same as later, official versions is still a desirable goal, +we just don't need to be obsessive about it.) + +If you're looking at this with pydoc and various names don't show up in +the FUNCTIONS or DATA output, that means those names are already built in +to this version of Python and we don't need to add them from this module. +""" + +__revision__ = "src/engine/SCons/compat/builtins.py 4577 2009/12/27 19:43:56 scons" + +import __builtin__ + +try: + all +except NameError: + # Pre-2.5 Python has no all() function. + def all(iterable): + """ + Returns True if all elements of the iterable are true. + """ + for element in iterable: + if not element: + return False + return True + __builtin__.all = all + all = all + +try: + any +except NameError: + # Pre-2.5 Python has no any() function. + def any(iterable): + """ + Returns True if any element of the iterable is true. + """ + for element in iterable: + if element: + return True + return False + __builtin__.any = any + any = any + +try: + bool +except NameError: + # Pre-2.2 Python has no bool() function. + def bool(value): + """Demote a value to 0 or 1, depending on its truth value. + + This is not to be confused with types.BooleanType, which is + way too hard to duplicate in early Python versions to be + worth the trouble. + """ + return not not value + __builtin__.bool = bool + bool = bool + +try: + dict +except NameError: + # Pre-2.2 Python has no dict() keyword. + def dict(seq=[], **kwargs): + """ + New dictionary initialization. + """ + d = {} + for k, v in seq: + d[k] = v + d.update(kwargs) + return d + __builtin__.dict = dict + +try: + False +except NameError: + # Pre-2.2 Python has no False keyword. + __builtin__.False = not 1 + # Assign to False in this module namespace so it shows up in pydoc output. + False = False + +try: + True +except NameError: + # Pre-2.2 Python has no True keyword. + __builtin__.True = not 0 + # Assign to True in this module namespace so it shows up in pydoc output. + True = True + +try: + file +except NameError: + # Pre-2.2 Python has no file() function. + __builtin__.file = open + +# +try: + zip +except NameError: + # Pre-2.2 Python has no zip() function. + def zip(*lists): + """ + Emulates the behavior we need from the built-in zip() function + added in Python 2.2. + + Returns a list of tuples, where each tuple contains the i-th + element rom each of the argument sequences. 
The returned + list is truncated in length to the length of the shortest + argument sequence. + """ + result = [] + for i in xrange(min(map(len, lists))): + result.append(tuple(map(lambda l, i=i: l[i], lists))) + return result + __builtin__.zip = zip + + + +#if sys.version_info[:3] in ((2, 2, 0), (2, 2, 1)): +# def lstrip(s, c=string.whitespace): +# while s and s[0] in c: +# s = s[1:] +# return s +# def rstrip(s, c=string.whitespace): +# while s and s[-1] in c: +# s = s[:-1] +# return s +# def strip(s, c=string.whitespace, l=lstrip, r=rstrip): +# return l(r(s, c), c) +# +# object.__setattr__(str, 'lstrip', lstrip) +# object.__setattr__(str, 'rstrip', rstrip) +# object.__setattr__(str, 'strip', strip) + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/cpp.py b/engine/SCons/cpp.py new file mode 100644 index 0000000..da72ad9 --- /dev/null +++ b/engine/SCons/cpp.py @@ -0,0 +1,598 @@ +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/cpp.py 4577 2009/12/27 19:43:56 scons" + +__doc__ = """ +SCons C Pre-Processor module +""" + +# TODO(1.5): remove this import +# This module doesn't use anything from SCons by name, but we import SCons +# here to pull in zip() from the SCons.compat layer for early Pythons. +import SCons + +import os +import re +import string + +# +# First "subsystem" of regular expressions that we set up: +# +# Stuff to turn the C preprocessor directives in a file's contents into +# a list of tuples that we can process easily. +# + +# A table of regular expressions that fetch the arguments from the rest of +# a C preprocessor line. Different directives have different arguments +# that we want to fetch, using the regular expressions to which the lists +# of preprocessor directives map. +cpp_lines_dict = { + # Fetch the rest of a #if/#elif/#ifdef/#ifndef as one argument, + # separated from the keyword by white space. + ('if', 'elif', 'ifdef', 'ifndef',) + : '\s+(.+)', + + # Fetch the rest of a #import/#include/#include_next line as one + # argument, with white space optional. + ('import', 'include', 'include_next',) + : '\s*(.+)', + + # We don't care what comes after a #else or #endif line. + ('else', 'endif',) : '', + + # Fetch three arguments from a #define line: + # 1) The #defined keyword. 
+ # 2) The optional parentheses and arguments (if it's a function-like + # macro, '' if it's not). + # 3) The expansion value. + ('define',) : '\s+([_A-Za-z][_A-Za-z0-9_]*)(\([^)]*\))?\s*(.*)', + + # Fetch the #undefed keyword from a #undef line. + ('undef',) : '\s+([_A-Za-z][A-Za-z0-9_]*)', +} + +# Create a table that maps each individual C preprocessor directive to +# the corresponding compiled regular expression that fetches the arguments +# we care about. +Table = {} +for op_list, expr in cpp_lines_dict.items(): + e = re.compile(expr) + for op in op_list: + Table[op] = e +del e +del op +del op_list + +# Create a list of the expressions we'll use to match all of the +# preprocessor directives. These are the same as the directives +# themselves *except* that we must use a negative lookahead assertion +# when matching "if" so it doesn't match the "if" in "ifdef." +override = { + 'if' : 'if(?!def)', +} +l = map(lambda x, o=override: o.get(x, x), Table.keys()) + + +# Turn the list of expressions into one big honkin' regular expression +# that will match all the preprocessor lines at once. This will return +# a list of tuples, one for each preprocessor line. The preprocessor +# directive will be the first element in each tuple, and the rest of +# the line will be the second element. +e = '^\s*#\s*(' + string.join(l, '|') + ')(.*)$' + +# And last but not least, compile the expression. +CPP_Expression = re.compile(e, re.M) + + + + +# +# Second "subsystem" of regular expressions that we set up: +# +# Stuff to translate a C preprocessor expression (as found on a #if or +# #elif line) into an equivalent Python expression that we can eval(). +# + +# A dictionary that maps the C representation of Boolean operators +# to their Python equivalents. +CPP_to_Python_Ops_Dict = { + '!' : ' not ', + '!=' : ' != ', + '&&' : ' and ', + '||' : ' or ', + '?' : ' and ', + ':' : ' or ', + '\r' : '', +} + +CPP_to_Python_Ops_Sub = lambda m, d=CPP_to_Python_Ops_Dict: d[m.group(0)] + +# We have to sort the keys by length so that longer expressions +# come *before* shorter expressions--in particular, "!=" must +# come before "!" in the alternation. Without this, the Python +# re module, as late as version 2.2.2, empirically matches the +# "!" in "!=" first, instead of finding the longest match. +# What's up with that? +l = CPP_to_Python_Ops_Dict.keys() +l.sort(lambda a, b: cmp(len(b), len(a))) + +# Turn the list of keys into one regular expression that will allow us +# to substitute all of the operators at once. +expr = string.join(map(re.escape, l), '|') + +# ...and compile the expression. +CPP_to_Python_Ops_Expression = re.compile(expr) + +# A separate list of expressions to be evaluated and substituted +# sequentially, not all at once. +CPP_to_Python_Eval_List = [ + ['defined\s+(\w+)', '__dict__.has_key("\\1")'], + ['defined\s*\((\w+)\)', '__dict__.has_key("\\1")'], + ['/\*.*\*/', ''], + ['/\*.*', ''], + ['//.*', ''], + ['(0x[0-9A-Fa-f]*)[UL]+', '\\1L'], +] + +# Replace the string representations of the regular expressions in the +# list with compiled versions. +for l in CPP_to_Python_Eval_List: + l[0] = re.compile(l[0]) + +# Wrap up all of the above into a handy function. +def CPP_to_Python(s): + """ + Converts a C pre-processor expression into an equivalent + Python expression that can be evaluated. 
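+
+    For example, 'defined(FOO) && BAR != 1' becomes (modulo whitespace)
+    '__dict__.has_key("FOO") and BAR != 1', which can then be eval()ed
+    against the preprocessor's namespace of #define values.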
+ """ + s = CPP_to_Python_Ops_Expression.sub(CPP_to_Python_Ops_Sub, s) + for expr, repl in CPP_to_Python_Eval_List: + s = expr.sub(repl, s) + return s + + + +del expr +del l +del override + + + +class FunctionEvaluator: + """ + Handles delayed evaluation of a #define function call. + """ + def __init__(self, name, args, expansion): + """ + Squirrels away the arguments and expansion value of a #define + macro function for later evaluation when we must actually expand + a value that uses it. + """ + self.name = name + self.args = function_arg_separator.split(args) + try: + expansion = string.split(expansion, '##') + except (AttributeError, TypeError): + # Python 1.5 throws TypeError if "expansion" isn't a string, + # later versions throw AttributeError. + pass + self.expansion = expansion + def __call__(self, *values): + """ + Evaluates the expansion of a #define macro function called + with the specified values. + """ + if len(self.args) != len(values): + raise ValueError, "Incorrect number of arguments to `%s'" % self.name + # Create a dictionary that maps the macro arguments to the + # corresponding values in this "call." We'll use this when we + # eval() the expansion so that arguments will get expanded to + # the right values. + locals = {} + for k, v in zip(self.args, values): + locals[k] = v + + parts = [] + for s in self.expansion: + if not s in self.args: + s = repr(s) + parts.append(s) + statement = string.join(parts, ' + ') + + return eval(statement, globals(), locals) + + + +# Find line continuations. +line_continuations = re.compile('\\\\\r?\n') + +# Search for a "function call" macro on an expansion. Returns the +# two-tuple of the "function" name itself, and a string containing the +# arguments within the call parentheses. +function_name = re.compile('(\S+)\(([^)]*)\)') + +# Split a string containing comma-separated function call arguments into +# the separate arguments. +function_arg_separator = re.compile(',\s*') + + + +class PreProcessor: + """ + The main workhorse class for handling C pre-processing. + """ + def __init__(self, current=os.curdir, cpppath=(), dict={}, all=0): + global Table + + cpppath = tuple(cpppath) + + self.searchpath = { + '"' : (current,) + cpppath, + '<' : cpppath + (current,), + } + + # Initialize our C preprocessor namespace for tracking the + # values of #defined keywords. We use this namespace to look + # for keywords on #ifdef/#ifndef lines, and to eval() the + # expressions on #if/#elif lines (after massaging them from C to + # Python). + self.cpp_namespace = dict.copy() + self.cpp_namespace['__dict__'] = self.cpp_namespace + + if all: + self.do_include = self.all_include + + # For efficiency, a dispatch table maps each C preprocessor + # directive (#if, #define, etc.) to the method that should be + # called when we see it. We accomodate state changes (#if, + # #ifdef, #ifndef) by pushing the current dispatch table on a + # stack and changing what method gets called for each relevant + # directive we might see next at this level (#else, #elif). + # #endif will simply pop the stack. + d = { + 'scons_current_file' : self.scons_current_file + } + for op in Table.keys(): + d[op] = getattr(self, 'do_' + op) + self.default_table = d + + # Controlling methods. + + def tupleize(self, contents): + """ + Turns the contents of a file into a list of easily-processed + tuples describing the CPP lines in the file. + + The first element of each tuple is the line's preprocessor + directive (#if, #include, #define, etc., minus the initial '#'). 
+ The remaining elements are specific to the type of directive, as + pulled apart by the regular expression. + """ + global CPP_Expression, Table + contents = line_continuations.sub('', contents) + cpp_tuples = CPP_Expression.findall(contents) + return map(lambda m, t=Table: + (m[0],) + t[m[0]].match(m[1]).groups(), + cpp_tuples) + + def __call__(self, file): + """ + Pre-processes a file. + + This is the main public entry point. + """ + self.current_file = file + return self.process_contents(self.read_file(file), file) + + def process_contents(self, contents, fname=None): + """ + Pre-processes a file contents. + + This is the main internal entry point. + """ + self.stack = [] + self.dispatch_table = self.default_table.copy() + self.current_file = fname + self.tuples = self.tupleize(contents) + + self.initialize_result(fname) + while self.tuples: + t = self.tuples.pop(0) + # Uncomment to see the list of tuples being processed (e.g., + # to validate the CPP lines are being translated correctly). + #print t + self.dispatch_table[t[0]](t) + return self.finalize_result(fname) + + # Dispatch table stack manipulation methods. + + def save(self): + """ + Pushes the current dispatch table on the stack and re-initializes + the current dispatch table to the default. + """ + self.stack.append(self.dispatch_table) + self.dispatch_table = self.default_table.copy() + + def restore(self): + """ + Pops the previous dispatch table off the stack and makes it the + current one. + """ + try: self.dispatch_table = self.stack.pop() + except IndexError: pass + + # Utility methods. + + def do_nothing(self, t): + """ + Null method for when we explicitly want the action for a + specific preprocessor directive to do nothing. + """ + pass + + def scons_current_file(self, t): + self.current_file = t[1] + + def eval_expression(self, t): + """ + Evaluates a C preprocessor expression. + + This is done by converting it to a Python equivalent and + eval()ing it in the C preprocessor namespace we use to + track #define values. + """ + t = CPP_to_Python(string.join(t[1:])) + try: return eval(t, self.cpp_namespace) + except (NameError, TypeError): return 0 + + def initialize_result(self, fname): + self.result = [fname] + + def finalize_result(self, fname): + return self.result[1:] + + def find_include_file(self, t): + """ + Finds the #include file for a given preprocessor tuple. + """ + fname = t[2] + for d in self.searchpath[t[1]]: + if d == os.curdir: + f = fname + else: + f = os.path.join(d, fname) + if os.path.isfile(f): + return f + return None + + def read_file(self, file): + return open(file).read() + + # Start and stop processing include lines. + + def start_handling_includes(self, t=None): + """ + Causes the PreProcessor object to start processing #import, + #include and #include_next lines. + + This method will be called when a #if, #ifdef, #ifndef or #elif + evaluates True, or when we reach the #else in a #if, #ifdef, + #ifndef or #elif block where a condition already evaluated + False. + + """ + d = self.dispatch_table + d['import'] = self.do_import + d['include'] = self.do_include + d['include_next'] = self.do_include + + def stop_handling_includes(self, t=None): + """ + Causes the PreProcessor object to stop processing #import, + #include and #include_next lines. + + This method will be called when a #if, #ifdef, #ifndef or #elif + evaluates False, or when we reach the #else in a #if, #ifdef, + #ifndef or #elif block where a condition already evaluated True. 
+ """ + d = self.dispatch_table + d['import'] = self.do_nothing + d['include'] = self.do_nothing + d['include_next'] = self.do_nothing + + # Default methods for handling all of the preprocessor directives. + # (Note that what actually gets called for a given directive at any + # point in time is really controlled by the dispatch_table.) + + def _do_if_else_condition(self, condition): + """ + Common logic for evaluating the conditions on #if, #ifdef and + #ifndef lines. + """ + self.save() + d = self.dispatch_table + if condition: + self.start_handling_includes() + d['elif'] = self.stop_handling_includes + d['else'] = self.stop_handling_includes + else: + self.stop_handling_includes() + d['elif'] = self.do_elif + d['else'] = self.start_handling_includes + + def do_ifdef(self, t): + """ + Default handling of a #ifdef line. + """ + self._do_if_else_condition(self.cpp_namespace.has_key(t[1])) + + def do_ifndef(self, t): + """ + Default handling of a #ifndef line. + """ + self._do_if_else_condition(not self.cpp_namespace.has_key(t[1])) + + def do_if(self, t): + """ + Default handling of a #if line. + """ + self._do_if_else_condition(self.eval_expression(t)) + + def do_elif(self, t): + """ + Default handling of a #elif line. + """ + d = self.dispatch_table + if self.eval_expression(t): + self.start_handling_includes() + d['elif'] = self.stop_handling_includes + d['else'] = self.stop_handling_includes + + def do_else(self, t): + """ + Default handling of a #else line. + """ + pass + + def do_endif(self, t): + """ + Default handling of a #endif line. + """ + self.restore() + + def do_define(self, t): + """ + Default handling of a #define line. + """ + _, name, args, expansion = t + try: + expansion = int(expansion) + except (TypeError, ValueError): + pass + if args: + evaluator = FunctionEvaluator(name, args[1:-1], expansion) + self.cpp_namespace[name] = evaluator + else: + self.cpp_namespace[name] = expansion + + def do_undef(self, t): + """ + Default handling of a #undef line. + """ + try: del self.cpp_namespace[t[1]] + except KeyError: pass + + def do_import(self, t): + """ + Default handling of a #import line. + """ + # XXX finish this -- maybe borrow/share logic from do_include()...? + pass + + def do_include(self, t): + """ + Default handling of a #include line. + """ + t = self.resolve_include(t) + include_file = self.find_include_file(t) + if include_file: + #print "include_file =", include_file + self.result.append(include_file) + contents = self.read_file(include_file) + new_tuples = [('scons_current_file', include_file)] + \ + self.tupleize(contents) + \ + [('scons_current_file', self.current_file)] + self.tuples[:] = new_tuples + self.tuples + + # Date: Tue, 22 Nov 2005 20:26:09 -0500 + # From: Stefan Seefeld <seefeld@sympatico.ca> + # + # By the way, #include_next is not the same as #include. The difference + # being that #include_next starts its search in the path following the + # path that let to the including file. In other words, if your system + # include paths are ['/foo', '/bar'], and you are looking at a header + # '/foo/baz.h', it might issue an '#include_next <baz.h>' which would + # correctly resolve to '/bar/baz.h' (if that exists), but *not* see + # '/foo/baz.h' again. See http://www.delorie.com/gnu/docs/gcc/cpp_11.html + # for more reasoning. + # + # I have no idea in what context 'import' might be used. + + # XXX is #include_next really the same as #include ? + do_include_next = do_include + + # Utility methods for handling resolution of include files. 
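+    #
+    # For instance, given the (purely illustrative) input
+    #
+    #     #define  CONFIG_HEADER  "config.h"
+    #     #include CONFIG_HEADER
+    #
+    # resolve_include() below keeps looking the bare name up in
+    # cpp_namespace until it reaches a string that starts with '"' or '<',
+    # and returns ('include', '"', 'config.h').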
+ + def resolve_include(self, t): + """Resolve a tuple-ized #include line. + + This handles recursive expansion of values without "" or <> + surrounding the name until an initial " or < is found, to handle + #include FILE + where FILE is a #define somewhere else. + """ + s = t[1] + while not s[0] in '<"': + #print "s =", s + try: + s = self.cpp_namespace[s] + except KeyError: + m = function_name.search(s) + s = self.cpp_namespace[m.group(1)] + if callable(s): + args = function_arg_separator.split(m.group(2)) + s = apply(s, args) + if not s: + return None + return (t[0], s[0], s[1:-1]) + + def all_include(self, t): + """ + """ + self.result.append(self.resolve_include(t)) + +class DumbPreProcessor(PreProcessor): + """A preprocessor that ignores all #if/#elif/#else/#endif directives + and just reports back *all* of the #include files (like the classic + SCons scanner did). + + This is functionally equivalent to using a regular expression to + find all of the #include lines, only slower. It exists mainly as + an example of how the main PreProcessor class can be sub-classed + to tailor its behavior. + """ + def __init__(self, *args, **kw): + apply(PreProcessor.__init__, (self,)+args, kw) + d = self.default_table + for func in ['if', 'elif', 'else', 'endif', 'ifdef', 'ifndef']: + d[func] = d[func] = self.do_nothing + +del __revision__ + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/dblite.py b/engine/SCons/dblite.py new file mode 100644 index 0000000..bcb2aa0 --- /dev/null +++ b/engine/SCons/dblite.py @@ -0,0 +1,248 @@ +# dblite.py module contributed by Ralf W. Grosse-Kunstleve. +# Extended for Unicode by Steven Knight. + +import cPickle +import time +import shutil +import os +import types +import __builtin__ + +keep_all_files = 00000 +ignore_corrupt_dbfiles = 0 + +def corruption_warning(filename): + print "Warning: Discarding corrupt database:", filename + +if hasattr(types, 'UnicodeType'): + def is_string(s): + t = type(s) + return t is types.StringType or t is types.UnicodeType +else: + def is_string(s): + return type(s) is types.StringType + +try: + unicode('a') +except NameError: + def unicode(s): return s + +dblite_suffix = '.dblite' +tmp_suffix = '.tmp' + +class dblite: + + # Squirrel away references to the functions in various modules + # that we'll use when our __del__() method calls our sync() method + # during shutdown. We might get destroyed when Python is in the midst + # of tearing down the different modules we import in an essentially + # arbitrary order, and some of the various modules's global attributes + # may already be wiped out from under us. + # + # See the discussion at: + # http://mail.python.org/pipermail/python-bugs-list/2003-March/016877.html + + _open = __builtin__.open + _cPickle_dump = cPickle.dump + _os_chmod = os.chmod + try: + _os_chown = os.chown + except AttributeError: + _os_chown = None + _os_rename = os.rename + _os_unlink = os.unlink + _shutil_copyfile = shutil.copyfile + _time_time = time.time + + def __init__(self, file_base_name, flag, mode): + assert flag in (None, "r", "w", "c", "n") + if (flag is None): flag = "r" + base, ext = os.path.splitext(file_base_name) + if ext == dblite_suffix: + # There's already a suffix on the file name, don't add one. 
+ self._file_name = file_base_name + self._tmp_name = base + tmp_suffix + else: + self._file_name = file_base_name + dblite_suffix + self._tmp_name = file_base_name + tmp_suffix + self._flag = flag + self._mode = mode + self._dict = {} + self._needs_sync = 00000 + if self._os_chown is not None and (os.geteuid()==0 or os.getuid()==0): + # running as root; chown back to current owner/group when done + try: + statinfo = os.stat(self._file_name) + self._chown_to = statinfo.st_uid + self._chgrp_to = statinfo.st_gid + except OSError, e: + # db file doesn't exist yet. + # Check os.environ for SUDO_UID, use if set + self._chown_to = int(os.environ.get('SUDO_UID', -1)) + self._chgrp_to = int(os.environ.get('SUDO_GID', -1)) + else: + self._chown_to = -1 # don't chown + self._chgrp_to = -1 # don't chgrp + if (self._flag == "n"): + self._open(self._file_name, "wb", self._mode) + else: + try: + f = self._open(self._file_name, "rb") + except IOError, e: + if (self._flag != "c"): + raise e + self._open(self._file_name, "wb", self._mode) + else: + p = f.read() + if (len(p) > 0): + try: + self._dict = cPickle.loads(p) + except (cPickle.UnpicklingError, EOFError): + if (ignore_corrupt_dbfiles == 0): raise + if (ignore_corrupt_dbfiles == 1): + corruption_warning(self._file_name) + + def __del__(self): + if (self._needs_sync): + self.sync() + + def sync(self): + self._check_writable() + f = self._open(self._tmp_name, "wb", self._mode) + self._cPickle_dump(self._dict, f, 1) + f.close() + # Windows doesn't allow renaming if the file exists, so unlink + # it first, chmod'ing it to make sure we can do so. On UNIX, we + # may not be able to chmod the file if it's owned by someone else + # (e.g. from a previous run as root). We should still be able to + # unlink() the file if the directory's writable, though, so ignore + # any OSError exception thrown by the chmod() call. 
+ try: self._os_chmod(self._file_name, 0777) + except OSError: pass + self._os_unlink(self._file_name) + self._os_rename(self._tmp_name, self._file_name) + if self._os_chown is not None and self._chown_to > 0: # don't chown to root or -1 + try: + self._os_chown(self._file_name, self._chown_to, self._chgrp_to) + except OSError: + pass + self._needs_sync = 00000 + if (keep_all_files): + self._shutil_copyfile( + self._file_name, + self._file_name + "_" + str(int(self._time_time()))) + + def _check_writable(self): + if (self._flag == "r"): + raise IOError("Read-only database: %s" % self._file_name) + + def __getitem__(self, key): + return self._dict[key] + + def __setitem__(self, key, value): + self._check_writable() + if (not is_string(key)): + raise TypeError, "key `%s' must be a string but is %s" % (key, type(key)) + if (not is_string(value)): + raise TypeError, "value `%s' must be a string but is %s" % (value, type(value)) + self._dict[key] = value + self._needs_sync = 0001 + + def keys(self): + return self._dict.keys() + + def has_key(self, key): + return key in self._dict + + def __contains__(self, key): + return key in self._dict + + def iterkeys(self): + return self._dict.iterkeys() + + __iter__ = iterkeys + + def __len__(self): + return len(self._dict) + +def open(file, flag=None, mode=0666): + return dblite(file, flag, mode) + +def _exercise(): + db = open("tmp", "n") + assert len(db) == 0 + db["foo"] = "bar" + assert db["foo"] == "bar" + db[unicode("ufoo")] = unicode("ubar") + assert db[unicode("ufoo")] == unicode("ubar") + db.sync() + db = open("tmp", "c") + assert len(db) == 2, len(db) + assert db["foo"] == "bar" + db["bar"] = "foo" + assert db["bar"] == "foo" + db[unicode("ubar")] = unicode("ufoo") + assert db[unicode("ubar")] == unicode("ufoo") + db.sync() + db = open("tmp", "r") + assert len(db) == 4, len(db) + assert db["foo"] == "bar" + assert db["bar"] == "foo" + assert db[unicode("ufoo")] == unicode("ubar") + assert db[unicode("ubar")] == unicode("ufoo") + try: + db.sync() + except IOError, e: + assert str(e) == "Read-only database: tmp.dblite" + else: + raise RuntimeError, "IOError expected." + db = open("tmp", "w") + assert len(db) == 4 + db["ping"] = "pong" + db.sync() + try: + db[(1,2)] = "tuple" + except TypeError, e: + assert str(e) == "key `(1, 2)' must be a string but is <type 'tuple'>", str(e) + else: + raise RuntimeError, "TypeError exception expected" + try: + db["list"] = [1,2] + except TypeError, e: + assert str(e) == "value `[1, 2]' must be a string but is <type 'list'>", str(e) + else: + raise RuntimeError, "TypeError exception expected" + db = open("tmp", "r") + assert len(db) == 5 + db = open("tmp", "n") + assert len(db) == 0 + _open("tmp.dblite", "w") + db = open("tmp", "r") + _open("tmp.dblite", "w").write("x") + try: + db = open("tmp", "r") + except cPickle.UnpicklingError: + pass + else: + raise RuntimeError, "cPickle exception expected." + global ignore_corrupt_dbfiles + ignore_corrupt_dbfiles = 2 + db = open("tmp", "r") + assert len(db) == 0 + os.unlink("tmp.dblite") + try: + db = open("tmp", "w") + except IOError, e: + assert str(e) == "[Errno 2] No such file or directory: 'tmp.dblite'", str(e) + else: + raise RuntimeError, "IOError expected." 
+ print "OK" + +if (__name__ == "__main__"): + _exercise() + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/engine/SCons/exitfuncs.py b/engine/SCons/exitfuncs.py new file mode 100644 index 0000000..7f41071 --- /dev/null +++ b/engine/SCons/exitfuncs.py @@ -0,0 +1,77 @@ +"""SCons.exitfuncs + +Register functions which are executed when SCons exits for any reason. + +""" + +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__revision__ = "src/engine/SCons/exitfuncs.py 4577 2009/12/27 19:43:56 scons" + + + +_exithandlers = [] +def _run_exitfuncs(): + """run any registered exit functions + + _exithandlers is traversed in reverse order so functions are executed + last in, first out. + """ + + while _exithandlers: + func, targs, kargs = _exithandlers.pop() + apply(func, targs, kargs) + +def register(func, *targs, **kargs): + """register a function to be executed upon normal program termination + + func - function to be called at exit + targs - optional arguments to pass to func + kargs - optional keyword arguments to pass to func + """ + _exithandlers.append((func, targs, kargs)) + +import sys + +try: + x = sys.exitfunc + + # if x isn't our own exit func executive, assume it's another + # registered exit function - append it to our list... + if x != _run_exitfuncs: + register(x) + +except AttributeError: + pass + +# make our exit function get run by python when it exits: +sys.exitfunc = _run_exitfuncs + +del sys + +# Local Variables: +# tab-width:4 +# indent-tabs-mode:nil +# End: +# vim: set expandtab tabstop=4 shiftwidth=4: |
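register() stores the callable together with any positional and keyword
arguments and replays the registered functions last-in, first-out through
the sys.exitfunc hook when the interpreter shuts down normally. A minimal
usage sketch (the callback name and message are illustrative only):

    import SCons.exitfuncs

    def report(msg):
        print msg

    SCons.exitfuncs.register(report, "build finished, cleaning up")
    # report("build finished, cleaning up") now runs automatically at exit,
    # after any functions registered later than it.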