author    | Jörg Frings-Fürst <debian@jff-webhosting.net> | 2019-08-11 12:17:57 +0200
committer | Jörg Frings-Fürst <debian@jff-webhosting.net> | 2019-08-11 12:17:57 +0200
commit    | c8ea3b672655ddab746a7aea5a50217057b02b9e
tree      | 01a0e712f4cf32c7140cf1a4ae14db4da4202253 /engine/SCons/Node
parent    | ca7be46fc0013fc037a045b6d4df73776461e821
parent    | f6c9bffb15e04ea412db4df22a3851448221b85a
merge debian/3.1.1-1
Diffstat (limited to 'engine/SCons/Node')
-rw-r--r-- | engine/SCons/Node/Alias.py    |   4
-rw-r--r-- | engine/SCons/Node/FS.py       | 391
-rw-r--r-- | engine/SCons/Node/Python.py   |  17
-rw-r--r-- | engine/SCons/Node/__init__.py | 122
4 files changed, 432 insertions, 102 deletions
diff --git a/engine/SCons/Node/Alias.py b/engine/SCons/Node/Alias.py index 78923b4..302a704 100644 --- a/engine/SCons/Node/Alias.py +++ b/engine/SCons/Node/Alias.py @@ -8,7 +8,7 @@ This creates a hash of global Aliases (dummy targets). """ # -# Copyright (c) 2001 - 2017 The SCons Foundation +# Copyright (c) 2001 - 2019 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -30,7 +30,7 @@ This creates a hash of global Aliases (dummy targets). # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # -__revision__ = "src/engine/SCons/Node/Alias.py 74b2c53bc42290e911b334a6b44f187da698a668 2017/11/14 13:16:53 bdbaddog" +__revision__ = "src/engine/SCons/Node/Alias.py 72ae09dc35ac2626f8ff711d8c4b30b6138e08e3 2019-08-08 14:50:06 bdeegan" import collections diff --git a/engine/SCons/Node/FS.py b/engine/SCons/Node/FS.py index ecde942..0d903de 100644 --- a/engine/SCons/Node/FS.py +++ b/engine/SCons/Node/FS.py @@ -11,7 +11,7 @@ that can be used by scripts or modules looking for the canonical default. """ # -# Copyright (c) 2001 - 2017 The SCons Foundation +# Copyright (c) 2001 - 2019 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -33,7 +33,7 @@ that can be used by scripts or modules looking for the canonical default. # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. from __future__ import print_function -__revision__ = "src/engine/SCons/Node/FS.py 74b2c53bc42290e911b334a6b44f187da698a668 2017/11/14 13:16:53 bdbaddog" +__revision__ = "src/engine/SCons/Node/FS.py 72ae09dc35ac2626f8ff711d8c4b30b6138e08e3 2019-08-08 14:50:06 bdeegan" import fnmatch import os @@ -43,6 +43,7 @@ import stat import sys import time import codecs +from itertools import chain import SCons.Action import SCons.Debug @@ -59,6 +60,8 @@ from SCons.Debug import Trace print_duplicate = 0 +MD5_TIMESTAMP_DEBUG = False + def sconsign_none(node): raise NotImplementedError @@ -74,6 +77,9 @@ def sconsign_dir(node): _sconsign_map = {0 : sconsign_none, 1 : sconsign_dir} +class FileBuildInfoFileToCsigMappingError(Exception): + pass + class EntryProxyAttributeError(AttributeError): """ An AttributeError subclass for recording and displaying the name @@ -132,7 +138,10 @@ def initialize_do_splitdrive(): global do_splitdrive global has_unc drive, path = os.path.splitdrive('X:/foo') - has_unc = hasattr(os.path, 'splitunc') + # splitunc is removed from python 3.7 and newer + # so we can also just test if splitdrive works with UNC + has_unc = (hasattr(os.path, 'splitunc') + or os.path.splitdrive(r'\\split\drive\test')[0] == r'\\split\drive') do_splitdrive = not not drive or has_unc @@ -272,7 +281,7 @@ def set_duplicate(duplicate): 'copy' : _copy_func } - if not duplicate in Valid_Duplicates: + if duplicate not in Valid_Duplicates: raise SCons.Errors.InternalError("The argument of set_duplicate " "should be in Valid_Duplicates") global Link_Funcs @@ -282,11 +291,13 @@ def set_duplicate(duplicate): Link_Funcs.append(link_dict[func]) def LinkFunc(target, source, env): - # Relative paths cause problems with symbolic links, so - # we use absolute paths, which may be a problem for people - # who want to move their soft-linked src-trees around. Those - # people should use the 'hard-copy' mode, softlinks cannot be - # used for that; at least I have no idea how ... 
+ """ + Relative paths cause problems with symbolic links, so + we use absolute paths, which may be a problem for people + who want to move their soft-linked src-trees around. Those + people should use the 'hard-copy' mode, softlinks cannot be + used for that; at least I have no idea how ... + """ src = source[0].get_abspath() dest = target[0].get_abspath() dir, file = os.path.split(dest) @@ -328,7 +339,12 @@ Unlink = SCons.Action.Action(UnlinkFunc, None) def MkdirFunc(target, source, env): t = target[0] - if not t.exists(): + # This os.path.exists test looks redundant, but it's possible + # when using Install() to install multiple dirs outside the + # source tree to get a case where t.exists() is true but + # the path does already exist, so this prevents spurious + # build failures in that case. See test/Install/multi-dir. + if not t.exists() and not os.path.exists(t.get_abspath()): t.fs.mkdir(t.get_abspath()) return 0 @@ -463,7 +479,7 @@ class EntryProxy(SCons.Util.Proxy): return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_posix") def __get_windows_path(self): - """Return the path with \ as the path separator, + r"""Return the path with \ as the path separator, regardless of platform.""" if OS_SEP == '\\': return self @@ -514,7 +530,7 @@ class EntryProxy(SCons.Util.Proxy): except KeyError: try: attr = SCons.Util.Proxy.__getattr__(self, name) - except AttributeError as e: + except AttributeError: # Raise our own AttributeError subclass with an # overridden __str__() method that identifies the # name of the entry that caused the exception. @@ -682,10 +698,15 @@ class Base(SCons.Node.Node): @SCons.Memoize.CountMethodCall def stat(self): - try: return self._memo['stat'] - except KeyError: pass - try: result = self.fs.stat(self.get_abspath()) - except os.error: result = None + try: + return self._memo['stat'] + except KeyError: + pass + try: + result = self.fs.stat(self.get_abspath()) + except os.error: + result = None + self._memo['stat'] = result return result @@ -697,13 +718,17 @@ class Base(SCons.Node.Node): def getmtime(self): st = self.stat() - if st: return st[stat.ST_MTIME] - else: return None + if st: + return st[stat.ST_MTIME] + else: + return None def getsize(self): st = self.stat() - if st: return st[stat.ST_SIZE] - else: return None + if st: + return st[stat.ST_SIZE] + else: + return None def isdir(self): st = self.stat() @@ -1048,21 +1073,22 @@ _classEntry = Entry class LocalFS(object): - - # This class implements an abstraction layer for operations involving - # a local file system. Essentially, this wraps any function in - # the os, os.path or shutil modules that we use to actually go do - # anything with or to the local file system. - # - # Note that there's a very good chance we'll refactor this part of - # the architecture in some way as we really implement the interface(s) - # for remote file system Nodes. For example, the right architecture - # might be to have this be a subclass instead of a base class. - # Nevertheless, we're using this as a first step in that direction. - # - # We're not using chdir() yet because the calling subclass method - # needs to use os.chdir() directly to avoid recursion. Will we - # really need this one? + """ + This class implements an abstraction layer for operations involving + a local file system. Essentially, this wraps any function in + the os, os.path or shutil modules that we use to actually go do + anything with or to the local file system. 
+ + Note that there's a very good chance we'll refactor this part of + the architecture in some way as we really implement the interface(s) + for remote file system Nodes. For example, the right architecture + might be to have this be a subclass instead of a base class. + Nevertheless, we're using this as a first step in that direction. + + We're not using chdir() yet because the calling subclass method + needs to use os.chdir() directly to avoid recursion. Will we + really need this one? + """ #def chdir(self, path): # return os.chdir(path) def chmod(self, path, mode): @@ -1389,10 +1415,10 @@ class FS(LocalFS): if not isinstance(d, SCons.Node.Node): d = self.Dir(d) self.Top.addRepository(d) - + def PyPackageDir(self, modulename): - """Locate the directory of a given python module name - + r"""Locate the directory of a given python module name + For example scons might resolve to Windows: C:\Python27\Lib\site-packages\scons-2.5.1 Linux: /usr/lib/scons @@ -1662,7 +1688,7 @@ class Dir(Base): return result def addRepository(self, dir): - if dir != self and not dir in self.repositories: + if dir != self and dir not in self.repositories: self.repositories.append(dir) dir._tpath = '.' self.__clearRepositoryCache() @@ -1702,7 +1728,7 @@ class Dir(Base): if self is other: result = '.' - elif not other in self._path_elements: + elif other not in self._path_elements: try: other_dir = other.get_dir() except AttributeError: @@ -2234,7 +2260,7 @@ class RootDir(Dir): this directory. """ - __slots__ = ['_lookupDict'] + __slots__ = ('_lookupDict', ) def __init__(self, drive, fs): if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.FS.RootDir') @@ -2440,7 +2466,7 @@ class FileNodeInfo(SCons.Node.NodeInfoBase): """ state = getattr(self, '__dict__', {}).copy() for obj in type(self).mro(): - for name in getattr(obj,'__slots__',()): + for name in getattr(obj, '__slots__', ()): if hasattr(self, name): state[name] = getattr(self, name) @@ -2462,11 +2488,42 @@ class FileNodeInfo(SCons.Node.NodeInfoBase): if key not in ('__weakref__',): setattr(self, key, value) + def __eq__(self, other): + return self.csig == other.csig and self.timestamp == other.timestamp and self.size == other.size + + def __ne__(self, other): + return not self.__eq__(other) + class FileBuildInfo(SCons.Node.BuildInfoBase): - __slots__ = () + """ + This is info loaded from sconsign. + + Attributes unique to FileBuildInfo: + dependency_map : Caches file->csig mapping + for all dependencies. Currently this is only used when using + MD5-timestamp decider. + It's used to ensure that we copy the correct + csig from previous build to be written to .sconsign when current build + is done. Previously the matching of csig to file was strictly by order + they appeared in bdepends, bsources, or bimplicit, and so a change in order + or count of any of these could yield writing wrong csig, and then false positive + rebuilds + """ + __slots__ = ['dependency_map', ] current_version_id = 2 + def __setattr__(self, key, value): + + # If any attributes are changed in FileBuildInfo, we need to + # invalidate the cached map of file name to content signature + # heald in dependency_map. 
Currently only used with + # MD5-timestamp decider + if key != 'dependency_map' and hasattr(self, 'dependency_map'): + del self.dependency_map + + return super(FileBuildInfo, self).__setattr__(key, value) + def convert_to_sconsign(self): """ Converts this FileBuildInfo object for writing to a .sconsign file @@ -3225,46 +3282,232 @@ class File(Base): self._memo['changed'] = has_changed return has_changed - def changed_content(self, target, prev_ni): + def changed_content(self, target, prev_ni, repo_node=None): cur_csig = self.get_csig() try: return cur_csig != prev_ni.csig except AttributeError: return 1 - def changed_state(self, target, prev_ni): + def changed_state(self, target, prev_ni, repo_node=None): return self.state != SCons.Node.up_to_date - def changed_timestamp_then_content(self, target, prev_ni): - if not self.changed_timestamp_match(target, prev_ni): + + # Caching node -> string mapping for the below method + __dmap_cache = {} + __dmap_sig_cache = {} + + + def _build_dependency_map(self, binfo): + """ + Build mapping from file -> signature + + Args: + self - self + binfo - buildinfo from node being considered + + Returns: + dictionary of file->signature mappings + """ + + # For an "empty" binfo properties like bsources + # do not exist: check this to avoid exception. + if (len(binfo.bsourcesigs) + len(binfo.bdependsigs) + \ + len(binfo.bimplicitsigs)) == 0: + return {} + + binfo.dependency_map = { child:signature for child, signature in zip(chain(binfo.bsources, binfo.bdepends, binfo.bimplicit), + chain(binfo.bsourcesigs, binfo.bdependsigs, binfo.bimplicitsigs))} + + return binfo.dependency_map + + # @profile + def _add_strings_to_dependency_map(self, dmap): + """ + In the case comparing node objects isn't sufficient, we'll add the strings for the nodes to the dependency map + :return: + """ + + first_string = str(next(iter(dmap))) + + # print("DMAP:%s"%id(dmap)) + if first_string not in dmap: + string_dict = {str(child): signature for child, signature in dmap.items()} + dmap.update(string_dict) + return dmap + + def _get_previous_signatures(self, dmap): + """ + Return a list of corresponding csigs from previous + build in order of the node/files in children. + + Args: + self - self + dmap - Dictionary of file -> csig + + Returns: + List of csigs for provided list of children + """ + prev = [] + # MD5_TIMESTAMP_DEBUG = False + + if len(dmap) == 0: + if MD5_TIMESTAMP_DEBUG: print("Nothing dmap shortcutting") + return None + elif MD5_TIMESTAMP_DEBUG: print("len(dmap):%d"%len(dmap)) + + + # First try retrieving via Node + if MD5_TIMESTAMP_DEBUG: print("Checking if self is in map:%s id:%s type:%s"%(str(self), id(self), type(self))) + df = dmap.get(self, False) + if df: + return df + + # Now check if self's repository file is in map. + rf = self.rfile() + if MD5_TIMESTAMP_DEBUG: print("Checking if self.rfile is in map:%s id:%s type:%s"%(str(rf), id(rf), type(rf))) + rfm = dmap.get(rf, False) + if rfm: + return rfm + + # get default string for node and then also string swapping os.altsep for os.sep (/ for \) + c_strs = [str(self)] + + if os.altsep: + c_strs.append(c_strs[0].replace(os.sep, os.altsep)) + + # In some cases the dependency_maps' keys are already strings check. + # Check if either string is now in dmap. + for s in c_strs: + if MD5_TIMESTAMP_DEBUG: print("Checking if str(self) is in map :%s" % s) + df = dmap.get(s, False) + if df: + return df + + # Strings don't exist in map, add them and try again + # If there are no strings in this dmap, then add them. 
+ # This may not be necessary, we could walk the nodes in the dmap and check each string + # rather than adding ALL the strings to dmap. In theory that would be n/2 vs 2n str() calls on node + # if not dmap.has_strings: + dmap = self._add_strings_to_dependency_map(dmap) + + # In some cases the dependency_maps' keys are already strings check. + # Check if either string is now in dmap. + for s in c_strs: + if MD5_TIMESTAMP_DEBUG: print("Checking if str(self) is in map (now with strings) :%s" % s) + df = dmap.get(s, False) + if df: + return df + + # Lastly use nodes get_path() to generate string and see if that's in dmap + if not df: + try: + # this should yield a path which matches what's in the sconsign + c_str = self.get_path() + if os.altsep: + c_str = c_str.replace(os.sep, os.altsep) + + if MD5_TIMESTAMP_DEBUG: print("Checking if self.get_path is in map (now with strings) :%s" % s) + + df = dmap.get(c_str, None) + + except AttributeError as e: + raise FileBuildInfoFileToCsigMappingError("No mapping from file name to content signature for :%s"%c_str) + + return df + + def changed_timestamp_then_content(self, target, prev_ni, node=None): + """ + Used when decider for file is Timestamp-MD5 + + NOTE: If the timestamp hasn't changed this will skip md5'ing the + file and just copy the prev_ni provided. If the prev_ni + is wrong. It will propagate it. + See: https://github.com/SCons/scons/issues/2980 + + Args: + self - dependency + target - target + prev_ni - The NodeInfo object loaded from previous builds .sconsign + node - Node instance. Check this node for file existence/timestamp + if specified. + + Returns: + Boolean - Indicates if node(File) has changed. + """ + + if node is None: + node = self + # Now get sconsign name -> csig map and then get proper prev_ni if possible + bi = node.get_stored_info().binfo + rebuilt = False + try: + dependency_map = bi.dependency_map + except AttributeError as e: + dependency_map = self._build_dependency_map(bi) + rebuilt = True + + if len(dependency_map) == 0: + # If there's no dependency map, there's no need to find the + # prev_ni as there aren't any + # shortcut the rest of the logic + if MD5_TIMESTAMP_DEBUG: print("Skipping checks len(dmap)=0") + + # We still need to get the current file's csig + # This should be slightly faster than calling self.changed_content(target, new_prev_ni) + self.get_csig() + return True + + new_prev_ni = self._get_previous_signatures(dependency_map) + new = self.changed_timestamp_match(target, new_prev_ni) + + if MD5_TIMESTAMP_DEBUG: + old = self.changed_timestamp_match(target, prev_ni) + + if old != new: + print("Mismatch self.changed_timestamp_match(%s, prev_ni) old:%s new:%s"%(str(target), old, new)) + new_prev_ni = self._get_previous_signatures(dependency_map) + + if not new: try: - self.get_ninfo().csig = prev_ni.csig + # NOTE: We're modifying the current node's csig in a query. 
+ self.get_ninfo().csig = new_prev_ni.csig except AttributeError: pass return False - return self.changed_content(target, prev_ni) + return self.changed_content(target, new_prev_ni) - def changed_timestamp_newer(self, target, prev_ni): + def changed_timestamp_newer(self, target, prev_ni, repo_node=None): try: return self.get_timestamp() > target.get_timestamp() except AttributeError: return 1 - def changed_timestamp_match(self, target, prev_ni): + def changed_timestamp_match(self, target, prev_ni, repo_node=None): + """ + Return True if the timestamps don't match or if there is no previous timestamp + :param target: + :param prev_ni: Information about the node from the previous build + :return: + """ try: return self.get_timestamp() != prev_ni.timestamp except AttributeError: return 1 def is_up_to_date(self): + """Check for whether the Node is current + In all cases self is the target we're checking to see if it's up to date + """ + T = 0 if T: Trace('is_up_to_date(%s):' % self) if not self.exists(): if T: Trace(' not self.exists():') - # The file doesn't exist locally... + # The file (always a target) doesn't exist locally... r = self.rfile() if r != self: - # ...but there is one in a Repository... + # ...but there is one (always a target) in a Repository... if not self.changed(r): if T: Trace(' changed(%s):' % r) # ...and it's even up-to-date... @@ -3272,7 +3515,9 @@ class File(Base): # ...and they'd like a local copy. e = LocalCopy(self, r, None) if isinstance(e, SCons.Errors.BuildError): - raise + # Likely this should be re-raising exception e + # (which would be BuildError) + raise e SCons.Node.store_info_map[self.store_info](self) if T: Trace(' 1\n') return 1 @@ -3293,11 +3538,14 @@ class File(Base): result = self if not self.exists(): norm_name = _my_normcase(self.name) - for dir in self.dir.get_all_rdirs(): - try: node = dir.entries[norm_name] - except KeyError: node = dir.file_on_disk(self.name) + for repo_dir in self.dir.get_all_rdirs(): + try: + node = repo_dir.entries[norm_name] + except KeyError: + node = repo_dir.file_on_disk(self.name) + if node and node.exists() and \ - (isinstance(node, File) or isinstance(node, Entry) \ + (isinstance(node, File) or isinstance(node, Entry) or not node.is_derived()): result = node # Copy over our local attributes to the repository @@ -3317,6 +3565,28 @@ class File(Base): self._memo['rfile'] = result return result + def find_repo_file(self): + """ + For this node, find if there exists a corresponding file in one or more repositories + :return: list of corresponding files in repositories + """ + retvals = [] + + norm_name = _my_normcase(self.name) + for repo_dir in self.dir.get_all_rdirs(): + try: + node = repo_dir.entries[norm_name] + except KeyError: + node = repo_dir.file_on_disk(self.name) + + if node and node.exists() and \ + (isinstance(node, File) or isinstance(node, Entry) \ + or not node.is_derived()): + retvals.append(node) + + return retvals + + def rstr(self): return str(self.rfile()) @@ -3374,6 +3644,8 @@ class File(Base): because multiple targets built by the same action will all have the same build signature, and we have to differentiate them somehow. + + Signature should normally be string of hex digits. """ try: return self.cachesig @@ -3383,10 +3655,13 @@ class File(Base): # Collect signatures for all children children = self.children() sigs = [n.get_cachedir_csig() for n in children] + # Append this node's signature... 
sigs.append(self.get_contents_sig()) + # ...and it's path sigs.append(self.get_internal_path()) + # Merge this all into a single signature result = self.cachesig = SCons.Util.MD5collect(sigs) return result diff --git a/engine/SCons/Node/Python.py b/engine/SCons/Node/Python.py index f12e6ef..8726332 100644 --- a/engine/SCons/Node/Python.py +++ b/engine/SCons/Node/Python.py @@ -5,7 +5,7 @@ Python nodes. """ # -# Copyright (c) 2001 - 2017 The SCons Foundation +# Copyright (c) 2001 - 2019 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -27,7 +27,7 @@ Python nodes. # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # -__revision__ = "src/engine/SCons/Node/Python.py 74b2c53bc42290e911b334a6b44f187da698a668 2017/11/14 13:16:53 bdbaddog" +__revision__ = "src/engine/SCons/Node/Python.py 72ae09dc35ac2626f8ff711d8c4b30b6138e08e3 2019-08-08 14:50:06 bdeegan" import SCons.Node @@ -137,6 +137,10 @@ class Value(SCons.Node.Node): return contents def get_contents(self): + """ + Get contents for signature calculations. + :return: bytes + """ text_contents = self.get_text_contents() try: return text_contents.encode() @@ -155,12 +159,17 @@ class Value(SCons.Node.Node): def get_csig(self, calc=None): """Because we're a Python value node and don't have a real timestamp, we get to ignore the calculator and just use the - value contents.""" + value contents. + + Returns string. Ideally string of hex digits. (Not bytes) + """ try: return self.ninfo.csig except AttributeError: pass - contents = self.get_contents() + + contents = self.get_text_contents() + self.get_ninfo().csig = contents return contents diff --git a/engine/SCons/Node/__init__.py b/engine/SCons/Node/__init__.py index 299673f..c1f8d89 100644 --- a/engine/SCons/Node/__init__.py +++ b/engine/SCons/Node/__init__.py @@ -22,7 +22,7 @@ be able to depend on any other type of "thing." from __future__ import print_function # -# Copyright (c) 2001 - 2017 The SCons Foundation +# Copyright (c) 2001 - 2019 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -43,12 +43,18 @@ from __future__ import print_function # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -__revision__ = "src/engine/SCons/Node/__init__.py 74b2c53bc42290e911b334a6b44f187da698a668 2017/11/14 13:16:53 bdbaddog" +__revision__ = "src/engine/SCons/Node/__init__.py 72ae09dc35ac2626f8ff711d8c4b30b6138e08e3 2019-08-08 14:50:06 bdeegan" +import os import collections import copy from itertools import chain +try: + from itertools import zip_longest +except ImportError: + from itertools import izip_longest as zip_longest + import SCons.Debug from SCons.Debug import logInstanceCreation import SCons.Executor @@ -102,9 +108,9 @@ implicit_deps_changed = 0 # A variable that can be set to an interface-specific function be called # to annotate a Node with information about its creation. -def do_nothing(node): pass +def do_nothing_node(node): pass -Annotate = do_nothing +Annotate = do_nothing_node # Gets set to 'True' if we're running in interactive mode. 
Is # currently used to release parts of a target's info during @@ -139,6 +145,7 @@ def exists_entry(node): node.disambiguate() return _exists_map[node._func_exists](node) + def exists_file(node): # Duplicate from source path if we are set up to do this. if node.duplicate and not node.is_derived() and not node.linked: @@ -155,7 +162,7 @@ def exists_file(node): # The source file does not exist. Make sure no old # copy remains in the variant directory. if print_duplicate: - print("dup: no src for %s, unlinking old variant copy"%self) + print("dup: no src for %s, unlinking old variant copy" % node) if exists_base(node) or node.islink(): node.fs.unlink(node.get_internal_path()) # Return None explicitly because the Base.exists() call @@ -249,7 +256,7 @@ _target_from_source_map = {0 : target_from_source_none, # # First, the single decider functions # -def changed_since_last_build_node(node, target, prev_ni): +def changed_since_last_build_node(node, target, prev_ni, repo_node=None): """ Must be overridden in a specific subclass to return True if this @@ -269,27 +276,33 @@ def changed_since_last_build_node(node, target, prev_ni): """ raise NotImplementedError -def changed_since_last_build_alias(node, target, prev_ni): + +def changed_since_last_build_alias(node, target, prev_ni, repo_node=None): cur_csig = node.get_csig() try: return cur_csig != prev_ni.csig except AttributeError: return 1 -def changed_since_last_build_entry(node, target, prev_ni): + +def changed_since_last_build_entry(node, target, prev_ni, repo_node=None): node.disambiguate() - return _decider_map[node.changed_since_last_build](node, target, prev_ni) + return _decider_map[node.changed_since_last_build](node, target, prev_ni, repo_node) + -def changed_since_last_build_state_changed(node, target, prev_ni): - return (node.state != SCons.Node.up_to_date) +def changed_since_last_build_state_changed(node, target, prev_ni, repo_node=None): + return node.state != SCons.Node.up_to_date -def decide_source(node, target, prev_ni): - return target.get_build_env().decide_source(node, target, prev_ni) -def decide_target(node, target, prev_ni): - return target.get_build_env().decide_target(node, target, prev_ni) +def decide_source(node, target, prev_ni, repo_node=None): + return target.get_build_env().decide_source(node, target, prev_ni, repo_node) -def changed_since_last_build_python(node, target, prev_ni): + +def decide_target(node, target, prev_ni, repo_node=None): + return target.get_build_env().decide_target(node, target, prev_ni, repo_node) + + +def changed_since_last_build_python(node, target, prev_ni, repo_node=None): cur_csig = node.get_csig() try: return cur_csig != prev_ni.csig @@ -380,6 +393,7 @@ class NodeInfoBase(object): """ state = other.__getstate__() self.__setstate__(state) + def format(self, field_list=None, names=0): if field_list is None: try: @@ -505,6 +519,7 @@ class Node(object, with_metaclass(NoSlotsPyPy)): __slots__ = ['sources', 'sources_set', + 'target_peers', '_specific_sources', 'depends', 'depends_set', @@ -665,7 +680,7 @@ class Node(object, with_metaclass(NoSlotsPyPy)): executor.cleanup() def reset_executor(self): - "Remove cached executor; forces recompute when needed." 
+ """Remove cached executor; forces recompute when needed.""" try: delattr(self, 'executor') except AttributeError: @@ -760,6 +775,25 @@ class Node(object, with_metaclass(NoSlotsPyPy)): for parent in self.waiting_parents: parent.implicit = None + # Handle issue where builder emits more than one target and + # the source file for the builder is generated. + # in that case only the first target was getting it's .implicit + # cleared when the source file is built (second scan). + # leaving only partial implicits from scan before source file is generated + # typically the compiler only. Then scanned files are appended + # This is persisted to sconsign and rebuild causes false rebuilds + # because the ordering of the implicit list then changes to what it + # should have been. + # This is at least the following bugs + # https://github.com/SCons/scons/issues/2811 + # https://jira.mongodb.org/browse/SERVER-33111 + try: + for peer in parent.target_peers: + peer.implicit = None + except AttributeError: + pass + + self.clear() if self.pseudo: @@ -1136,7 +1170,7 @@ class Node(object, with_metaclass(NoSlotsPyPy)): binfo.bactsig = SCons.Util.MD5signature(executor.get_contents()) if self._specific_sources: - sources = [ s for s in self.sources if not s in ignore_set] + sources = [s for s in self.sources if s not in ignore_set] else: sources = executor.get_unignored_sources(self, self.ignore) @@ -1145,13 +1179,17 @@ class Node(object, with_metaclass(NoSlotsPyPy)): binfo.bsources = [s for s in sources if s not in seen and not seen.add(s)] binfo.bsourcesigs = [s.get_ninfo() for s in binfo.bsources] + binfo.bdepends = [d for d in self.depends if d not in ignore_set] + binfo.bdependsigs = [d.get_ninfo() for d in self.depends] - binfo.bdepends = self.depends - binfo.bdependsigs = [d.get_ninfo() for d in self.depends if d not in ignore_set] - - binfo.bimplicit = self.implicit or [] - binfo.bimplicitsigs = [i.get_ninfo() for i in binfo.bimplicit if i not in ignore_set] - + # Because self.implicit is initialized to None (and not empty list []) + # we have to handle this case + if not self.implicit: + binfo.bimplicit = [] + binfo.bimplicitsigs = [] + else: + binfo.bimplicit = [i for i in self.implicit if i not in ignore_set] + binfo.bimplicitsigs = [i.get_ninfo() for i in binfo.bimplicit] return binfo @@ -1213,7 +1251,7 @@ class Node(object, with_metaclass(NoSlotsPyPy)): return _exists_map[self._func_exists](self) def rexists(self): - """Does this node exist locally or in a repositiory?""" + """Does this node exist locally or in a repository?""" # There are no repositories by default: return _rexists_map[self._func_rexists](self) @@ -1452,13 +1490,12 @@ class Node(object, with_metaclass(NoSlotsPyPy)): result = True for child, prev_ni in zip(children, then): - if _decider_map[child.changed_since_last_build](child, self, prev_ni): + if _decider_map[child.changed_since_last_build](child, self, prev_ni, node): if t: Trace(': %s changed' % child) result = True - contents = self.get_executor().get_contents() if self.has_builder(): - import SCons.Util + contents = self.get_executor().get_contents() newsig = SCons.Util.MD5signature(contents) if bi.bactsig != newsig: if t: Trace(': bactsig %s != newsig %s' % (bi.bactsig, newsig)) @@ -1607,29 +1644,39 @@ class Node(object, with_metaclass(NoSlotsPyPy)): # so we only print them after running them through this lambda # to turn them into the right relative Node and then return # its string. 
- def stringify( s, E=self.dir.Entry ) : + def stringify( s, E=self.dir.Entry): if hasattr( s, 'dir' ) : return str(E(s)) return str(s) lines = [] - removed = [x for x in old_bkids if not x in new_bkids] + removed = [x for x in old_bkids if x not in new_bkids] if removed: - removed = list(map(stringify, removed)) + removed = [stringify(r) for r in removed] fmt = "`%s' is no longer a dependency\n" lines.extend([fmt % s for s in removed]) for k in new_bkids: - if not k in old_bkids: + if k not in old_bkids: lines.append("`%s' is a new dependency\n" % stringify(k)) - elif _decider_map[k.changed_since_last_build](k, self, osig[k]): - lines.append("`%s' changed\n" % stringify(k)) + else: + changed = _decider_map[k.changed_since_last_build](k, self, osig[k]) + + if changed: + lines.append("`%s' changed\n" % stringify(k)) if len(lines) == 0 and old_bkids != new_bkids: - lines.append("the dependency order changed:\n" + - "%sold: %s\n" % (' '*15, list(map(stringify, old_bkids))) + - "%snew: %s\n" % (' '*15, list(map(stringify, new_bkids)))) + lines.append("the dependency order changed:\n") + lines.append("->Sources\n") + for (o,n) in zip_longest(old.bsources, new.bsources, fillvalue=None): + lines.append("Old:%s\tNew:%s\n"%(o,n)) + lines.append("->Depends\n") + for (o,n) in zip_longest(old.bdepends, new.bdepends, fillvalue=None): + lines.append("Old:%s\tNew:%s\n"%(o,n)) + lines.append("->Implicit\n") + for (o,n) in zip_longest(old.bimplicit, new.bimplicit, fillvalue=None): + lines.append("Old:%s\tNew:%s\n"%(o,n)) if len(lines) == 0: def fmt_with_title(title, strlines): @@ -1672,7 +1719,6 @@ class Walker(object): This is depth-first, children are visited before the parent. The Walker object can be initialized with any node, and returns the next node on the descent with each get_next() call. - 'kids_func' is an optional function that will be called to get the children of a node instead of calling 'children'. 'cycle_func' is an optional function that will be called when a cycle is detected. |
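The FS.py hunks above add a cached file-to-csig dependency map for the MD5-timestamp decider. As a rough sketch of the idea only (the function and attribute names below mirror the diff, but the objects are stand-ins rather than the real SCons Node/FileBuildInfo API):

```python
# Minimal sketch of the dependency-map idea from the FS.py changes above.
# `binfo` stands in for a stored FileBuildInfo-like object carrying the
# bsources/bdepends/bimplicit lists and their parallel *sigs lists.
from itertools import chain


def build_dependency_map(binfo):
    """Pair each stored child with the signature recorded for it, by
    position, so later lookups no longer rely on list order alone."""
    children = chain(binfo.bsources, binfo.bdepends, binfo.bimplicit)
    sigs = chain(binfo.bsourcesigs, binfo.bdependsigs, binfo.bimplicitsigs)
    return {child: sig for child, sig in zip(children, sigs)}


def changed_timestamp_then_content(node, stored_ninfo):
    """Timestamp-MD5 decision: skip hashing when the timestamp matches the
    stored one, otherwise fall back to comparing content signatures."""
    if node.get_timestamp() == stored_ninfo.timestamp:
        return False  # unchanged timestamp: reuse the previous csig
    return node.get_csig() != stored_ninfo.csig
```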
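The __init__.py change replaces the single-line "dependency order changed" message with a side-by-side listing built with zip_longest (and an izip_longest fallback for Python 2). A self-contained sketch of that reporting pattern, using made-up file names:

```python
# Side-by-side old/new dependency listing, as in the __init__.py hunk;
# zip_longest pads the shorter list with None so length changes stay visible.
try:
    from itertools import zip_longest                    # Python 3
except ImportError:
    from itertools import izip_longest as zip_longest    # Python 2

def format_order_change(old_children, new_children):
    lines = ["the dependency order changed:\n", "->Sources\n"]
    for o, n in zip_longest(old_children, new_children, fillvalue=None):
        lines.append("Old:%s\tNew:%s\n" % (o, n))
    return "".join(lines)

# Example: one dependency reordered, one added.
print(format_order_change(["a.c", "b.c"], ["b.c", "a.c", "c.c"]))
```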
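The initialize_do_splitdrive() hunk works around the removal of os.path.splitunc in Python 3.7 by probing whether os.path.splitdrive understands UNC paths. The probe itself is small enough to run standalone (the result differs between Windows and POSIX):

```python
import os.path

# os.path.splitunc is gone in Python 3.7+; on Windows, splitdrive() returns
# the \\host\share prefix of a UNC path as the "drive", so checking that
# result answers the same has_unc question without the removed function.
has_unc = (hasattr(os.path, 'splitunc')
           or os.path.splitdrive(r'\\split\drive\test')[0] == r'\\split\drive')

print("UNC paths supported:", has_unc)
```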
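Finally, the Python.py hunks document that Value.get_contents() returns bytes for signature calculation while get_csig() now stores the text contents as a str. A toy stand-in class (not the real SCons Value node) showing that bytes-vs-str split:

```python
class TinyValue(object):
    """Hypothetical stand-in for a value node, illustrating the convention
    from the Python.py hunk: hashing input is bytes, the csig is a str."""

    def __init__(self, value):
        self.value = value
        self.csig = None

    def get_text_contents(self):
        return str(self.value)                      # always text

    def get_contents(self):
        return self.get_text_contents().encode()    # bytes, for signatures

    def get_csig(self, calc=None):
        if self.csig is None:
            self.csig = self.get_text_contents()    # cached as a str
        return self.csig


v = TinyValue(42)
assert isinstance(v.get_contents(), bytes)
assert isinstance(v.get_csig(), str)
```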