author     Jörg Frings-Fürst <debian@jff-webhosting.net>    2019-08-11 12:17:57 +0200
committer  Jörg Frings-Fürst <debian@jff-webhosting.net>    2019-08-11 12:17:57 +0200
commit     c8ea3b672655ddab746a7aea5a50217057b02b9e (patch)
tree       01a0e712f4cf32c7140cf1a4ae14db4da4202253 /engine/SCons/Node/FS.py
parent     ca7be46fc0013fc037a045b6d4df73776461e821 (diff)
parent     f6c9bffb15e04ea412db4df22a3851448221b85a (diff)
Diffstat (limited to 'engine/SCons/Node/FS.py')
-rw-r--r--  engine/SCons/Node/FS.py  391
1 file changed, 333 insertions(+), 58 deletions(-)
diff --git a/engine/SCons/Node/FS.py b/engine/SCons/Node/FS.py
index ecde942..0d903de 100644
--- a/engine/SCons/Node/FS.py
+++ b/engine/SCons/Node/FS.py
@@ -11,7 +11,7 @@ that can be used by scripts or modules looking for the canonical default.
"""
#
-# Copyright (c) 2001 - 2017 The SCons Foundation
+# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@@ -33,7 +33,7 @@ that can be used by scripts or modules looking for the canonical default.
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
-__revision__ = "src/engine/SCons/Node/FS.py 74b2c53bc42290e911b334a6b44f187da698a668 2017/11/14 13:16:53 bdbaddog"
+__revision__ = "src/engine/SCons/Node/FS.py 72ae09dc35ac2626f8ff711d8c4b30b6138e08e3 2019-08-08 14:50:06 bdeegan"
import fnmatch
import os
@@ -43,6 +43,7 @@ import stat
import sys
import time
import codecs
+from itertools import chain
import SCons.Action
import SCons.Debug
@@ -59,6 +60,8 @@ from SCons.Debug import Trace
print_duplicate = 0
+MD5_TIMESTAMP_DEBUG = False
+
def sconsign_none(node):
raise NotImplementedError
@@ -74,6 +77,9 @@ def sconsign_dir(node):
_sconsign_map = {0 : sconsign_none,
1 : sconsign_dir}
+class FileBuildInfoFileToCsigMappingError(Exception):
+ pass
+
class EntryProxyAttributeError(AttributeError):
"""
An AttributeError subclass for recording and displaying the name
@@ -132,7 +138,10 @@ def initialize_do_splitdrive():
global do_splitdrive
global has_unc
drive, path = os.path.splitdrive('X:/foo')
- has_unc = hasattr(os.path, 'splitunc')
+ # splitunc is removed from python 3.7 and newer
+ # so we can also just test if splitdrive works with UNC
+ has_unc = (hasattr(os.path, 'splitunc')
+ or os.path.splitdrive(r'\\split\drive\test')[0] == r'\\split\drive')
do_splitdrive = not not drive or has_unc
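
A minimal standalone sketch of the same probe (plain Python, no SCons), assuming only the standard library: os.path.splitunc() is gone as of Python 3.7, so UNC support is inferred from whether splitdrive() recognizes a \\server\share prefix as the drive.

import os.path

def detect_unc_support():
    # Older Windows Pythons expose os.path.splitunc(); newer ones fold UNC
    # handling into splitdrive(), which returns \\server\share as the "drive".
    if hasattr(os.path, 'splitunc'):
        return True
    return os.path.splitdrive(r'\\split\drive\test')[0] == r'\\split\drive'
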
@@ -272,7 +281,7 @@ def set_duplicate(duplicate):
'copy' : _copy_func
}
- if not duplicate in Valid_Duplicates:
+ if duplicate not in Valid_Duplicates:
raise SCons.Errors.InternalError("The argument of set_duplicate "
"should be in Valid_Duplicates")
global Link_Funcs
@@ -282,11 +291,13 @@ def set_duplicate(duplicate):
Link_Funcs.append(link_dict[func])
def LinkFunc(target, source, env):
- # Relative paths cause problems with symbolic links, so
- # we use absolute paths, which may be a problem for people
- # who want to move their soft-linked src-trees around. Those
- # people should use the 'hard-copy' mode, softlinks cannot be
- # used for that; at least I have no idea how ...
+ """
+ Relative paths cause problems with symbolic links, so
+ we use absolute paths, which may be a problem for people
+ who want to move their soft-linked src-trees around. Those
+ people should use the 'hard-copy' mode, softlinks cannot be
+ used for that; at least I have no idea how ...
+ """
src = source[0].get_abspath()
dest = target[0].get_abspath()
dir, file = os.path.split(dest)
@@ -328,7 +339,12 @@ Unlink = SCons.Action.Action(UnlinkFunc, None)
def MkdirFunc(target, source, env):
t = target[0]
- if not t.exists():
+ # This os.path.exists test looks redundant, but it's possible
+ # when using Install() to install multiple dirs outside the
+ # source tree to get a case where t.exists() is true but
+ # the path does already exist, so this prevents spurious
+ # build failures in that case. See test/Install/multi-dir.
+ if not t.exists() and not os.path.exists(t.get_abspath()):
t.fs.mkdir(t.get_abspath())
return 0
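
A standalone sketch of the guard above, with a plain path and a boolean standing in for the Node API: the directory is created only when neither the build graph nor the real filesystem already has it, which avoids the spurious "directory exists" failure the comment describes.

import os

def mkdir_if_needed(abspath, node_says_exists=False):
    # Mirrors: if not t.exists() and not os.path.exists(t.get_abspath())
    if not node_says_exists and not os.path.exists(abspath):
        os.mkdir(abspath)
    return 0
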
@@ -463,7 +479,7 @@ class EntryProxy(SCons.Util.Proxy):
return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_posix")
def __get_windows_path(self):
- """Return the path with \ as the path separator,
+ r"""Return the path with \ as the path separator,
regardless of platform."""
if OS_SEP == '\\':
return self
@@ -514,7 +530,7 @@ class EntryProxy(SCons.Util.Proxy):
except KeyError:
try:
attr = SCons.Util.Proxy.__getattr__(self, name)
- except AttributeError as e:
+ except AttributeError:
# Raise our own AttributeError subclass with an
# overridden __str__() method that identifies the
# name of the entry that caused the exception.
@@ -682,10 +698,15 @@ class Base(SCons.Node.Node):
@SCons.Memoize.CountMethodCall
def stat(self):
- try: return self._memo['stat']
- except KeyError: pass
- try: result = self.fs.stat(self.get_abspath())
- except os.error: result = None
+ try:
+ return self._memo['stat']
+ except KeyError:
+ pass
+ try:
+ result = self.fs.stat(self.get_abspath())
+ except os.error:
+ result = None
+
self._memo['stat'] = result
return result
@@ -697,13 +718,17 @@ class Base(SCons.Node.Node):
def getmtime(self):
st = self.stat()
- if st: return st[stat.ST_MTIME]
- else: return None
+ if st:
+ return st[stat.ST_MTIME]
+ else:
+ return None
def getsize(self):
st = self.stat()
- if st: return st[stat.ST_SIZE]
- else: return None
+ if st:
+ return st[stat.ST_SIZE]
+ else:
+ return None
def isdir(self):
st = self.stat()
@@ -1048,21 +1073,22 @@ _classEntry = Entry
class LocalFS(object):
-
- # This class implements an abstraction layer for operations involving
- # a local file system. Essentially, this wraps any function in
- # the os, os.path or shutil modules that we use to actually go do
- # anything with or to the local file system.
- #
- # Note that there's a very good chance we'll refactor this part of
- # the architecture in some way as we really implement the interface(s)
- # for remote file system Nodes. For example, the right architecture
- # might be to have this be a subclass instead of a base class.
- # Nevertheless, we're using this as a first step in that direction.
- #
- # We're not using chdir() yet because the calling subclass method
- # needs to use os.chdir() directly to avoid recursion. Will we
- # really need this one?
+ """
+ This class implements an abstraction layer for operations involving
+ a local file system. Essentially, this wraps any function in
+ the os, os.path or shutil modules that we use to actually go do
+ anything with or to the local file system.
+
+ Note that there's a very good chance we'll refactor this part of
+ the architecture in some way as we really implement the interface(s)
+ for remote file system Nodes. For example, the right architecture
+ might be to have this be a subclass instead of a base class.
+ Nevertheless, we're using this as a first step in that direction.
+
+ We're not using chdir() yet because the calling subclass method
+ needs to use os.chdir() directly to avoid recursion. Will we
+ really need this one?
+ """
#def chdir(self, path):
# return os.chdir(path)
def chmod(self, path, mode):
@@ -1389,10 +1415,10 @@ class FS(LocalFS):
if not isinstance(d, SCons.Node.Node):
d = self.Dir(d)
self.Top.addRepository(d)
-
+
def PyPackageDir(self, modulename):
- """Locate the directory of a given python module name
-
+ r"""Locate the directory of a given python module name
+
For example scons might resolve to
Windows: C:\Python27\Lib\site-packages\scons-2.5.1
Linux: /usr/lib/scons
@@ -1662,7 +1688,7 @@ class Dir(Base):
return result
def addRepository(self, dir):
- if dir != self and not dir in self.repositories:
+ if dir != self and dir not in self.repositories:
self.repositories.append(dir)
dir._tpath = '.'
self.__clearRepositoryCache()
@@ -1702,7 +1728,7 @@ class Dir(Base):
if self is other:
result = '.'
- elif not other in self._path_elements:
+ elif other not in self._path_elements:
try:
other_dir = other.get_dir()
except AttributeError:
@@ -2234,7 +2260,7 @@ class RootDir(Dir):
this directory.
"""
- __slots__ = ['_lookupDict']
+ __slots__ = ('_lookupDict', )
def __init__(self, drive, fs):
if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.FS.RootDir')
@@ -2440,7 +2466,7 @@ class FileNodeInfo(SCons.Node.NodeInfoBase):
"""
state = getattr(self, '__dict__', {}).copy()
for obj in type(self).mro():
- for name in getattr(obj,'__slots__',()):
+ for name in getattr(obj, '__slots__', ()):
if hasattr(self, name):
state[name] = getattr(self, name)
@@ -2462,11 +2488,42 @@ class FileNodeInfo(SCons.Node.NodeInfoBase):
if key not in ('__weakref__',):
setattr(self, key, value)
+ def __eq__(self, other):
+ return self.csig == other.csig and self.timestamp == other.timestamp and self.size == other.size
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
class FileBuildInfo(SCons.Node.BuildInfoBase):
- __slots__ = ()
+ """
+ This is info loaded from sconsign.
+
+ Attributes unique to FileBuildInfo:
+ dependency_map : Caches the file->csig mapping
+ for all dependencies. Currently this is only used with the
+ MD5-timestamp decider.
+ It's used to ensure that we copy the correct
+ csig from the previous build to be written to .sconsign when the current
+ build is done. Previously the matching of csig to file was strictly by the order
+ they appeared in bdepends, bsources, or bimplicit, so a change in the order
+ or count of any of these could result in the wrong csig being written, and then
+ false-positive rebuilds.
+ """
+ __slots__ = ['dependency_map', ]
current_version_id = 2
+ def __setattr__(self, key, value):
+
+ # If any attributes are changed in FileBuildInfo, we need to
+ # invalidate the cached map of file name to content signature
+ # held in dependency_map. Currently this is only used with
+ # the MD5-timestamp decider.
+ if key != 'dependency_map' and hasattr(self, 'dependency_map'):
+ del self.dependency_map
+
+ return super(FileBuildInfo, self).__setattr__(key, value)
+
def convert_to_sconsign(self):
"""
Converts this FileBuildInfo object for writing to a .sconsign file
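
The __setattr__ override above is a small cache-invalidation pattern: any write to the build info other than the cache attribute itself drops the cached map, so it gets rebuilt from fresh data on the next lookup. A minimal sketch with illustrative names (BuildRecord is not an SCons class):

class BuildRecord(object):
    __slots__ = ['sources', 'signatures', 'dependency_map']

    def __setattr__(self, key, value):
        # Any change to the record invalidates the derived mapping.
        if key != 'dependency_map' and hasattr(self, 'dependency_map'):
            del self.dependency_map
        super(BuildRecord, self).__setattr__(key, value)

    def get_dependency_map(self):
        if not hasattr(self, 'dependency_map'):
            self.dependency_map = dict(zip(self.sources, self.signatures))
        return self.dependency_map

rec = BuildRecord()
rec.sources, rec.signatures = ['a.c', 'b.c'], ['sig_a', 'sig_b']
rec.get_dependency_map()   # cached: {'a.c': 'sig_a', 'b.c': 'sig_b'}
rec.sources = ['a.c']      # cache dropped, rebuilt on next access
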
@@ -3225,46 +3282,232 @@ class File(Base):
self._memo['changed'] = has_changed
return has_changed
- def changed_content(self, target, prev_ni):
+ def changed_content(self, target, prev_ni, repo_node=None):
cur_csig = self.get_csig()
try:
return cur_csig != prev_ni.csig
except AttributeError:
return 1
- def changed_state(self, target, prev_ni):
+ def changed_state(self, target, prev_ni, repo_node=None):
return self.state != SCons.Node.up_to_date
- def changed_timestamp_then_content(self, target, prev_ni):
- if not self.changed_timestamp_match(target, prev_ni):
+
+ # Caching node -> string mapping for the below method
+ __dmap_cache = {}
+ __dmap_sig_cache = {}
+
+
+ def _build_dependency_map(self, binfo):
+ """
+ Build mapping from file -> signature
+
+ Args:
+ self - self
+ binfo - buildinfo from node being considered
+
+ Returns:
+ dictionary of file->signature mappings
+ """
+
+ # For an "empty" binfo properties like bsources
+ # do not exist: check this to avoid exception.
+ if (len(binfo.bsourcesigs) + len(binfo.bdependsigs) + \
+ len(binfo.bimplicitsigs)) == 0:
+ return {}
+
+ binfo.dependency_map = { child:signature for child, signature in zip(chain(binfo.bsources, binfo.bdepends, binfo.bimplicit),
+ chain(binfo.bsourcesigs, binfo.bdependsigs, binfo.bimplicitsigs))}
+
+ return binfo.dependency_map
+
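
A minimal recreation of the mapping built above, with made-up stand-ins for the binfo fields: the keys come from chaining bsources, bdepends and bimplicit, the values from chaining the matching *sigs lists, so each child ends up paired with the csig recorded for it in the previous build.

from itertools import chain

bsources, bdepends, bimplicit = ['foo.c'], ['foo.h'], ['SConscript']
bsourcesigs, bdependsigs, bimplicitsigs = ['sig1'], ['sig2'], ['sig3']

dependency_map = {child: signature
                  for child, signature in zip(chain(bsources, bdepends, bimplicit),
                                              chain(bsourcesigs, bdependsigs, bimplicitsigs))}
# -> {'foo.c': 'sig1', 'foo.h': 'sig2', 'SConscript': 'sig3'}
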
+ # @profile
+ def _add_strings_to_dependency_map(self, dmap):
+ """
+ In cases where comparing node objects isn't sufficient, we add the nodes' string names to the dependency map
+ :return: the updated dependency map
+ """
+
+ first_string = str(next(iter(dmap)))
+
+ # print("DMAP:%s"%id(dmap))
+ if first_string not in dmap:
+ string_dict = {str(child): signature for child, signature in dmap.items()}
+ dmap.update(string_dict)
+ return dmap
+
+ def _get_previous_signatures(self, dmap):
+ """
+ Return a list of corresponding csigs from previous
+ build in order of the node/files in children.
+
+ Args:
+ self - self
+ dmap - Dictionary of file -> csig
+
+ Returns:
+ List of csigs for provided list of children
+ """
+ prev = []
+ # MD5_TIMESTAMP_DEBUG = False
+
+ if len(dmap) == 0:
+ if MD5_TIMESTAMP_DEBUG: print("Nothing dmap shortcutting")
+ return None
+ elif MD5_TIMESTAMP_DEBUG: print("len(dmap):%d"%len(dmap))
+
+
+ # First try retrieving via Node
+ if MD5_TIMESTAMP_DEBUG: print("Checking if self is in map:%s id:%s type:%s"%(str(self), id(self), type(self)))
+ df = dmap.get(self, False)
+ if df:
+ return df
+
+ # Now check if self's repository file is in map.
+ rf = self.rfile()
+ if MD5_TIMESTAMP_DEBUG: print("Checking if self.rfile is in map:%s id:%s type:%s"%(str(rf), id(rf), type(rf)))
+ rfm = dmap.get(rf, False)
+ if rfm:
+ return rfm
+
+ # get default string for node and then also string swapping os.altsep for os.sep (/ for \)
+ c_strs = [str(self)]
+
+ if os.altsep:
+ c_strs.append(c_strs[0].replace(os.sep, os.altsep))
+
+ # In some cases the dependency_map's keys are already strings.
+ # Check if either string is now in dmap.
+ for s in c_strs:
+ if MD5_TIMESTAMP_DEBUG: print("Checking if str(self) is in map :%s" % s)
+ df = dmap.get(s, False)
+ if df:
+ return df
+
+ # Strings don't exist in map, add them and try again
+ # If there are no strings in this dmap, then add them.
+ # This may not be necessary, we could walk the nodes in the dmap and check each string
+ # rather than adding ALL the strings to dmap. In theory that would be n/2 vs 2n str() calls on node
+ # if not dmap.has_strings:
+ dmap = self._add_strings_to_dependency_map(dmap)
+
+ # Now that the strings have been added to the dmap,
+ # check again whether either string is in it.
+ for s in c_strs:
+ if MD5_TIMESTAMP_DEBUG: print("Checking if str(self) is in map (now with strings) :%s" % s)
+ df = dmap.get(s, False)
+ if df:
+ return df
+
+ # Lastly, use the node's get_path() to generate a string and see if that's in the dmap
+ if not df:
+ try:
+ # this should yield a path which matches what's in the sconsign
+ c_str = self.get_path()
+ if os.altsep:
+ c_str = c_str.replace(os.sep, os.altsep)
+
+ if MD5_TIMESTAMP_DEBUG: print("Checking if self.get_path is in map (now with strings) :%s" % s)
+
+ df = dmap.get(c_str, None)
+
+ except AttributeError:
+ raise FileBuildInfoFileToCsigMappingError("No mapping from file name to content signature for :%s" % str(self))
+
+ return df
+
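
The string lookups in the cascade above have to tolerate both path separators, since the .sconsign entry may have been stored with '/' while the current node renders with '\' (or the reverse). A small illustration using made-up paths instead of Nodes:

import os

def candidate_strings(path):
    # The node's default string plus, where os.altsep exists (Windows),
    # the same path with the alternate separator.
    strings = [path]
    if os.altsep:
        strings.append(path.replace(os.sep, os.altsep))
    return strings

dmap = {'src/foo.c': 'abc123'}   # file -> csig, as stored by the previous build
csig = None
for s in candidate_strings(os.path.join('src', 'foo.c')):
    csig = dmap.get(s)
    if csig:
        break
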
+ def changed_timestamp_then_content(self, target, prev_ni, node=None):
+ """
+ Used when decider for file is Timestamp-MD5
+
+ NOTE: If the timestamp hasn't changed this will skip md5'ing the
+ file and just copy the prev_ni provided. If the prev_ni
+ is wrong, it will be propagated.
+ See: https://github.com/SCons/scons/issues/2980
+
+ Args:
+ self - dependency
+ target - target
+ prev_ni - The NodeInfo object loaded from previous builds .sconsign
+ node - Node instance. Check this node for file existence/timestamp
+ if specified.
+
+ Returns:
+ Boolean - Indicates if node(File) has changed.
+ """
+
+ if node is None:
+ node = self
+ # Now get sconsign name -> csig map and then get proper prev_ni if possible
+ bi = node.get_stored_info().binfo
+ rebuilt = False
+ try:
+ dependency_map = bi.dependency_map
+ except AttributeError as e:
+ dependency_map = self._build_dependency_map(bi)
+ rebuilt = True
+
+ if len(dependency_map) == 0:
+ # If there's no dependency map, there's no need to find the
+ # prev_ni as there aren't any; shortcut
+ # the rest of the logic.
+ if MD5_TIMESTAMP_DEBUG: print("Skipping checks len(dmap)=0")
+
+ # We still need to get the current file's csig
+ # This should be slightly faster than calling self.changed_content(target, new_prev_ni)
+ self.get_csig()
+ return True
+
+ new_prev_ni = self._get_previous_signatures(dependency_map)
+ new = self.changed_timestamp_match(target, new_prev_ni)
+
+ if MD5_TIMESTAMP_DEBUG:
+ old = self.changed_timestamp_match(target, prev_ni)
+
+ if old != new:
+ print("Mismatch self.changed_timestamp_match(%s, prev_ni) old:%s new:%s"%(str(target), old, new))
+ new_prev_ni = self._get_previous_signatures(dependency_map)
+
+ if not new:
try:
- self.get_ninfo().csig = prev_ni.csig
+ # NOTE: We're modifying the current node's csig in a query.
+ self.get_ninfo().csig = new_prev_ni.csig
except AttributeError:
pass
return False
- return self.changed_content(target, prev_ni)
+ return self.changed_content(target, new_prev_ni)
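
A simplified model of the Timestamp-MD5 decision above, using plain dicts and files in place of Node/NodeInfo objects: the content hash is recomputed only when the stored timestamp no longer matches; otherwise the previously stored csig is trusted, which is exactly how a wrong prev_ni could be propagated.

import hashlib
import os

def md5_timestamp_changed(path, stored):
    """stored is assumed to look like {'timestamp': <mtime>, 'csig': <md5 hex>}."""
    mtime = os.stat(path).st_mtime
    if stored and mtime == stored.get('timestamp'):
        return False   # timestamp unchanged: reuse the old csig, skip hashing
    with open(path, 'rb') as f:
        csig = hashlib.md5(f.read()).hexdigest()
    return (not stored) or csig != stored.get('csig')
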
- def changed_timestamp_newer(self, target, prev_ni):
+ def changed_timestamp_newer(self, target, prev_ni, repo_node=None):
try:
return self.get_timestamp() > target.get_timestamp()
except AttributeError:
return 1
- def changed_timestamp_match(self, target, prev_ni):
+ def changed_timestamp_match(self, target, prev_ni, repo_node=None):
+ """
+ Return True if the timestamps don't match or if there is no previous timestamp
+ :param target:
+ :param prev_ni: Information about the node from the previous build
+ :return:
+ """
try:
return self.get_timestamp() != prev_ni.timestamp
except AttributeError:
return 1
def is_up_to_date(self):
+ """Check for whether the Node is current
+ In all cases self is the target we're checking to see if it's up to date
+ """
+
T = 0
if T: Trace('is_up_to_date(%s):' % self)
if not self.exists():
if T: Trace(' not self.exists():')
- # The file doesn't exist locally...
+ # The file (always a target) doesn't exist locally...
r = self.rfile()
if r != self:
- # ...but there is one in a Repository...
+ # ...but there is one (always a target) in a Repository...
if not self.changed(r):
if T: Trace(' changed(%s):' % r)
# ...and it's even up-to-date...
@@ -3272,7 +3515,9 @@ class File(Base):
# ...and they'd like a local copy.
e = LocalCopy(self, r, None)
if isinstance(e, SCons.Errors.BuildError):
- raise
+ # Likely this should be re-raising exception e
+ # (which would be BuildError)
+ raise e
SCons.Node.store_info_map[self.store_info](self)
if T: Trace(' 1\n')
return 1
@@ -3293,11 +3538,14 @@ class File(Base):
result = self
if not self.exists():
norm_name = _my_normcase(self.name)
- for dir in self.dir.get_all_rdirs():
- try: node = dir.entries[norm_name]
- except KeyError: node = dir.file_on_disk(self.name)
+ for repo_dir in self.dir.get_all_rdirs():
+ try:
+ node = repo_dir.entries[norm_name]
+ except KeyError:
+ node = repo_dir.file_on_disk(self.name)
+
if node and node.exists() and \
- (isinstance(node, File) or isinstance(node, Entry) \
+ (isinstance(node, File) or isinstance(node, Entry)
or not node.is_derived()):
result = node
# Copy over our local attributes to the repository
@@ -3317,6 +3565,28 @@ class File(Base):
self._memo['rfile'] = result
return result
+ def find_repo_file(self):
+ """
+ For this node, find if there exists a corresponding file in one or more repositories
+ :return: list of corresponding files in repositories
+ """
+ retvals = []
+
+ norm_name = _my_normcase(self.name)
+ for repo_dir in self.dir.get_all_rdirs():
+ try:
+ node = repo_dir.entries[norm_name]
+ except KeyError:
+ node = repo_dir.file_on_disk(self.name)
+
+ if node and node.exists() and \
+ (isinstance(node, File) or isinstance(node, Entry) \
+ or not node.is_derived()):
+ retvals.append(node)
+
+ return retvals
+
+
def rstr(self):
return str(self.rfile())
@@ -3374,6 +3644,8 @@ class File(Base):
because multiple targets built by the same action will all
have the same build signature, and we have to differentiate
them somehow.
+
+ Signature should normally be a string of hex digits.
"""
try:
return self.cachesig
@@ -3383,10 +3655,13 @@ class File(Base):
# Collect signatures for all children
children = self.children()
sigs = [n.get_cachedir_csig() for n in children]
+
# Append this node's signature...
sigs.append(self.get_contents_sig())
+
# ...and its path
sigs.append(self.get_internal_path())
+
# Merge this all into a single signature
result = self.cachesig = SCons.Util.MD5collect(sigs)
return result
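
For reference, a sketch of how such a combined cache signature can be formed, using hashlib directly and assuming SCons.Util.MD5collect hashes the joined list of signature strings (all values below are made up):

import hashlib

def collect(signatures):
    # Assumed equivalent of SCons.Util.MD5collect: hash the joined signatures.
    return hashlib.md5(', '.join(signatures).encode('utf-8')).hexdigest()

sigs = ['1111aaaa', '2222bbbb']   # children's cachedir csigs (made up)
sigs.append('3333cccc')           # this node's contents signature (made up)
sigs.append('build/foo.o')        # ...and its path
cachesig = collect(sigs)
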