| author    | Jörg Frings-Fürst <debian@jff-webhosting.net> | 2019-12-28 17:12:41 +0100 |
|-----------|-----------------------------------------------|---------------------------|
| committer | Jörg Frings-Fürst <debian@jff-webhosting.net> | 2019-12-28 17:12:41 +0100 |
| commit    | 56597a6a68e741355b301f91d5913d59cfb34eaa      | |
| tree      | 7531e41faf62f126984bf05b8d712f99c9520d5d /bin | |
| parent    | 56337453f0f3fbe34255e636d5d65974f4d17681      | |
New upstream version 3.1.2 (upstream/3.1.2, upstream)
Diffstat (limited to 'bin')
| mode                    | path                         | lines changed |
|-------------------------|------------------------------|---------------|
| -rw-r--r--              | bin/SConsDoc.py              | 218 |
| -rw-r--r--              | bin/SConsExamples.py         | 77 |
| -rw-r--r--              | bin/calibrate.py             | 10 |
| -rw-r--r--              | bin/docs-update-generated.py | 17 |
| -rw-r--r--              | bin/import-test.py           | 2 |
| -rw-r--r--              | bin/linecount.py             | 2 |
| -rw-r--r--              | bin/restore.sh               | 18 |
| -rw-r--r--              | bin/rsync-sourceforge        | 32 |
| -rw-r--r--              | bin/scons-proc.py            | 14 |
| -rwxr-xr-x              | bin/scons-review.sh          | 24 |
| -rwxr-xr-x [-rw-r--r--] | bin/scons_dev_master.py      | 2 |
| -rw-r--r--              | bin/sfsum                    | 148 |
| -rw-r--r--              | bin/timebuild                | 65 |
| -rw-r--r--              | bin/update-release-info.py   | 2 |
14 files changed, 212 insertions, 419 deletions
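
Much of this upstream refresh is Python 2/3 compatibility cleanup in the helper scripts below: bare `except:` clauses narrowed to specific exceptions, file I/O moved into `with` blocks with explicit text/bytes modes, raw strings used for regexes, and shell-string invocations replaced by `subprocess`. As a standalone illustration (not the upstream function itself), the sketch below mirrors the `isSConsXml` change in `bin/SConsDoc.py`, which now reads the file as bytes and searches for the SCons Docbook namespace; the function name here is the example's own, the constants mirror the diff.

```python
# Sketch of the namespace-sniffing pattern from the SConsDoc.py hunk below.
DBXSD = "http://www.scons.org/dbxsd/v1.0"           # SCons Docbook XSD namespace
DBXSD_PAT = b'xmlns="%s"' % DBXSD.encode('utf-8')   # byte pattern works on both PY2 and PY3


def is_scons_xml(fpath):
    """Return True if fpath contains the default SCons target namespace."""
    try:
        with open(fpath, 'rb') as f:                # bytes mode avoids decode errors
            return DBXSD_PAT in f.read()
    except Exception:                               # narrowed from the old bare `except:`
        return False
```
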
diff --git a/bin/SConsDoc.py b/bin/SConsDoc.py index e44a8db..f1de48a 100644 --- a/bin/SConsDoc.py +++ b/bin/SConsDoc.py @@ -26,7 +26,8 @@ # from __future__ import print_function -__doc__ = """ + +__doc__ = r""" This module parses home-brew XML files that document various things in SCons. Right now, it handles Builders, functions, construction variables, and Tools, but we expect it to get extended in the future. @@ -117,16 +118,18 @@ import re import sys import copy +PY2 = sys.version_info[0] == 2 + # Do we have libxml2/libxslt/lxml? has_libxml2 = True try: import libxml2 import libxslt -except: +except ImportError: has_libxml2 = False try: import lxml - except: + except ImportError: raise ImportError("Failed to import either libxml2/libxslt or lxml") has_etree = False @@ -155,13 +158,16 @@ if not has_etree: except ImportError: raise ImportError("Failed to import ElementTree from any known place") -re_entity = re.compile(r"\&([^;]+);") +# patterns to help trim XML passed in as strings +re_entity = re.compile(r"&([^;]+);") re_entity_header = re.compile(r"<!DOCTYPE\s+sconsdoc\s+[^\]]+\]>") # Namespace for the SCons Docbook XSD -dbxsd="http://www.scons.org/dbxsd/v1.0" +dbxsd = "http://www.scons.org/dbxsd/v1.0" +# Namsespace pattern to help identify an scons-xml file read as bytes +dbxsdpat = b'xmlns="%s"' % dbxsd.encode('utf-8') # Namespace map identifier for the SCons Docbook XSD -dbxid="dbx" +dbxid = "dbx" # Namespace for schema instances xsi = "http://www.w3.org/2001/XMLSchema-instance" @@ -170,19 +176,22 @@ copyright_comment = """ Copyright (c) 2001 - 2019 The SCons Foundation This file is processed by the bin/SConsDoc.py module. -See its __doc__ string for a discussion of the format. +See its docstring for a discussion of the format. """ def isSConsXml(fpath): - """ Check whether the given file is a SCons XML file, i.e. it - contains the default target namespace definition. + """ Check whether the given file is an SCons XML file. 
+ + It is SCons XML if it contains the default target namespace definition + described by dbxsdpat + """ try: - with open(fpath,'r') as f: + with open(fpath, 'rb') as f: content = f.read() - if content.find('xmlns="%s"' % dbxsd) >= 0: + if content.find(dbxsdpat) >= 0: return True - except: + except Exception: pass return False @@ -195,10 +204,11 @@ def remove_entities(content): return content -default_xsd = os.path.join('doc','xsd','scons.xsd') +default_xsd = os.path.join('doc', 'xsd', 'scons.xsd') ARG = "dbscons" + class Libxml2ValidityHandler: def __init__(self): @@ -224,10 +234,11 @@ class DoctypeEntity: def getEntityString(self): txt = """ <!ENTITY %(perc)s %(name)s SYSTEM "%(uri)s"> %(perc)s%(name)s; -""" % {'perc' : perc, 'name' : self.name, 'uri' : self.uri} +""" % {'perc': perc, 'name': self.name, 'uri': self.uri} return txt + class DoctypeDeclaration: def __init__(self, name_=None): self.name = name_ @@ -253,14 +264,16 @@ class DoctypeDeclaration: return content if not has_libxml2: - class TreeFactory: + class TreeFactory(object): def __init__(self): pass - def newNode(self, tag): + @staticmethod + def newNode(tag): return etree.Element(tag) - def newEtreeNode(self, tag, init_ns=False): + @staticmethod + def newEtreeNode(tag, init_ns=False): if init_ns: NSMAP = {None: dbxsd, 'xsi' : xsi} @@ -268,47 +281,66 @@ if not has_libxml2: return etree.Element(tag) - def copyNode(self, node): + @staticmethod + def copyNode(node): return copy.deepcopy(node) - def appendNode(self, parent, child): + @staticmethod + def appendNode(parent, child): parent.append(child) - def hasAttribute(self, node, att): + @staticmethod + def hasAttribute(node, att): return att in node.attrib - def getAttribute(self, node, att): + @staticmethod + def getAttribute(node, att): return node.attrib[att] - def setAttribute(self, node, att, value): + @staticmethod + def setAttribute(node, att, value): node.attrib[att] = value - def getText(self, root): + @staticmethod + def getText(root): return root.text - def setText(self, root, txt): + @staticmethod + def setText(root, txt): root.text = txt - def writeGenTree(self, root, fp): + @staticmethod + def writeGenTree(root, fp): dt = DoctypeDeclaration() - fp.write(etree.tostring(root, xml_declaration=True, - encoding="UTF-8", pretty_print=True, + try: + encfun = unicode # PY2 + except NameError: + encfun = str + fp.write(etree.tostring(root, encoding=encfun, + pretty_print=True, doctype=dt.createDoctype())) - def writeTree(self, root, fpath): - with open(fpath, 'w') as fp: - fp.write(etree.tostring(root, xml_declaration=True, - encoding="UTF-8", pretty_print=True)) - - def prettyPrintFile(self, fpath): - with open(fpath,'r') as fin: + @staticmethod + def writeTree(root, fpath): + try: + encfun = unicode # PY2 + except NameError: + encfun = "utf-8" + with open(fpath, 'wb') as fp: + fp.write(etree.tostring(root, encoding=encfun, + pretty_print=True)) + + @staticmethod + def prettyPrintFile(fpath): + with open(fpath,'rb') as fin: tree = etree.parse(fin) pretty_content = etree.tostring(tree, pretty_print=True) - with open(fpath,'w') as fout: + with open(fpath,'wb') as fout: fout.write(pretty_content) - def decorateWithHeader(self, root): + @staticmethod + def decorateWithHeader(root): root.attrib["{"+xsi+"}schemaLocation"] = "%s %s/scons.xsd" % (dbxsd, dbxsd) return root @@ -316,12 +348,12 @@ if not has_libxml2: """ Return a XML file tree with the correct namespaces set, the element root as top entry and the given header comment. 
""" - NSMAP = {None: dbxsd, - 'xsi' : xsi} + NSMAP = {None: dbxsd, 'xsi' : xsi} t = etree.Element(root, nsmap=NSMAP) return self.decorateWithHeader(t) - def validateXml(self, fpath, xmlschema_context): + @staticmethod + def validateXml(fpath, xmlschema_context): # Use lxml xmlschema = etree.XMLSchema(xmlschema_context) try: @@ -339,19 +371,22 @@ if not has_libxml2: return False return True - def findAll(self, root, tag, ns=None, xp_ctxt=None, nsmap=None): + @staticmethod + def findAll(root, tag, ns=None, xp_ctxt=None, nsmap=None): expression = ".//{%s}%s" % (nsmap[ns], tag) if not ns or not nsmap: expression = ".//%s" % tag return root.findall(expression) - def findAllChildrenOf(self, root, tag, ns=None, xp_ctxt=None, nsmap=None): + @staticmethod + def findAllChildrenOf(root, tag, ns=None, xp_ctxt=None, nsmap=None): expression = "./{%s}%s/*" % (nsmap[ns], tag) if not ns or not nsmap: expression = "./%s/*" % tag return root.findall(expression) - def convertElementTree(self, root): + @staticmethod + def convertElementTree(root): """ Convert the given tree of etree.Element entries to a list of tree nodes for the current XML toolkit. @@ -359,53 +394,63 @@ if not has_libxml2: return [root] else: - class TreeFactory: + class TreeFactory(object): def __init__(self): pass - def newNode(self, tag): + @staticmethod + def newNode(tag): return libxml2.newNode(tag) - def newEtreeNode(self, tag, init_ns=False): + @staticmethod + def newEtreeNode(tag, init_ns=False): return etree.Element(tag) - def copyNode(self, node): + @staticmethod + def copyNode(node): return node.copyNode(1) - def appendNode(self, parent, child): + @staticmethod + def appendNode(parent, child): if hasattr(parent, 'addChild'): parent.addChild(child) else: parent.append(child) - def hasAttribute(self, node, att): + @staticmethod + def hasAttribute(node, att): if hasattr(node, 'hasProp'): return node.hasProp(att) return att in node.attrib - def getAttribute(self, node, att): + @staticmethod + def getAttribute(node, att): if hasattr(node, 'prop'): return node.prop(att) return node.attrib[att] - def setAttribute(self, node, att, value): + @staticmethod + def setAttribute(node, att, value): if hasattr(node, 'setProp'): node.setProp(att, value) else: node.attrib[att] = value - def getText(self, root): + @staticmethod + def getText(root): if hasattr(root, 'getContent'): return root.getContent() return root.text - def setText(self, root, txt): + @staticmethod + def setText(root, txt): if hasattr(root, 'setContent'): root.setContent(txt) else: root.text = txt - def writeGenTree(self, root, fp): + @staticmethod + def writeGenTree(root, fp): doc = libxml2.newDoc('1.0') dtd = doc.newDtd("sconsdoc", None, None) doc.addChild(dtd) @@ -420,23 +465,26 @@ else: fp.write(content) doc.freeDoc() - def writeTree(self, root, fpath): - with open(fpath, 'w') as fp: + @staticmethod + def writeTree(root, fpath): + with open(fpath, 'wb') as fp: doc = libxml2.newDoc('1.0') doc.setRootElement(root) fp.write(doc.serialize("UTF-8", 1)) doc.freeDoc() - def prettyPrintFile(self, fpath): + @staticmethod + def prettyPrintFile(fpath): # Read file and resolve entities doc = libxml2.readFile(fpath, None, libxml2d.XML_PARSE_NOENT) - with open(fpath, 'w') as fp: + with open(fpath, 'wb') as fp: # Prettyprint fp.write(doc.serialize("UTF-8", 1)) # Cleanup doc.freeDoc() - def decorateWithHeader(self, root): + @staticmethod + def decorateWithHeader(root): # Register the namespaces ns = root.newNs(dbxsd, None) xi = root.newNs(xsi, 'xsi') @@ -453,7 +501,8 @@ else: t = 
libxml2.newNode(root) return self.decorateWithHeader(t) - def validateXml(self, fpath, xmlschema_context): + @staticmethod + def validateXml(fpath, xmlschema_context): retval = True # Create validation context @@ -479,7 +528,8 @@ else: return retval - def findAll(self, root, tag, ns=None, xpath_context=None, nsmap=None): + @staticmethod + def findAll(root, tag, ns=None, xpath_context=None, nsmap=None): if hasattr(root, 'xpathEval') and xpath_context: # Use the xpath context xpath_context.setContextNode(root) @@ -493,7 +543,8 @@ else: expression = ".//%s" % tag return root.findall(expression) - def findAllChildrenOf(self, root, tag, ns=None, xpath_context=None, nsmap=None): + @staticmethod + def findAllChildrenOf(root, tag, ns=None, xpath_context=None, nsmap=None): if hasattr(root, 'xpathEval') and xpath_context: # Use the xpath context xpath_context.setContextNode(root) @@ -562,20 +613,18 @@ else: tf = TreeFactory() -class SConsDocTree: +class SConsDocTree(object): def __init__(self): - self.nsmap = {'dbx' : dbxsd} + self.nsmap = {'dbx': dbxsd} self.doc = None self.root = None self.xpath_context = None def parseContent(self, content, include_entities=True): - """ Parses the given content as XML file. This method - is used when we generate the basic lists of entities - for the builders, tools and functions. - So we usually don't bother about namespaces and resolving - entities here...this is handled in parseXmlFile below - (step 2 of the overall process). + """ Parses the given text content as XML + + This is the setup portion, called from parseContent in + an SConsDocHandler instance - see the notes there. """ if not include_entities: content = remove_entities(content) @@ -583,7 +632,6 @@ class SConsDocTree: self.root = etree.fromstring(content) def parseXmlFile(self, fpath): - nsmap = {'dbx' : dbxsd} if not has_libxml2: # Create domtree from file domtree = etree.parse(fpath) @@ -604,7 +652,7 @@ class SConsDocTree: if self.xpath_context is not None: self.xpath_context.xpathFreeContext() -perc="%" +perc = "%" def validate_all_xml(dpaths, xsdfile=default_xsd): xmlschema_context = None @@ -620,7 +668,7 @@ def validate_all_xml(dpaths, xsdfile=default_xsd): fpaths = [] for dp in dpaths: if dp.endswith('.xml') and isSConsXml(dp): - path='.' + path = '.' 
fpaths.append(dp) else: for path, dirs, files in os.walk(dp): @@ -633,8 +681,8 @@ def validate_all_xml(dpaths, xsdfile=default_xsd): fails = [] for idx, fp in enumerate(fpaths): fpath = os.path.join(path, fp) - print("%.2f%s (%d/%d) %s" % (float(idx+1)*100.0/float(len(fpaths)), - perc, idx+1, len(fpaths),fp)) + print("%.2f%s (%d/%d) %s" % (float(idx + 1) * 100.0 /float(len(fpaths)), + perc, idx + 1, len(fpaths), fp)) if not tf.validateXml(fp, xmlschema_context): fails.append(fp) @@ -649,6 +697,7 @@ def validate_all_xml(dpaths, xsdfile=default_xsd): return True + class Item(object): def __init__(self, name): self.name = name @@ -668,21 +717,25 @@ class Item(object): def __lt__(self, other): return self.sort_name < other.sort_name + class Builder(Item): pass + class Function(Item): - def __init__(self, name): - super(Function, self).__init__(name) + pass + class Tool(Item): def __init__(self, name): Item.__init__(self, name) self.entity = self.name.replace('+', 'X') + class ConstructionVariable(Item): pass + class Arguments(object): def __init__(self, signature, body=None): if not body: @@ -692,7 +745,7 @@ class Arguments(object): def __str__(self): s = ''.join(self.body).strip() result = [] - for m in re.findall('([a-zA-Z/_]+|[^a-zA-Z/_]+)', s): + for m in re.findall(r'([a-zA-Z/_]+|[^a-zA-Z/_]+)', s): if ' ' in m: m = '"%s"' % m result.append(m) @@ -700,6 +753,7 @@ class Arguments(object): def append(self, data): self.body.append(data) + class SConsDocHandler(object): def __init__(self): self.builders = {} @@ -794,7 +848,7 @@ class SConsDocHandler(object): self.parseDomtree(t.root, t.xpath_context, t.nsmap) # lifted from Ka-Ping Yee's way cool pydoc module. -if sys.version_info[0] == 2: +if PY2: def importfile(path): """Import a Python source file or compiled file given its path.""" import imp @@ -817,11 +871,9 @@ if sys.version_info[0] == 2: else: # PY3 version, from newer pydoc def importfile(path): """Import a Python source file or compiled file given its path.""" - import importlib - from pydoc import ErrorDuringImport - magic = importlib.util.MAGIC_NUMBER + from importlib.util import MAGIC_NUMBER with open(path, 'rb') as ifp: - is_bytecode = magic == ifp.read(len(magic)) + is_bytecode = MAGIC_NUMBER == ifp.read(len(MAGIC_NUMBER)) filename = os.path.basename(path) name, ext = os.path.splitext(filename) if is_bytecode: @@ -832,7 +884,7 @@ else: # PY3 version, from newer pydoc spec = importlib.util.spec_from_file_location(name, path, loader=loader) try: return importlib._bootstrap._load(spec) - except: + except ImportError: raise ErrorDuringImport(path, sys.exc_info()) # Local Variables: diff --git a/bin/SConsExamples.py b/bin/SConsExamples.py index 7491c58..dbb8715 100644 --- a/bin/SConsExamples.py +++ b/bin/SConsExamples.py @@ -287,9 +287,8 @@ def ensureExampleOutputsExist(dpath): fpath = os.path.join(generated_examples, key + '_' + r.name.replace("/", "_")) # Write file - f = open(fpath, 'w') - f.write("%s\n" % content) - f.close() + with open(fpath, 'w') as f: + f.write("%s\n" % content) perc = "%" @@ -324,9 +323,8 @@ def createAllExampleOutputs(dpath): fpath = os.path.join(generated_examples, key + '_' + r.name.replace("/", "_")) # Write file - f = open(fpath, 'w') - f.write("%s\n" % content) - f.close() + with open(fpath, 'w') as f: + f.write("%s\n" % content) idx += 1 def collectSConsExampleNames(fpath): @@ -458,6 +456,7 @@ import re import SCons.Action import SCons.Defaults import SCons.Node.FS +import shutil platform = '%(osname)s' @@ -540,30 +539,28 @@ def Null(target, source, 
env): def Cat(target, source, env): target = str(target[0]) - f = open(target, "wb") for src in map(str, source): - f.write(open(src, "rb").read()) - f.close() + shutil.copy(src, target) def CCCom(target, source, env): - target = str(target[0]) - fp = open(target, "wb") - def process(source_file, fp=fp): - for line in open(source_file, "rb").readlines(): - m = re.match(r'#include\s[<"]([^<"]+)[>"]', line) - if m: - include = m.group(1) - for d in [str(env.Dir('$CPPPATH')), '.']: - f = os.path.join(d, include) - if os.path.exists(f): - process(f) - break - elif line[:11] != "STRIP CCCOM": - fp.write(line) - for src in map(str, source): - process(src) - fp.write('debug = ' + ARGUMENTS.get('debug', '0') + '\\n') - fp.close() + def process(source_file, ofp): + with open(source_file, "r") as ifp: + for line in ifp.readlines(): + m = re.match(r'#include\s[<"]([^<"]+)[>"]', line) + if m: + include = m.group(1) + for d in [str(env.Dir('$CPPPATH')), '.']: + f = os.path.join(d, include) + if os.path.exists(f): + process(f, ofp) + break + elif line[:11] != "STRIP CCCOM": + ofp.write(line) + + with open(str(target[0]), "w") as fp: + for src in map(str, source): + process(src, fp) + fp.write('debug = ' + ARGUMENTS.get('debug', '0') + '\\n') public_class_re = re.compile('^public class (\S+)', re.MULTILINE) @@ -577,20 +574,23 @@ def JavaCCom(target, source, env): for t in tlist: not_copied[t] = 1 for src in map(str, source): - contents = open(src, "rb").read() + with open(src, "r") as f: + contents = f.read() classes = public_class_re.findall(contents) for c in classes: for t in [x for x in tlist if x.find(c) != -1]: - open(t, "wb").write(contents) + with open(t, "w") as f: + f.write(contents) del not_copied[t] for t in not_copied.keys(): - open(t, "wb").write("\\n") + with open(t, "w") as f: + f.write("\\n") def JavaHCom(target, source, env): tlist = map(str, target) slist = map(str, source) for t, s in zip(tlist, slist): - open(t, "wb").write(open(s, "rb").read()) + shutil.copy(s, t) def JarCom(target, source, env): target = str(target[0]) @@ -599,10 +599,8 @@ def JarCom(target, source, env): for dirpath, dirnames, filenames in os.walk(src): class_files.extend([ os.path.join(dirpath, f) for f in filenames if f.endswith('.class') ]) - f = open(target, "wb") for cf in class_files: - f.write(open(cf, "rb").read()) - f.close() + shutil.copy(cf, target) # XXX Adding COLOR, COLORS and PACKAGE to the 'cc' varlist(s) by hand # here is bogus. It's for the benefit of doc/user/command-line.in, which @@ -721,7 +719,8 @@ def command_touch(args, command, test, values): if not os.path.isabs(file): file = os.path.join(test.workpath('WORK'), file) if not os.path.exists(file): - open(file, 'wb') + with open(file, 'w'): + pass os.utime(file, times) return [] @@ -735,8 +734,8 @@ def command_edit(args, c, test, values): for file in args: if not os.path.isabs(file): file = os.path.join(test.workpath('WORK'), file) - contents = open(file, 'rb').read() - open(file, 'wb').write(contents + add_string) + with open(file, 'a') as f: + f.write(add_string) return [] def command_ls(args, c, test, values): @@ -825,7 +824,7 @@ def create_scons_output(e): t.write(path, content) if hasattr(f, 'chmod'): if len(f.chmod): - os.chmod(path, int(f.chmod, 0)) + os.chmod(path, int(f.chmod, base=8)) # Regular expressions for making the doc output consistent, # regardless of reported addresses or Python version. 
@@ -882,7 +881,7 @@ def create_scons_output(e): lines = ExecuteCommand(args, command, t, {'osname':o.os, 'tools':o.tools}) if not command.output and lines: ncontent = '\n'.join(lines) - ncontent = address_re.sub(r' at 0x700000>', ncontent) + ncontent = address_re.sub(r' at 0x700000>', ncontent) ncontent = engine_re.sub(r' File "bootstrap/src/engine/SCons/', ncontent) ncontent = file_re.sub(r'\1 <module>', ncontent) ncontent = nodelist_re.sub(r"\1 'NodeList' object \2", ncontent) diff --git a/bin/calibrate.py b/bin/calibrate.py index 3f9104e..be06a54 100644 --- a/bin/calibrate.py +++ b/bin/calibrate.py @@ -28,8 +28,8 @@ import re import subprocess import sys -variable_re = re.compile('^VARIABLE: (.*)$', re.M) -elapsed_re = re.compile('^ELAPSED: (.*)$', re.M) +variable_re = re.compile(r'^VARIABLE: (.*)$', re.M) +elapsed_re = re.compile(r'^ELAPSED: (.*)$', re.M) def main(argv=None): if argv is None: @@ -60,7 +60,8 @@ def main(argv=None): while good < 3: p = subprocess.Popen(command, stdout=subprocess.PIPE, - stderr=subprocess.STDOUT) + stderr=subprocess.STDOUT, + universal_newlines=True) output = p.communicate()[0] vm = variable_re.search(output) em = elapsed_re.search(output) @@ -70,12 +71,13 @@ def main(argv=None): print(output) raise print("run %3d: %7.3f: %s" % (run, elapsed, ' '.join(vm.groups()))) - if opts.min < elapsed and elapsed < opts.max: + if opts.min < elapsed < opts.max: good += 1 else: good = 0 for v in vm.groups(): var, value = v.split('=', 1) + # TODO: this sometimes converges slowly, better algorithm? value = int((int(value) * opts.max) // elapsed) os.environ[var] = str(value) run += 1 diff --git a/bin/docs-update-generated.py b/bin/docs-update-generated.py index c164baf..78b60a7 100644 --- a/bin/docs-update-generated.py +++ b/bin/docs-update-generated.py @@ -10,6 +10,8 @@ from __future__ import print_function import os import sys +import subprocess + import SConsDoc # Directory where all generated files are stored @@ -17,8 +19,8 @@ gen_folder = os.path.join('doc','generated') def argpair(key): """ Return the argument pair *.gen,*.mod for the given key. """ - arg = '%s,%s' % (os.path.join(gen_folder,'%s.gen' % key), - os.path.join(gen_folder,'%s.mod' % key)) + arg = '%s,%s' % (os.path.join(gen_folder, '%s.gen' % key), + os.path.join(gen_folder, '%s.mod' % key)) return arg @@ -43,10 +45,13 @@ def generate_all(): print("Couldn't create destination folder %s! Exiting..." % gen_folder) return # Call scons-proc.py - os.system('%s %s -b %s -f %s -t %s -v %s %s' % - (sys.executable, os.path.join('bin','scons-proc.py'), - argpair('builders'), argpair('functions'), - argpair('tools'), argpair('variables'), ' '.join(flist))) + _ = subprocess.call([sys.executable, + os.path.join('bin','scons-proc.py'), + '-b', argpair('builders'), + '-f', argpair('functions'), + '-t', argpair('tools'), + '-v', argpair('variables')] + flist, + shell=False) if __name__ == "__main__": diff --git a/bin/import-test.py b/bin/import-test.py index 5c4902f..23718cf 100644 --- a/bin/import-test.py +++ b/bin/import-test.py @@ -25,7 +25,7 @@ # """ triple-quotes will need to have their contents edited by hand. 
# -__revision__ = "bin/import-test.py 72ae09dc35ac2626f8ff711d8c4b30b6138e08e3 2019-08-08 14:50:06 bdeegan" +__revision__ = "bin/import-test.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" import os.path import sys diff --git a/bin/linecount.py b/bin/linecount.py index f2f3713..f9a3f5f 100644 --- a/bin/linecount.py +++ b/bin/linecount.py @@ -23,7 +23,7 @@ # interesting one for most purposes. from __future__ import division, print_function -__revision__ = "bin/linecount.py 72ae09dc35ac2626f8ff711d8c4b30b6138e08e3 2019-08-08 14:50:06 bdeegan" +__revision__ = "bin/linecount.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" import os.path diff --git a/bin/restore.sh b/bin/restore.sh index 1db2aca..2e7883d 100644 --- a/bin/restore.sh +++ b/bin/restore.sh @@ -1,6 +1,6 @@ #!/usr/bin/env sh # -# Simple hack script to restore __revision__, __COPYRIGHT_, 3.1.1 +# Simple hack script to restore __revision__, __COPYRIGHT_, 3.1.2 # and other similar variables to what gets checked in to source. This # comes in handy when people send in diffs based on the released source. # @@ -24,7 +24,7 @@ for i in `find $DIRS -name '*.py'`; do ed $i <<EOF g/Copyright (c) 2001.*SCons Foundation/s//Copyright (c) 2001 - 2019 The SCons Foundation/p w -/^__revision__ = /s/= .*/= "bin/restore.sh 72ae09dc35ac2626f8ff711d8c4b30b6138e08e3 2019-08-08 14:50:06 bdeegan"/p +/^__revision__ = /s/= .*/= "bin/restore.sh bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"/p w q EOF @@ -35,7 +35,7 @@ for i in `find $DIRS -name 'scons.bat'`; do ed $i <<EOF g/Copyright (c) 2001.*SCons Foundation/s//Copyright (c) 2001 - 2019 The SCons Foundation/p w -/^@REM src\/script\/scons.bat/s/@REM .* knight/@REM bin/restore.sh 72ae09dc35ac2626f8ff711d8c4b30b6138e08e3 2019-08-08 14:50:06 bdeegan/p +/^@REM src\/script\/scons.bat/s/@REM .* knight/@REM bin/restore.sh bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan/p w q EOF @@ -44,13 +44,13 @@ done for i in `find $DIRS -name '__init__.py' -o -name 'scons.py' -o -name 'sconsign.py'`; do header $i ed $i <<EOF -/^__version__ = /s/= .*/= "3.1.1"/p +/^__version__ = /s/= .*/= "3.1.2"/p w -/^__build__ = /s/= .*/= "72ae09dc35ac2626f8ff711d8c4b30b6138e08e3"/p +/^__build__ = /s/= .*/= "bee7caf9defd6e108fc2998a2520ddb36a967691"/p w /^__buildsys__ = /s/= .*/= "octodog"/p w -/^__date__ = /s/= .*/= "2019-08-08 14:50:06"/p +/^__date__ = /s/= .*/= "2019-12-17 02:07:09"/p w /^__developer__ = /s/= .*/= "bdeegan"/p w @@ -61,7 +61,7 @@ done for i in `find $DIRS -name 'setup.py'`; do header $i ed $i <<EOF -/^ *version = /s/= .*/= "3.1.1",/p +/^ *version = /s/= .*/= "3.1.2",/p w q EOF @@ -72,9 +72,9 @@ for i in `find $DIRS -name '*.txt'`; do ed $i <<EOF g/Copyright (c) 2001.*SCons Foundation/s//Copyright (c) 2001 - 2019 The SCons Foundation/p w -/# [^ ]* 0.96.[CD][0-9]* [0-9\/]* [0-9:]* knight$/s/.*/# bin/restore.sh 72ae09dc35ac2626f8ff711d8c4b30b6138e08e3 2019-08-08 14:50:06 bdeegan/p +/# [^ ]* 0.96.[CD][0-9]* [0-9\/]* [0-9:]* knight$/s/.*/# bin/restore.sh bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan/p w -/Version [0-9][0-9]*\.[0-9][0-9]*/s//Version 3.1.1/p +/Version [0-9][0-9]*\.[0-9][0-9]*/s//Version 3.1.2/p w q EOF diff --git a/bin/rsync-sourceforge b/bin/rsync-sourceforge deleted file mode 100644 index de44e3b..0000000 --- a/bin/rsync-sourceforge +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/sh -# -# Sync this directory tree with sourceforge. 
-# -# Cribbed and modified from Peter Miller's same-named script in -# /home/groups/a/ae/aegis/aegis at SourceForge. -# -# Guide to what this does with rsync: -# -# --rsh=ssh use ssh for the transfer -# -l copy symlinks as symlinks -# -p preserve permissions -# -r recursive -# -t preserve times -# -z compress data -# --stats file transfer statistics -# --exclude exclude files matching the pattern -# --delete delete files that don't exist locally -# --delete-excluded delete files that match the --exclude patterns -# --progress show progress during the transfer -# -v verbose -# -LOCAL=/home/scons/scons -REMOTE=/home/groups/s/sc/scons/scons -/usr/bin/rsync --rsh=ssh -l -p -r -t -z --stats \ - --exclude build \ - --exclude "*,D" \ - --exclude "*.pyc" \ - --exclude aegis.log \ - --delete --delete-excluded \ - --progress -v \ - ${LOCAL}/. scons.sourceforge.net:${REMOTE}/. diff --git a/bin/scons-proc.py b/bin/scons-proc.py index e09c853..d8b37df 100644 --- a/bin/scons-proc.py +++ b/bin/scons-proc.py @@ -32,8 +32,8 @@ Options: -t file(s) dump tool information to the specified file(s) -v file(s) dump variable information to the specified file(s) - Regard that each -[btv] argument is a pair of - comma-separated .gen,.mod file names. + The "files" argument following a -[bftv] argument is expected to + be a comma-separated pair of names like: foo.gen,foo.mod """ @@ -70,7 +70,9 @@ def parse_docs(args, include_entities=True): sys.stderr.write("error in %s\n" % f) raise else: - content = open(f).read() + # mode we read (text/bytes) has to match handling in SConsDoc + with open(f, 'r') as fp: + content = fp.read() if content: try: h.parseContent(content, include_entities) @@ -166,6 +168,7 @@ class SCons_XML(object): # Write file f = self.fopen(filename) stf.writeGenTree(root, f) + f.close() def write_mod(self, filename): try: @@ -212,6 +215,7 @@ class SCons_XML(object): v.tag, v.entityfunc(), v.tag)) f.write('\n') f.write(Warning) + f.close() class Proxy(object): def __init__(self, subject): @@ -348,7 +352,7 @@ processor_class = SCons_XML # Step 1: Creating entity files for builders, functions,... print("Generating entity files...") -h = parse_docs(args, False) +h = parse_docs(args, include_entities=False) write_output_files(h, buildersfiles, functionsfiles, toolsfiles, variablesfiles, SCons_XML.write_mod) @@ -362,7 +366,7 @@ else: # Step 3: Creating actual documentation snippets, using the # fully resolved and updated entities from the *.mod files. 
print("Updating documentation for builders, tools and functions...") -h = parse_docs(args, True) +h = parse_docs(args, include_entities=True) write_output_files(h, buildersfiles, functionsfiles, toolsfiles, variablesfiles, SCons_XML.write) print("Done") diff --git a/bin/scons-review.sh b/bin/scons-review.sh deleted file mode 100755 index f126333..0000000 --- a/bin/scons-review.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh - -case "$1" in -'') exec svn diff --diff-cmd diff -x -c $* ;; --m) svn diff --diff-cmd diff -x -c $* | alpine scons-dev ;; -*) echo "Error: unknown option '$1"; exit 1 ;; -esac - -# OLD CODE FOR USE WITH AEGIS -# -#if test $# -ne 1; then -# echo "Usage: scons-review change#" >&2 -# exit 1 -#fi -#if test "X$AEGIS_PROJECT" = "X"; then -# echo "scons-review: AEGIS_PROJECT is not set" >&2 -# exit 1 -#fi -#DIR=`aegis -cd -dd $*` -#if test "X${DIR}" = "X"; then -# echo "scons-review: No Aegis directory for '$*'" >&2 -# exit 1 -#fi -#(cd ${DIR} && find * -name '*,D' | sort | xargs cat) | pine scons-dev diff --git a/bin/scons_dev_master.py b/bin/scons_dev_master.py index 4b1160f..cdbd68e 100644..100755 --- a/bin/scons_dev_master.py +++ b/bin/scons_dev_master.py @@ -1,4 +1,4 @@ -#!/bin/sh +#!/usr/bin/python # # A script for turning a generic Ubuntu system into a master for diff --git a/bin/sfsum b/bin/sfsum deleted file mode 100644 index 142793a..0000000 --- a/bin/sfsum +++ /dev/null @@ -1,148 +0,0 @@ -#!/usr/bin/env python -# -# sfsum.py: A script for parsing XML data exported from -# SourceForge projects. -# -# Right now, this is hard-coded to generate a summary of open bugs. -# -# XML data for SourceForge project is available for download by project -# administrators. Because it's intended for backup purposes, you have -# to slurp the whole set of data, including info about all of the closed -# items, the feature requests, etc., so it can get big. -# -# You can do this by hand (if you're an administrator) with a URL like -# this (where 30337 is the group_id for SCons): -# -# http://sourceforge.net/export/xml_export.php?group_id=30337 -# -# They also have a Perl script, called xml_export, available as part -# of a set of utilities called "adocman" which automate dealing with -# SourceForge document management from the command line. "adocman" -# is available at: -# -# https://sourceforge.net/projects/sitedocs/ -# -from __future__ import print_function - -import xml.sax -import xml.sax.saxutils -import sys - -SFName = { - 'Unassigned' : 'nobody', - 'Chad Austin' : 'aegis', - 'Charle Crain' : 'diewarzau', - 'Steven Knight' : 'stevenknight', - 'Steve Leblanc' : 'stevenleblanc', - 'Jeff Petkau' : 'jpet', - 'Anthony Roach' : 'anthonyroach', - 'Steven Shaw' : 'steven_shaw', - 'Terrel Shumway' : 'terrelshumway', - 'Greg Spencer' : 'greg_spencer', - 'Christoph Wiedemann' : 'wiedeman', -} - -class Artifact(object): - """Just a place to hold attributes that we find in the XML.""" - pass - -Artifacts = {} - -def nws(text): - """Normalize white space. This will become important if/when - we enhance this to search for arbitrary fields.""" - return ' '.join(text.split()) - -class ClassifyArtifacts(xml.sax.saxutils.DefaultHandler): - """ - Simple SAX subclass to classify the artifacts in SourceForge - XML output. - - This reads up the fields in an XML description and turns the field - descriptions into attributes of an Artificat object, on the fly. 
- Artifacts are of the following types: - - Bugs - Feature Requests - Patches - Support Requests - - We could, if we choose to, add additional types in the future - by creating additional trackers. - - This class loses some info right now because we don't pay attention - to the <messages> tag in the output, which contains a list of items - that have <field> tags in them. Right now, these just overwrite - each other in the Arifact object we create. - - We also don't pay attention to any attributes of a <field> tag other - than the "name" attribute. We'll need to extend this class if we - ever want to pay attention to those attributes. - """ - def __init__(self): - self.artifact = None - - def startElement(self, name, attrs): - self.text = "" - if name == 'artifact': - self.artifact = Artifact() - elif not self.artifact is None and name == 'field': - self.fname = attrs.get('name', None) - - def characters(self, ch): - if not self.artifact is None: - self.text = self.text + ch - - def endElement(self, name): - global Artifacts - if name == 'artifact': - type = self.artifact.artifact_type - try: - list = Artifacts[type] - except KeyError: - Artifacts[type] = list = [] - list.append(self.artifact) - self.artifact = None - elif not self.artifact is None and name == 'field': - setattr(self.artifact, self.fname, self.text) - -if __name__ == '__main__': - # Create a parser. - parser = xml.sax.make_parser() - # Tell the parser we are not interested in XML namespaces. - parser.setFeature(xml.sax.handler.feature_namespaces, 0) - - # Instantiate our handler and tell the parser to use it. - parser.setContentHandler(ClassifyArtifacts()) - - # Parse the input. - parser.parse(sys.argv[1]) - - # Hard-coded search for 'Open' bugs. This should be easily - # generalized once we figure out other things for this script to do. - bugs = [x for x in Artifacts['Bugs'] if x.status == 'Open'] - - print(list(Artifacts.keys())) - - print("%d open bugs" % len(bugs)) - - # Sort them into a separate list for each assignee. - Assigned = {} - for bug in bugs: - a = bug.assigned_to - try: - list = Assigned[a] - except KeyError: - Assigned[a] = list = [] - list.append(bug) - - for a in SFName.keys(): - try: - b = Assigned[SFName[a]] - except KeyError: - pass - else: - print(" %s" % a) - b.sort(key=lambda x, y: (x.artifact_id, y.artifact_id)) - for bug in b: - print(" %-6s %s" % (bug.artifact_id, bug.summary)) diff --git a/bin/timebuild b/bin/timebuild deleted file mode 100644 index d5af983..0000000 --- a/bin/timebuild +++ /dev/null @@ -1,65 +0,0 @@ -#!/bin/sh -# -# Profile running SCons to build itself from the current package. -# -# This runs "aegis -build" to build a current scons-src-*.tar.gz -# package, unpacks it in the supplied directory name, and then -# starts a profiled run of an SCons build, followed by another. -# This results in two profiles: -# -# NAME/NAME-0.prof -# profile of a build-everything run -# -# NAME/NAME-1.prof -# profile of an all-up-to-date run -# -# This also copies the build scons-src-*.tar.gz file to the NAME -# subdirectory, and tars up everything under src/ as NAME/src.tar.gz, -# so that repeated runs with different in-progress changes can serve -# as their own crude version control, so you don't lose that exact -# combination of features which performed best. - -if test X$1 = X; then - echo "Must supply name!" 
>&2 - exit 1 -fi - -VERSION=0.90 - -DIR=$1 - -SRC="scons-src-$VERSION" -SRC_TAR_GZ="${SRC}.tar.gz" -B_D_SRC_TAR_GZ="build/dist/${SRC_TAR_GZ}" - -echo "Building ${B_D_SRC_TAR_GZ}: " `date` -aegis -build ${B_D_SRC_TAR_GZ} - -echo "mkdir ${DIR}: " `date` -mkdir ${DIR} - -echo "cp ${B_D_SRC_TAR_GZ} ${DIR}: " `date` -cp ${B_D_SRC_TAR_GZ} ${DIR} - -echo "tar cf ${DIR}/src.tar.gz: " `date` -tar cf ${DIR}/src.tar.gz src - -cd ${DIR} - -echo "tar zxf ${SRC_TAR_GZ}: " `date` -tar zxf ${SRC_TAR_GZ} - -cd ${SRC} - -SCRIPT="src/script/scons.py" -ARGS="version=$VERSION" - -export SCONS_LIB_DIR=`pwd`/src/engine - -echo "Build run starting: " `date` -python $SCRIPT --profile=../$DIR-0.prof $ARGS > ../$DIR-0.log 2>&1 - -echo "Up-to-date run starting: " `date` -python $SCRIPT --profile=../$DIR-1.prof $ARGS > ../$DIR-1.log 2>&1 - -echo "Finished $DIR at: " `date` diff --git a/bin/update-release-info.py b/bin/update-release-info.py index 7db9037..4f33857 100644 --- a/bin/update-release-info.py +++ b/bin/update-release-info.py @@ -58,7 +58,7 @@ In 'post' mode, files are prepared for the next release cycle: # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. from __future__ import print_function -__revision__ = "bin/update-release-info.py 72ae09dc35ac2626f8ff711d8c4b30b6138e08e3 2019-08-08 14:50:06 bdeegan" +__revision__ = "bin/update-release-info.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan" import os import sys |
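
Another pattern repeated in this commit is replacing `os.system` shell strings with `subprocess` argument lists, as in `bin/docs-update-generated.py`. The wrapper below is a hypothetical, self-contained sketch of that call under the same helper layout the diff uses (`gen_folder` and the `-b/-f/-t/-v` pairs mirror the diff); the function name is an assumption for the example.

```python
# Hypothetical wrapper showing the os.system -> subprocess.call pattern:
# arguments are passed as a list, so no shell quoting is involved.
import os
import subprocess
import sys


def run_scons_proc(flist, gen_folder=os.path.join('doc', 'generated')):
    def argpair(key):
        """Return the '<key>.gen,<key>.mod' pair that scons-proc.py expects."""
        return '%s,%s' % (os.path.join(gen_folder, '%s.gen' % key),
                          os.path.join(gen_folder, '%s.mod' % key))

    cmd = [sys.executable, os.path.join('bin', 'scons-proc.py'),
           '-b', argpair('builders'),
           '-f', argpair('functions'),
           '-t', argpair('tools'),
           '-v', argpair('variables')] + list(flist)
    return subprocess.call(cmd)      # exit status; no shell string to escape
```
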