path: root/engine/SCons/Tool/tex.py
author    Luca Falavigna <dktrkranz@debian.org>    2013-03-11 22:26:44 +0100
committer Luca Falavigna <dktrkranz@debian.org>    2013-03-11 22:26:44 +0100
commit b8c0e44ecb8787b9b5608b5d2dd975d9faafe8a3 (patch)
tree   5c6836919ac668fdc86757d3e2f3614ff8b00aef /engine/SCons/Tool/tex.py
parent c259ca8db390351482f0c40550aa6b572e8db1e8 (diff)
parent cc58c8587a4e67f389b00e5d3278fae049ac7399 (diff)
Merge tag 'upstream/2.3.0'
Upstream version 2.3.0
Diffstat (limited to 'engine/SCons/Tool/tex.py')
-rw-r--r--  engine/SCons/Tool/tex.py | 78
1 file changed, 68 insertions(+), 10 deletions(-)
diff --git a/engine/SCons/Tool/tex.py b/engine/SCons/Tool/tex.py
index ce394e4..5960634 100644
--- a/engine/SCons/Tool/tex.py
+++ b/engine/SCons/Tool/tex.py
@@ -10,7 +10,7 @@ selection method.
"""
#
-# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 The SCons Foundation
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@@ -32,7 +32,7 @@ selection method.
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-__revision__ = "src/engine/SCons/Tool/tex.py issue-2856:2676:d23b7a2f45e8 2012/08/05 15:38:28 garyo"
+__revision__ = "src/engine/SCons/Tool/tex.py 2013/03/03 09:48:35 garyo"
import os.path
import re
@@ -100,6 +100,10 @@ makeglossary_re = re.compile(r"^[^%\n]*\\makeglossary", re.MULTILINE)
makeglossaries_re = re.compile(r"^[^%\n]*\\makeglossaries", re.MULTILINE)
makeacronyms_re = re.compile(r"^[^%\n]*\\makeglossaries", re.MULTILINE)
beamer_re = re.compile(r"^[^%\n]*\\documentclass\{beamer\}", re.MULTILINE)
+regex = r'^[^%\n]*\\newglossary\s*\[([^\]]+)\]?\s*\{([^}]*)\}\s*\{([^}]*)\}\s*\{([^}]*)\}\s*\{([^}]*)\}'
+newglossary_re = re.compile(regex, re.MULTILINE)
+
+newglossary_suffix = []
# search to find all files included by Latex
include_re = re.compile(r'^[^%\n]*\\(?:include|input){([^}]*)}', re.MULTILINE)
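
A quick, stand-alone sketch (not part of the patch) of what the new regular expression captures; the \newglossary arguments below are made-up sample values:

    import re

    regex = r'^[^%\n]*\\newglossary\s*\[([^\]]+)\]?\s*\{([^}]*)\}\s*\{([^}]*)\}\s*\{([^}]*)\}\s*\{([^}]*)\}'
    newglossary_re = re.compile(regex, re.MULTILINE)

    # Hypothetical glossaries-package declaration.
    sample = r'\newglossary[slg]{symbols}{syi}{syg}{List of Symbols}'
    m = newglossary_re.search(sample)
    print(m.groups())
    # -> ('slg', 'symbols', 'syi', 'syg', 'List of Symbols')
    # ScanFiles (later in this patch) turns groups 0, 2 and 3 into the
    # '.slg', '.syi' and '.syg' side-effect suffixes it records.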
@@ -125,6 +129,9 @@ LaTeXAction = None
# An action to run BibTeX on a file.
BibTeXAction = None
+# An action to run Biber on a file.
+BiberAction = None
+
# An action to run MakeIndex on a file.
MakeIndexAction = None
@@ -137,6 +144,9 @@ MakeGlossaryAction = None
# An action to run MakeIndex (for acronyms) on a file.
MakeAcronymsAction = None
+# An action to run MakeIndex (for newglossary commands) on a file.
+MakeNewGlossaryAction = None
+
# Used as a return value of modify_env_var if the variable is not set.
_null = SCons.Scanner.LaTeX._null
@@ -232,7 +242,8 @@ def InternalLaTeXAuxAction(XXXLaTeXAction, target = None, source= None, env=None
saved_hashes = {}
suffix_nodes = {}
- for suffix in all_suffixes:
+
+ for suffix in all_suffixes+sum(newglossary_suffix, []):
theNode = env.fs.File(targetbase + suffix)
suffix_nodes[suffix] = theNode
saved_hashes[suffix] = theNode.get_csig()
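
The suffix-tracking loop now also watches the per-glossary suffixes collected in newglossary_suffix; a minimal sketch with hypothetical values of how sum(..., []) flattens that list of lists:

    # Hypothetical values; newglossary_suffix holds one three-element suffix
    # list per \newglossary command found while scanning the sources.
    all_suffixes = ['.aux', '.log', '.bbl']
    newglossary_suffix = [['.slg', '.syi', '.syg']]
    print(all_suffixes + sum(newglossary_suffix, []))
    # -> ['.aux', '.log', '.bbl', '.slg', '.syi', '.syg']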
@@ -336,7 +347,9 @@ def InternalLaTeXAuxAction(XXXLaTeXAction, target = None, source= None, env=None
must_rerun_latex = True
# Now decide if biber will need to be run.
- # The information that bibtex reads from the .bcf file is
+ # When the backend for biblatex is biber (by choice or default) the
+ # citation information is put in the .bcf file.
+ # The information that biber reads from the .bcf file is
# pass-independent. If we find (below) that the .bbl file is unchanged,
# then the last latex saw a correct bibliography.
# Therefore only do this once
@@ -349,11 +362,11 @@ def InternalLaTeXAuxAction(XXXLaTeXAction, target = None, source= None, env=None
content = open(target_bcf, "rb").read()
if content.find("bibdata") != -1:
if Verbose:
- print "Need to run bibtex on ",bcffilename
+ print "Need to run biber on ",bcffilename
bibfile = env.fs.File(SCons.Util.splitext(target_bcf)[0])
- result = BibTeXAction(bibfile, bibfile, env)
+ result = BiberAction(bibfile, bibfile, env)
if result != 0:
- check_file_error_message(env['BIBTEX'], 'blg')
+ check_file_error_message(env['BIBER'], 'blg')
must_rerun_latex = True
# Now decide if latex will need to be run again due to index.
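
A stand-alone sketch of the check above, using a hypothetical 'main.bcf'; the point is that biber, like bibtex, is then invoked on the file basename rather than on the .bcf itself:

    import os.path

    target_bcf = 'main.bcf'   # hypothetical file written by biblatex
    content = open(target_bcf, 'rb').read() if os.path.exists(target_bcf) else b''
    if content.find(b'bibdata') != -1:
        # Same basename derivation as above: $BIBERCOM receives 'main'.
        print('need to run biber on', os.path.splitext(target_bcf)[0])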
@@ -410,6 +423,21 @@ def InternalLaTeXAuxAction(XXXLaTeXAction, target = None, source= None, env=None
'alg')
return result
+ # Now decide if latex will need to be run again due to newglossary command.
+ for ig in range(len(newglossary_suffix)):
+ if check_MD5(suffix_nodes[newglossary_suffix[ig][2]],newglossary_suffix[ig][2]) or (count == 1):
+ # We must run makeindex
+ if Verbose:
+ print "Need to run makeindex for newglossary"
+ newglfile = suffix_nodes[newglossary_suffix[ig][2]]
+ MakeNewGlossaryAction = SCons.Action.Action("$MAKENEWGLOSSARY ${SOURCE.filebase}%s -s ${SOURCE.filebase}.ist -t ${SOURCE.filebase}%s -o ${SOURCE.filebase}%s" % (newglossary_suffix[ig][2],newglossary_suffix[ig][0],newglossary_suffix[ig][1]), "$MAKENEWGLOSSARYCOMSTR")
+
+ result = MakeNewGlossaryAction(newglfile, newglfile, env)
+ if result != 0:
+ check_file_error_message('%s (newglossary)' % env['MAKENEWGLOSSARY'],
+ newglossary_suffix[ig][0])
+ return result
+
# Now decide if latex needs to be run yet again to resolve warnings.
if warning_rerun_re.search(logContent):
must_rerun_latex = True
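
For one hypothetical suffix triple recorded by ScanFiles, the action string built above expands as in this sketch (triple[2] is the file makeindex reads, triple[0] the transcript, triple[1] the output):

    # Hypothetical triple: [transcript, makeindex output, makeindex input].
    triple = ['.slg', '.syi', '.syg']
    cmd = ("$MAKENEWGLOSSARY ${SOURCE.filebase}%s -s ${SOURCE.filebase}.ist"
           " -t ${SOURCE.filebase}%s -o ${SOURCE.filebase}%s"
           % (triple[2], triple[0], triple[1]))
    print(cmd)
    # -> $MAKENEWGLOSSARY ${SOURCE.filebase}.syg -s ${SOURCE.filebase}.ist
    #    -t ${SOURCE.filebase}.slg -o ${SOURCE.filebase}.syi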
@@ -595,9 +623,23 @@ def ScanFiles(theFile, target, paths, file_tests, file_tests_search, env, graphi
for i in range(len(file_tests_search)):
if file_tests[i][0] is None:
+ if Verbose:
+ print "scan i ",i," files_tests[i] ",file_tests[i], file_tests[i][1]
file_tests[i][0] = file_tests_search[i].search(content)
if Verbose and file_tests[i][0]:
- print " found match for ",file_tests[i][-1][-1]
+ print " found match for ",file_tests[i][1][-1]
+ # for newglossary insert the suffixes in file_tests[i]
+ if file_tests[i][0] and file_tests[i][1][-1] == 'newglossary':
+ findresult = file_tests_search[i].findall(content)
+ for l in range(len(findresult)) :
+ (file_tests[i][1]).insert(0,'.'+findresult[l][3])
+ (file_tests[i][1]).insert(0,'.'+findresult[l][2])
+ (file_tests[i][1]).insert(0,'.'+findresult[l][0])
+ suffix_list = ['.'+findresult[l][0],'.'+findresult[l][2],'.'+findresult[l][3] ]
+ newglossary_suffix.append(suffix_list)
+ if Verbose:
+ print " new suffixes for newglossary ",newglossary_suffix
+
incResult = includeOnly_re.search(content)
if incResult:
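
A small sketch of that bookkeeping for a single hypothetical match: findall() yields one 5-tuple per \newglossary command, the suffixes are pushed onto the front of the test's suffix list, and the same triple is appended to the module-level newglossary_suffix:

    findresult = [('slg', 'symbols', 'syi', 'syg', 'List of Symbols')]  # hypothetical
    file_test = [True, ['newglossary']]       # stands in for file_tests[i]
    newglossary_suffix = []
    for groups in findresult:
        file_test[1].insert(0, '.' + groups[3])
        file_test[1].insert(0, '.' + groups[2])
        file_test[1].insert(0, '.' + groups[0])
        newglossary_suffix.append(['.' + groups[0], '.' + groups[2], '.' + groups[3]])
    print(file_test[1])          # -> ['.slg', '.syi', '.syg', 'newglossary']
    print(newglossary_suffix)    # -> [['.slg', '.syi', '.syg']]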
@@ -676,7 +718,8 @@ def tex_emitter_core(target, source, env, graphics_extensions):
makeglossary_re,
makeglossaries_re,
makeacronyms_re,
- beamer_re ]
+ beamer_re,
+ newglossary_re ]
# set up list with the file suffixes that need emitting
# when a feature is found
file_tests_suff = [['.aux','aux_file'],
@@ -693,7 +736,9 @@ def tex_emitter_core(target, source, env, graphics_extensions):
['.glo', '.gls', '.glg','glossary'],
['.glo', '.gls', '.glg','glossaries'],
['.acn', '.acr', '.alg','acronyms'],
- ['.nav', '.snm', '.out', '.toc','beamer'] ]
+ ['.nav', '.snm', '.out', '.toc','beamer'],
+ ['newglossary',] ]
+ # for newglossary the suffixes are added as we find the command
# build the list of lists
file_tests = []
for i in range(len(file_tests_search)):
@@ -722,6 +767,7 @@ def tex_emitter_core(target, source, env, graphics_extensions):
if Verbose:
print "search path ",paths
+ # scan all sources for side effect files
aux_files = []
file_tests = ScanFiles(source[0], target, paths, file_tests, file_tests_search, env, graphics_extensions, targetdir, aux_files)
@@ -839,6 +885,11 @@ def generate_common(env):
if BibTeXAction is None:
BibTeXAction = SCons.Action.Action("$BIBTEXCOM", "$BIBTEXCOMSTR")
+ # Define an action to run Biber on a file.
+ global BiberAction
+ if BiberAction is None:
+ BiberAction = SCons.Action.Action("$BIBERCOM", "$BIBERCOMSTR")
+
# Define an action to run MakeIndex on a file.
global MakeIndexAction
if MakeIndexAction is None:
@@ -898,6 +949,10 @@ def generate_common(env):
env['BIBTEXFLAGS'] = SCons.Util.CLVar('')
env['BIBTEXCOM'] = CDCOM + '${TARGET.dir} && $BIBTEX $BIBTEXFLAGS ${SOURCE.filebase}'
+ env['BIBER'] = 'biber'
+ env['BIBERFLAGS'] = SCons.Util.CLVar('')
+ env['BIBERCOM'] = CDCOM + '${TARGET.dir} && $BIBER $BIBERFLAGS ${SOURCE.filebase}'
+
env['MAKEINDEX'] = 'makeindex'
env['MAKEINDEXFLAGS'] = SCons.Util.CLVar('')
env['MAKEINDEXCOM'] = CDCOM + '${TARGET.dir} && $MAKEINDEX $MAKEINDEXFLAGS ${SOURCE.file}'
@@ -917,6 +972,9 @@ def generate_common(env):
env['MAKENCLFLAGS'] = '-s ${MAKENCLSTYLE} -t ${SOURCE.filebase}.nlg'
env['MAKENCLCOM'] = CDCOM + '${TARGET.dir} && $MAKENCL ${SOURCE.filebase}.nlo $MAKENCLFLAGS -o ${SOURCE.filebase}.nls'
+ env['MAKENEWGLOSSARY'] = 'makeindex'
+ env['MAKENEWGLOSSARYCOM'] = CDCOM + '${TARGET.dir} && $MAKENEWGLOSSARY '
+
def exists(env):
generate_darwin(env)
return env.Detect('tex')
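
For reference, a hypothetical SConstruct showing how the new construction variables can be overridden once this patch is applied (the document name and the flag are made up; the defaults are the ones installed by generate_common() above):

    # Hypothetical SConstruct; Environment() and the PDF builder are provided
    # by SCons itself when this file is read.
    env = Environment(tools=['default', 'pdftex'])
    env['BIBERFLAGS'] = '--quiet'            # assumption: extra biber options go here
    # env['MAKENEWGLOSSARY'] = 'makeindex'   # the default set by this patch
    env.PDF('main.pdf', 'main.tex')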