diff options
author | William Deegan <bill@baddogconsulting.com> | 2023-01-23 20:30:24 -0800 |
---|---|---|
committer | William Deegan <bill@baddogconsulting.com> | 2023-01-23 20:30:24 -0800 |
commit | dc28694b2088f5b102692c89b049fae56947a39b (patch) | |
tree | 593dd43ea6c6f59be4685ba327d393da18c290d7 | |
parent | c96a660e4d38ec2d3c5fd25fa74a04bc47c62845 (diff) | |
download | scons-git-dc28694b2088f5b102692c89b049fae56947a39b.tar.gz |
Added blurb to RELEASE.txt. Renamed function check_content_hash() from check_MD5().
-rw-r--r-- | CHANGES.txt | 7 | ||||
-rw-r--r-- | RELEASE.txt | 3 | ||||
-rw-r--r-- | SCons/Tool/tex.py | 25 |
3 files changed, 17 insertions, 18 deletions
diff --git a/CHANGES.txt b/CHANGES.txt index 2308a2f79..2475bddb8 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -63,6 +63,9 @@ RELEASE VERSION/DATE TO BE FILLED IN LATER From Ryan Saunders: - Fixed runtest.py failure on Windows caused by excessive escaping of the path to python.exe. + From Lukas Schrangl: + - Run LaTeX after biber/bibtex only if necessary + From Flaviu Tamas: - Added -fsanitize support to ParseFlags(). This will propagate to CCFLAGS and LINKFLAGS. @@ -1066,10 +1069,6 @@ RELEASE 3.1.1 - Mon, 07 Aug 2019 20:09:12 -0500 - JSON encoding errors for CacheDir config - JSON decoding errors for CacheDir config - From Lukas Schrangl: - - Run LaTeX after biber/bibtex only if necessary - - RELEASE 3.1.0 - Mon, 20 Jul 2019 16:59:23 -0700 From Joseph Brill: diff --git a/RELEASE.txt b/RELEASE.txt index 302e49468..146f3718c 100644 --- a/RELEASE.txt +++ b/RELEASE.txt @@ -44,6 +44,7 @@ CHANGED/ENHANCED EXISTING FUNCTIONALITY - Migrated logging logic for --taskmastertrace to use Python's logging module. Added logging to NewParallel Job class (Andrew Morrow's new parallel job implementation) - Preliminary support for Python 3.12. +- Run LaTeX after biber/bibtex only if necessary FIXES @@ -53,7 +54,7 @@ FIXES - A list argument as the source to the Copy() action function is now handled. Both the implementation and the strfunction which prints the progress message were adjusted. -- The Java Scanner processing of JAVACLASSPATH for dependencies (behavior that was introduced in SCons 4.4.0) is adjusted to split on the system's search path separator instead of on a space. The previous behavior meant that a path containing spaces (e.g. 
r"C:\somepath\My Classes") would diff --git a/SCons/Tool/tex.py b/SCons/Tool/tex.py index b9dd29e09..0a688f58a 100644 --- a/SCons/Tool/tex.py +++ b/SCons/Tool/tex.py @@ -253,10 +253,10 @@ def InternalLaTeXAuxAction(XXXLaTeXAction, target = None, source= None, env=None # .aux files already processed by BibTex already_bibtexed = [] - # - # routine to update MD5 hash and compare - # - def check_MD5(filenode, suffix): + def check_content_hash(filenode, suffix): + """ + Routine to update content hash and compare + """ global must_rerun_latex # two calls to clear old csig filenode.clear_memoized_values() @@ -295,7 +295,6 @@ def InternalLaTeXAuxAction(XXXLaTeXAction, target = None, source= None, env=None with open(logfilename, "rb") as f: logContent = f.read().decode(errors='replace') - # Read the fls file to find all .aux files flsfilename = targetbase + '.fls' flsContent = '' @@ -345,7 +344,7 @@ def InternalLaTeXAuxAction(XXXLaTeXAction, target = None, source= None, env=None result = BibTeXAction(bibfile, bibfile, env) if result != 0: check_file_error_message(env['BIBTEX'], 'blg') - check_MD5(suffix_nodes[".bbl"], ".bbl") + check_content_hash(suffix_nodes[".bbl"], ".bbl") # Now decide if biber will need to be run. # When the backend for biblatex is biber (by choice or default) the @@ -369,10 +368,10 @@ def InternalLaTeXAuxAction(XXXLaTeXAction, target = None, source= None, env=None result = BiberAction(bibfile, bibfile, env) if result != 0: check_file_error_message(env['BIBER'], 'blg') - check_MD5(suffix_nodes[".bbl"], ".bbl") + check_content_hash(suffix_nodes[".bbl"], ".bbl") # Now decide if latex will need to be run again due to index. 
- if check_MD5(suffix_nodes['.idx'],'.idx') or (count == 1 and run_makeindex): + if check_content_hash(suffix_nodes['.idx'], '.idx') or (count == 1 and run_makeindex): # We must run makeindex if Verbose: print("Need to run makeindex") @@ -387,10 +386,10 @@ def InternalLaTeXAuxAction(XXXLaTeXAction, target = None, source= None, env=None # Harder is case is where an action needs to be called -- that should be rare (I hope?) for index in check_suffixes: - check_MD5(suffix_nodes[index],index) + check_content_hash(suffix_nodes[index], index) # Now decide if latex will need to be run again due to nomenclature. - if check_MD5(suffix_nodes['.nlo'],'.nlo') or (count == 1 and run_nomenclature): + if check_content_hash(suffix_nodes['.nlo'], '.nlo') or (count == 1 and run_nomenclature): # We must run makeindex if Verbose: print("Need to run makeindex for nomenclature") @@ -402,7 +401,7 @@ def InternalLaTeXAuxAction(XXXLaTeXAction, target = None, source= None, env=None #return result # Now decide if latex will need to be run again due to glossary. - if check_MD5(suffix_nodes['.glo'],'.glo') or (count == 1 and run_glossaries) or (count == 1 and run_glossary): + if check_content_hash(suffix_nodes['.glo'], '.glo') or (count == 1 and run_glossaries) or (count == 1 and run_glossary): # We must run makeindex if Verbose: print("Need to run makeindex for glossary") @@ -414,7 +413,7 @@ def InternalLaTeXAuxAction(XXXLaTeXAction, target = None, source= None, env=None #return result # Now decide if latex will need to be run again due to acronyms. - if check_MD5(suffix_nodes['.acn'],'.acn') or (count == 1 and run_acronyms): + if check_content_hash(suffix_nodes['.acn'], '.acn') or (count == 1 and run_acronyms): # We must run makeindex if Verbose: print("Need to run makeindex for acronyms") @@ -427,7 +426,7 @@ def InternalLaTeXAuxAction(XXXLaTeXAction, target = None, source= None, env=None # Now decide if latex will need to be run again due to newglossary command. 
for ng in newglossary_suffix: - if check_MD5(suffix_nodes[ng[2]], ng[2]) or (count == 1): + if check_content_hash(suffix_nodes[ng[2]], ng[2]) or (count == 1): # We must run makeindex if Verbose: print("Need to run makeindex for newglossary") |