Diffstat (limited to 'include_server')
-rwxr-xr-x  include_server/basics.py                                  124
-rwxr-xr-x  include_server/basics_test.py                              51
-rwxr-xr-x  include_server/compress_files.py                           17
-rwxr-xr-x  include_server/include_analyzer.py                         13
-rwxr-xr-x  include_server/include_analyzer_memoizing_node.py           5
-rwxr-xr-x  include_server/include_analyzer_memoizing_node_test.py      8
-rwxr-xr-x  include_server/include_analyzer_test.py                    17
-rwxr-xr-x  include_server/include_server.py                           44
-rwxr-xr-x  include_server/include_server_test.py                      251
-rwxr-xr-x  include_server/parse_file_test.py                            4
10 files changed, 290 insertions, 244 deletions
diff --git a/include_server/basics.py b/include_server/basics.py
index c23c7be..a0d7228 100755
--- a/include_server/basics.py
+++ b/include_server/basics.py
@@ -21,6 +21,7 @@
__author__ = 'Nils Klarlund'
+import glob
import os.path
import resource
import signal
@@ -28,69 +29,82 @@ import sys
import tempfile
-# MISCELLANEOUS CONSTANTS
+# TEMPORARY LOCATIONS FOR GENERATIONS OF COMPRESSED FILES
-# Place for creation of temporary directories.
-client_tmp = None
-# And, the current such temporary directory.
-client_root = None
-# This constant is embedded in names of client root directories.
-INCLUDE_SERVER_NAME = 'include_server'
+class ClientRootKeeper(object):
+ """Determine the tmp directory to use for compressed files.
+  Use the RAM disk-like /dev/shm as the default place to store compressed
+  files if available. The protocol between the include server and the distcc
+  client stipulates that the top three directories constitute the prefix
+  prepended to absolute file paths.
-def InitializeClientTmp():
- """Determine the tmp directory to use.
+ Instance vars:
+ client_tmp: a path, the place for creation of temporary directories.
+ client_root: a path, the current such temporary directory
- Use the RAM disk-like /dev/shm as default place to store compressed files if
- available.
+ A typical client root looks like:
+
+ - /tmp/tmpBDoZQV.include_server-6642-13/padding, or
+ - /dev/shm/tmpBDoZQV.include_server-6642-19
+
+ Note that each path has exactly three directory components to it. This is an
+ invariant. Some client roots are padded with '/padding' to satisfy the
+ invariant.
"""
+
+ # This constant is embedded in names of client root directories.
+ INCLUDE_SERVER_NAME = 'include_server'
- global client_tmp
- if 'DISTCC_CLIENT_TMP' in os.environ:
- client_tmp = os.environ['DISTCC_CLIENT_TMP']
- elif os.path.isdir('/dev/shm') and os.access('/dev/shm',
- os.X_OK + os.W_OK + os.R_OK):
- client_tmp = '/dev/shm'
- else:
- client_tmp = '/tmp'
- if not client_tmp or client_tmp[0] != '/':
- sys.exit("""DISTCC_CLIENT_TMP must start with '/'.""")
- client_tmp = client_tmp.rstrip('/')
- # The protocol between the include server and distcc client stipulates
- # that the top three directories constitute the prefix prepended to absolute
- # file paths. To have room to make a temp directory, we'll need to have less
- # than two levels at this point.
- # Note: '/a/b'.split('/') == ['', 'a', 'b'].
- if len(client_tmp.split('/')) > 3:
- sys.exit('DISTCC_CLIENT_TMP must have at most two directory levels.')
-
-
-def InitializeClientRoot(generation):
- """Make a client directory for a generation of compressed files.
+ def __init__(self):
+ """Constructor."""
+ if 'DISTCC_CLIENT_TMP' in os.environ:
+ self.client_tmp = os.environ['DISTCC_CLIENT_TMP']
+ elif os.path.isdir('/dev/shm') and os.access('/dev/shm',
+ os.X_OK + os.W_OK + os.R_OK):
+ self.client_tmp = '/dev/shm'
+ else:
+ self.client_tmp = '/tmp'
+ if not self.client_tmp or self.client_tmp[0] != '/':
+ sys.exit("""DISTCC_CLIENT_TMP must start with '/'.""")
+ self.client_tmp = self.client_tmp.rstrip('/')
+    # To have room to make a temp directory, we can have at most two directory
+    # levels at this point. Note: '/a/b'.split('/') == ['', 'a', 'b'].
+ if len(self.client_tmp.split('/')) > 3:
+ sys.exit('DISTCC_CLIENT_TMP must have at most two directory levels.')
+ self.number_missing_levels = 3 - len(self.client_tmp.split('/'))
+ self.client_root = None
+
+ def Glob(self, pid_expr):
+ """Glob unpadded client roots whose pid is matched by pid expression."""
+ return glob.glob('%s/*.%s-%s-*'
+ % (self.client_tmp, self.INCLUDE_SERVER_NAME,
+ pid_expr))
- Arguments:
- generation: a natural number, usually 1 or slightly bigger; this number,
- minus 1, indicates how many times a reset of the caches has taken place.
- """
- assert client_tmp
- global client_root
- try:
- # Create a unique identifier that will never repeat. Use pid as suffix for
- # cleanout mechanism that wipes files not associated with a running pid.
- client_root = tempfile.mkdtemp('.%s-%s-%d' %
- (INCLUDE_SERVER_NAME,
- os.getpid(), generation),
- dir=client_tmp)
- number_missing_levels = 3 - len(client_tmp.split('/'))
- # Stuff client_root path until we have exactly three levels in all.
- for unused_i in range(number_missing_levels):
- client_root += '/padding'
- os.mkdir(client_root)
- except (IOError, OSError), why:
- sys.exit('Could not create client root directory %s: %s' %
- (client_root, why))
-
+ def ClientRootMakedir(self, generation):
+ """Make a new client directory for a generation of compressed files.
+
+ Arguments:
+ generation: a natural number, usually 1 or slightly bigger; this number,
+ minus 1, indicates how many times a reset of the caches has taken place.
+ """
+ try:
+ # Create a unique identifier that will never repeat. Use pid as suffix for
+ # cleanout mechanism that wipes files not associated with a running pid.
+ client_root_before_padding = tempfile.mkdtemp(
+ '.%s-%s-%d' %
+ (self.INCLUDE_SERVER_NAME,
+ os.getpid(), generation),
+ dir=self.client_tmp)
+ self.client_root = (client_root_before_padding
+ + '/padding' * self.number_missing_levels)
+ if not os.path.isdir(self.client_root):
+ os.makedirs(self.client_root)
+ except (IOError, OSError), why:
+ sys.exit('Could not create client root directory %s: %s' %
+ (self.client_root, why))
+
# For automated emails, see also src/emaillog.h.
DCC_EMAILLOG_WHOM_TO_BLAME = os.getenv('DISTCC_EMAILLOG_WHOM_TO_BLAME',
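As orientation (not part of the diff itself), here is a minimal sketch of how the new ClientRootKeeper is driven, using only the attributes and methods introduced in the hunk above:

    import os
    import basics

    # Choose DISTCC_CLIENT_TMP, /dev/shm, or /tmp, and remember how many
    # '/padding' components are needed to reach exactly three levels.
    keeper = basics.ClientRootKeeper()

    # Create the client root for generation 1 of compressed files, e.g.
    # /dev/shm/tmpXXXXXX.include_server-<pid>-1 (padded if necessary).
    keeper.ClientRootMakedir(1)
    compressed_root = keeper.client_root

    # Find the unpadded client roots created by this process, e.g. for
    # cleanup; passing '*' instead of a pid matches every include server.
    own_roots = keeper.Glob(str(os.getpid()))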
diff --git a/include_server/basics_test.py b/include_server/basics_test.py
index 7b7308c..31fff38 100755
--- a/include_server/basics_test.py
+++ b/include_server/basics_test.py
@@ -21,6 +21,7 @@ __author__ = "Nils Klarlund"
import os
import os.path
+import tempfile
import unittest
import basics
@@ -34,32 +35,52 @@ class BasicsTest(unittest.TestCase):
def tearDown(self):
pass
- def test_InitializeClientTmp(self):
+ def test_ClientRootKeeper(self):
os.environ['DISTCC_CLIENT_TMP'] = 'to/be'
- self.assertRaises(SystemExit, basics.InitializeClientTmp)
+ self.assertRaises(SystemExit, basics.ClientRootKeeper)
os.environ['DISTCC_CLIENT_TMP'] = '/to/be/or'
- self.assertRaises(SystemExit, basics.InitializeClientTmp)
+ self.assertRaises(SystemExit, basics.ClientRootKeeper)
try:
- os_mkdir = os.mkdir
+ tempfile_mkdtemp = tempfile.mkdtemp
+ os_makedirs = os.makedirs
- def Mock_os_mkdir(f, *args):
+ def Mock_tempfile_mkdtemp(pat, dir):
+ self.assert_((pat, dir)
+ in
+ [('.%s-%s-%d' %
+ (basics.ClientRootKeeper.INCLUDE_SERVER_NAME,
+ os.getpid(), generation),
+ prefix)
+ for generation, prefix in
+ [(1,'/to/be'), (2, '/to')]])
+ return (dir == '/to/be' and '/to/be/xxxxxx'
+ or dir == '/to' and '/to/xxxxxxx')
+
+ def Mock_os_makedirs(f, *unused_args):
if not f.startswith('/to/'):
raise Exception, f
- os.mkdir = Mock_os_mkdir
+
+ tempfile.mkdtemp = Mock_tempfile_mkdtemp
+ os.makedirs = Mock_os_makedirs
+
os.environ['DISTCC_CLIENT_TMP'] = '/to/be'
- basics.InitializeClientTmp()
- basics.InitializeClientRoot(1)
- self.assertEqual(os.path.dirname(basics.client_root), "/to/be")
+ client_root_keeper = basics.ClientRootKeeper()
+ client_root_keeper.ClientRootMakedir(1)
+ self.assertEqual(os.path.dirname(client_root_keeper.client_root),
+ "/to/be")
os.environ['DISTCC_CLIENT_TMP'] = '/to'
- basics.InitializeClientTmp()
- basics.InitializeClientRoot(2)
+ client_root_keeper = basics.ClientRootKeeper()
+ client_root_keeper.ClientRootMakedir(2)
self.assertEqual(os.path.dirname(
- os.path.dirname(basics.client_root)), "/to")
- self.assertEqual(os.path.basename(basics.client_root), "padding")
+ os.path.dirname(client_root_keeper.client_root)), "/to")
+ self.assertEqual(os.path.basename(client_root_keeper.client_root),
+ "padding")
self.assertEqual(len(
- [ None for ch in basics.client_root if ch == '/' ]), 3)
+ [ None for ch in client_root_keeper.client_root if ch == '/' ]), 3)
finally:
- os.mkdir = os_mkdir
+ tempfile.mkdtemp = tempfile_mkdtemp
+ os.makedirs = os_makedirs
unittest.main()
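The test now patches tempfile.mkdtemp and os.makedirs, the two calls ClientRootKeeper actually makes, instead of os.mkdir. The save/patch/restore shape it relies on, reduced to an illustrative sketch (the '/to/be' root and the 'tmpFAKE' name are fakes mirroring the test's own fakes, not real paths):

    import os
    import tempfile

    import basics

    os.environ['DISTCC_CLIENT_TMP'] = '/to/be'   # same fake root the test uses
    saved_mkdtemp, saved_makedirs = tempfile.mkdtemp, os.makedirs
    try:
        # Fake out the filesystem so no directories are really created.
        tempfile.mkdtemp = lambda suffix, dir: dir + '/tmpFAKE' + suffix
        os.makedirs = lambda path: None
        keeper = basics.ClientRootKeeper()
        keeper.ClientRootMakedir(1)
        # keeper.client_root is now '/to/be/tmpFAKE.include_server-<pid>-1'
    finally:
        # Always restore the real functions, exactly as the test does.
        tempfile.mkdtemp, os.makedirs = saved_mkdtemp, saved_makedirs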
diff --git a/include_server/compress_files.py b/include_server/compress_files.py
index 133eb5f..ac615da 100755
--- a/include_server/compress_files.py
+++ b/include_server/compress_files.py
@@ -40,20 +40,19 @@ class CompressFiles(object):
# The realpath_map indices of files that have been compressed already.
self.files_compressed = set([])
- def Compress(self, include_closure, client_root):
+ def Compress(self, include_closure, client_root_keeper):
"""Copy files in include_closure to the client_root directory, compressing
them as we go, and also inserting #line directives.
Arguments:
include_closure: a dictionary, see IncludeAnalyzer.RunAlgorithm
- client_root: a directory name, see basics.py, the directory to which
- compressed files are mirrored
+ client_root_keeper: an object as defined in basics.py
Returns: a list of filepaths under client_root
Walk through the files in the include closure. Make sure their compressed
- images (with either .lzo or lzo.abs extension) exist under client_root. Also
- collect all the .lzo or .lzo.abs filepaths in a list, which is the return
- value.
+  images (with either .lzo or .lzo.abs extension) exist under client_root as
+ handled by client_root_keeper. Also collect all the .lzo or .lzo.abs
+ filepaths in a list, which is the return value.
"""
realpath_string = self.realpath_map.string
files = [] # where we accumulate files
@@ -66,9 +65,11 @@ class CompressFiles(object):
if len(include_closure[realpath_idx]) > 0:
# Designate by suffix '.abs' that this file is to become known by an
# absolute filepath through a #line directive.
- new_filepath = "%s%s.lzo.abs" % (client_root, realpath)
+ new_filepath = "%s%s.lzo.abs" % (client_root_keeper.client_root,
+ realpath)
else:
- new_filepath = "%s%s.lzo" % (client_root, realpath)
+ new_filepath = "%s%s.lzo" % (client_root_keeper.client_root,
+ realpath)
files.append(new_filepath)
if not new_filepath in self.files_compressed:
self.files_compressed.add(new_filepath)
diff --git a/include_server/include_analyzer.py b/include_server/include_analyzer.py
index 4706d14..0d08e12 100755
--- a/include_server/include_analyzer.py
+++ b/include_server/include_analyzer.py
@@ -90,10 +90,10 @@ class IncludeAnalyzer(object):
self.include_dir_pairs = set([]) # the pairs (quote search list,
# angle search lists)
- def __init__(self, stat_reset_triggers={}):
+ def __init__(self, client_root_keeper, stat_reset_triggers={}):
self.generation = 1
- basics.InitializeClientRoot(self.generation)
- self.client_root = basics.client_root
+ self.client_root_keeper = client_root_keeper
+ self.client_root_keeper.ClientRootMakedir(self.generation)
self.stat_reset_triggers = stat_reset_triggers
self.translation_unit = "unknown translation unit"
self.timer = None
@@ -217,7 +217,7 @@ class IncludeAnalyzer(object):
path)
self.ClearStatCaches()
- def DoCompilationCommand(self, cmd, currdir):
+ def DoCompilationCommand(self, cmd, currdir, client_root_keeper):
"""Parse and and process the command; then gather files and links."""
self.translation_unit = "unknown translation unit" # don't know yet
@@ -254,7 +254,7 @@ class IncludeAnalyzer(object):
# performance degradation for large link farms. We expect at most a
# handful.
links = self.mirror_path.Links()
- files = self.compress_files.Compress(include_closure, basics.client_root)
+ files = self.compress_files.Compress(include_closure, client_root_keeper)
realpath_map = self.realpath_map
files_and_links = files + links
@@ -357,6 +357,5 @@ class IncludeAnalyzer(object):
# But we cannot delete any such information, because slow-poke distcc
# clients that have received earlier include manifests perhaps only now get
# around to reading a previous generation client root directory.
- basics.InitializeClientRoot(self.generation)
- self.client_root = basics.client_root
+ self.client_root_keeper.ClientRootMakedir(self.generation)
self._InitializeAllCaches()
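Taken together, callers now construct the analyzer with a keeper and hand the same keeper to DoCompilationCommand, as the test changes further down in this diff also show. A condensed sketch under that assumption (the command line and test_data paths are just examples borrowed from the tests):

    import os
    import basics
    import include_analyzer_memoizing_node

    keeper = basics.ClientRootKeeper()
    analyzer = (
        include_analyzer_memoizing_node.IncludeAnalyzerMemoizingNode(keeper))
    files_and_links = analyzer.DoCompilationCommand(
        "gcc -Itest_data/dfoo test_data/stat_triggers.c".split(),
        os.getcwd(),
        analyzer.client_root_keeper)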
diff --git a/include_server/include_analyzer_memoizing_node.py b/include_server/include_analyzer_memoizing_node.py
index 7a917b4..300ad45 100755
--- a/include_server/include_analyzer_memoizing_node.py
+++ b/include_server/include_analyzer_memoizing_node.py
@@ -276,9 +276,10 @@ class IncludeAnalyzerMemoizingNode(include_analyzer.IncludeAnalyzer):
# symbol that is being defined or redefined.
self.parse_file.SetDefineCallback(self.support_master.InvalidateRecords)
- def __init__(self, stat_reset_triggers={}):
+ def __init__(self, client_root_keeper, stat_reset_triggers={}):
"""Constructor."""
include_analyzer.IncludeAnalyzer.__init__(self,
+ client_root_keeper,
stat_reset_triggers)
self._InitializeAllCachesMemoizing()
@@ -446,7 +447,7 @@ class IncludeAnalyzerMemoizingNode(include_analyzer.IncludeAnalyzer):
dir_map.string[d_],
includepath_map.string[fp_]),
currdir_idx,
- self.client_root)
+ self.client_root_keeper.client_root)
# We have fp_resolved_pair if and only if we have fp_real_idx
assert not fp_resolved_pair or fp_real_idx
diff --git a/include_server/include_analyzer_memoizing_node_test.py b/include_server/include_analyzer_memoizing_node_test.py
index 898c921..9b29a65 100755
--- a/include_server/include_analyzer_memoizing_node_test.py
+++ b/include_server/include_analyzer_memoizing_node_test.py
@@ -35,15 +35,17 @@ class IncludeAnalyzerMemoizingNodeUnitTest(unittest.TestCase):
"""Translate the indices in an include closure to their denoted strings."""
return (
dict((self.realpath_map.string[rp_idx],
- [ (self.directory_map.string[dir_idx], self.includepath_map.string[ip_idx])
+ [ (self.directory_map.string[dir_idx],
+ self.includepath_map.string[ip_idx])
for (dir_idx, ip_idx) in include_closure[rp_idx] ])
for rp_idx in include_closure))
def setUp(self):
basics.opt_debug_pattern = 1
- basics.InitializeClientTmp()
+ client_root_keeper = basics.ClientRootKeeper()
self.include_analyzer = (
- include_analyzer_memoizing_node.IncludeAnalyzerMemoizingNode())
+ include_analyzer_memoizing_node.IncludeAnalyzerMemoizingNode(
+ client_root_keeper))
self.includepath_map = self.include_analyzer.includepath_map
self.canonical_path = self.include_analyzer.canonical_path
diff --git a/include_server/include_analyzer_test.py b/include_server/include_analyzer_test.py
index cb46a69..619674c 100755
--- a/include_server/include_analyzer_test.py
+++ b/include_server/include_analyzer_test.py
@@ -40,10 +40,11 @@ class IncludeAnalyzerTest(unittest.TestCase):
self.global_dirs = []
basics.opt_print_statistics = False
basics.opt_debug_pattern = 1
- basics.InitializeClientTmp()
+ client_root_keeper = basics.ClientRootKeeper()
if algorithm == basics.MEMOIZING:
self.include_analyzer = (
- include_analyzer_memoizing_node.IncludeAnalyzerMemoizingNode())
+ include_analyzer_memoizing_node.IncludeAnalyzerMemoizingNode(
+ client_root_keeper))
else:
self.fail("Algorithm not known.")
@@ -172,7 +173,8 @@ class IncludeAnalyzerTest(unittest.TestCase):
for f_name in lst:
self.failUnless(
re.match(r"%s/.+[.]include_server[-][0-9]+[-]%s"
- % (basics.client_tmp, expected),
+ % (self.include_analyzer.client_root_keeper.client_tmp,
+ expected),
f_name),
f_name)
@@ -223,7 +225,8 @@ class IncludeAnalyzerTest(unittest.TestCase):
files_and_links = self.include_analyzer.DoCompilationCommand(
"gcc -Itest_data/dfoo test_data/stat_triggers.c".split(),
- os.getcwd())
+ os.getcwd(),
+ self.include_analyzer.client_root_keeper)
# Check that we picked up the dfoo version of the .h file!
self.assertEqual(GetFileNamesFromAbsLzoName(files_and_links),
@@ -254,7 +257,8 @@ class IncludeAnalyzerTest(unittest.TestCase):
files_and_links = self.include_analyzer.DoCompilationCommand(
"gcc -Itest_data/dfoo test_data/stat_triggers.c".split(),
- os.getcwd())
+ os.getcwd(),
+ self.include_analyzer.client_root_keeper)
self.assertEqual(self.include_analyzer.generation, 2)
CheckGeneration(files_and_links, 2)
@@ -295,7 +299,8 @@ class IncludeAnalyzerTest(unittest.TestCase):
files_and_links = self.include_analyzer.DoCompilationCommand(
"gcc -Itest_data/dfoo test_data/stat_triggers.c".split(),
- os.getcwd())
+ os.getcwd(),
+ self.include_analyzer.client_root_keeper)
# Now, check that we again picked up the dfoo version of the .h file.
self.assertEqual(GetFileNamesFromAbsLzoName(files_and_links),
diff --git a/include_server/include_server.py b/include_server/include_server.py
index 249c9fd..b0562f6 100755
--- a/include_server/include_server.py
+++ b/include_server/include_server.py
@@ -202,24 +202,22 @@ def _RemoveDirectoryTree(tree_top):
pass
-def _CleanOutClientRoots(client_root):
- """Delete client root directory and everything below, for all generations.
- Argument:
- client_root: a directory path ending in "*distcc-*-*"
+def _CleanOutClientRoots(client_root_keeper, pid=None):
+  """Delete the client root directories belonging to a given process.
+ Args:
+ client_root_keeper: an object of type ClientRootKeeper
+ pid: None (which means 'pid of current process') or an integer
"""
- # Determine all generations of this directory.
- hyphen_ultimate_position = client_root.rfind('-')
- client_roots = glob.glob("%s-*" % client_root[:hyphen_ultimate_position])
- assert client_root in client_roots, (client_root, client_roots)
- for client_root_ in client_roots:
+ if not pid:
+ pid = os.getpid()
+ for client_root_ in client_root_keeper.Glob(str(pid)):
_RemoveDirectoryTree(client_root_)
-def _CleanOutOthers():
+def _CleanOutOthers(client_root_keeper):
"""Search for left-overs from include servers that have passed away."""
- # Find all distcc-pump directories whether abandoned or not.
- distcc_directories = glob.glob("%s/*.%s-*-*" % (basics.client_tmp,
- basics.INCLUDE_SERVER_NAME))
+ # Find all client root subdirectories whether abandoned or not.
+ distcc_directories = client_root_keeper.Glob('*')
for directory in distcc_directories:
# Fish out pid from end of directory name.
hyphen_ultimate_position = directory.rfind('-')
@@ -245,7 +243,7 @@ def _CleanOutOthers():
continue # no access, not ours
Debug(DEBUG_TRACE,
"Cleaning out '%s' after defunct include server." % directory)
- _CleanOutClientRoots(directory)
+ _CleanOutClientRoots(client_root_keeper, pid)
NEWLINE_RE = re.compile(r"\n", re.MULTILINE)
@@ -424,7 +422,10 @@ def DistccIncludeHandlerGenerator(include_analyzer):
# accumulated operations can be executed after DoCompilationCommand
# when the timer has been cancelled.
include_analyzer.timer = basics.IncludeAnalyzerTimer()
- files_and_links = include_analyzer.DoCompilationCommand(cmd, currdir)
+ files_and_links = (
+ include_analyzer.
+ DoCompilationCommand(cmd, currdir,
+ include_analyzer.client_root_keeper))
finally:
# The timer should normally be cancelled during normal execution
# flow. Still, we want to make sure that this is indeed the case in
@@ -636,19 +637,20 @@ def _SetUp(include_server_port):
if os.sep != '/':
sys.exit("Expected '/' as separator in filepaths.")
- # Determine basics.client_tmp now.
- basics.InitializeClientTmp()
+ client_root_keeper = basics.ClientRootKeeper()
# So that we can call this function --- to sweep out possible junk. Also, this
# will allow the include analyzer to call InitializeClientRoot.
- _CleanOutOthers()
+ _CleanOutOthers(client_root_keeper)
Debug(DEBUG_TRACE, "Starting socketserver %s" % include_server_port)
# Create the analyser.
include_analyzer = (
include_analyzer_memoizing_node.IncludeAnalyzerMemoizingNode(
- basics.opt_stat_reset_triggers))
+ client_root_keeper,
+ basics.opt_stat_reset_triggers))
include_analyzer.email_sender = _EmailSender()
+
# Wrap it inside a handler that is a part of a UnixStreamServer.
server = QueuingSocketServer(
include_server_port,
@@ -661,8 +663,8 @@ def _SetUp(include_server_port):
def _CleanOut(include_analyzer, include_server_port):
"""Prepare shutdown by cleaning out files and unlinking port."""
- if include_analyzer and include_analyzer.client_root:
- _CleanOutClientRoots(include_analyzer.client_root)
+ if include_analyzer and include_analyzer.client_root_keeper:
+ _CleanOutClientRoots(include_analyzer.client_root_keeper)
try:
os.unlink(include_server_port)
except OSError:
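With the glob pattern owned by the keeper, cleanup reduces to globbing and removing. A rough sketch of the same idea (shutil.rmtree stands in here for include_server's private _RemoveDirectoryTree helper; this is not the module's code):

    import os
    import shutil

    import basics

    keeper = basics.ClientRootKeeper()

    # Client roots left behind by this process, any generation.
    for stale_root in keeper.Glob(str(os.getpid())):
        shutil.rmtree(stale_root, ignore_errors=True)

    # keeper.Glob('*') would instead match the roots of every include server,
    # which is what _CleanOutOthers inspects before deciding what to delete.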
diff --git a/include_server/include_server_test.py b/include_server/include_server_test.py
index c115106..c1309ec 100755
--- a/include_server/include_server_test.py
+++ b/include_server/include_server_test.py
@@ -25,6 +25,8 @@ ultimately the notion of an AssertionError.
__author__ = "Nils Klarlund"
import os
+import sys
+import traceback
import unittest
import basics
@@ -57,131 +59,130 @@ class IncludeServerTest(unittest.TestCase):
return set([ self.include_analyzer.realpath_map.string[f] for f in files ])
def test_IncludeHandler_handle(self):
+ self_test = self
+ client_root_keeper = basics.ClientRootKeeper()
+    old_RCwd = distcc_pump_c_extensions.RCwd
+ distcc_pump_c_extensions.RCwd = None # to be set below
+ old_RArgv = distcc_pump_c_extensions.RArgv
+ distcc_pump_c_extensions.RArgv = None # to be set below
+ old_XArgv = distcc_pump_c_extensions.XArgv
+ distcc_pump_c_extensions.XArgv = lambda _, __: None
+ old_StreamRequestHandler = (
+ include_server.SocketServer.StreamRequestHandler)
+
+ class Mock_StreamRequestHandler(object):
+ def __init__(self):
+ self.rfile = lambda: None
+ self.rfile.fileno = lambda: 27
+ self.wfile = lambda: None
+ self.wfile.fileno = lambda: 27
+
+ include_server.SocketServer.StreamRequestHandler = (
+ Mock_StreamRequestHandler)
+
+ include_analyzer = (
+ include_analyzer_memoizing_node.
+ IncludeAnalyzerMemoizingNode(client_root_keeper))
+
+ class Mock_EmailSender(object):
+
+    def __init__(self):
+ self.expect = lambda: None
+
+ def MaybeSendEmail(self, fd, force=False, never=False):
+ fd.seek(0)
+ text = fd.read()
+ self.expect(text, force, never)
+ fd.close()
+ raise
+
+ mock_email_sender = include_analyzer.email_sender = Mock_EmailSender()
+
+ include_handler = (
+ include_server.DistccIncludeHandlerGenerator(include_analyzer)())
+
+ # Wow, that was a lot of set-up. Now exercise the include server and
+ # analyzer with an emphasis on triggering exceptions.
+
+ # Exercise 1: non-existent translation unit.
+
+ distcc_pump_c_extensions.RArgv = lambda self: [ "gcc", "parse.c" ]
+ distcc_pump_c_extensions.RCwd = lambda self: os.getcwd()
+
+ def Expect1(txt, force, never):
+ self_test.assert_(
+ "Include server not covering: " +
+ "Could not find translation unit 'parse.c'" in txt)
+ self_test.assertEqual(never, True)
+
+ mock_email_sender.expect = Expect1
try:
- self_test = self
- basics.InitializeClientTmp()
- old_RWcd = distcc_pump_c_extensions.RCwd
- distcc_pump_c_extensions.RCwd = None # to be set below
- old_RArgv = distcc_pump_c_extensions.RArgv
- distcc_pump_c_extensions.RArgv = None # to be set below
- old_XArgv = distcc_pump_c_extensions.XArgv
- distcc_pump_c_extensions.XArgv = lambda _, __: None
- old_StreamRequestHandler = (
- include_server.SocketServer.StreamRequestHandler)
-
- class Mock_StreamRequestHandler(object):
- def __init__(self):
- self.rfile = lambda: None
- self.rfile.fileno = lambda: 27
- self.wfile = lambda: None
- self.wfile.fileno = lambda: 27
-
- include_server.SocketServer.StreamRequestHandler = (
- Mock_StreamRequestHandler)
-
- include_analyzer = (
- include_analyzer_memoizing_node.IncludeAnalyzerMemoizingNode())
-
- class Mock_EmailSender(object):
-
- def __init(self):
- self.expect = lambda: None
-
- def MaybeSendEmail(self, fd, force=False, never=False):
- fd.seek(0)
- text = fd.read()
- self.expect(text, force, never)
- fd.close()
- raise
-
- mock_email_sender = include_analyzer.email_sender = Mock_EmailSender()
-
- include_handler = (
- include_server.DistccIncludeHandlerGenerator(include_analyzer)())
-
- # Wow, that was a lot of set-up. Now exercise the include server and
- # analyzer with an emphasis on triggering exceptions.
-
- # Exercise 1: non-existent translation unit.
-
- distcc_pump_c_extensions.RArgv = lambda self: [ "gcc", "parse.c" ]
- distcc_pump_c_extensions.RCwd = lambda self: os.getcwd()
-
- def Expect1(txt, force, never):
- self_test.assert_(
- "Include server not covering: " +
- "Could not find translation unit 'parse.c'" in txt)
- self_test.assertEqual(never, True)
-
- mock_email_sender.expect = Expect1
- try:
- include_handler.handle()
- except NotCoveredError:
- pass
- else:
- raise AssertionError
-
- # Exercise 2: provoke assertion error in cache_basics by providing an
- # entirely false value of current directory as provided in RCwd.
-
- distcc_pump_c_extensions.RArgv = lambda self: [ "gcc", "parse.c" ]
- distcc_pump_c_extensions.RCwd = lambda self: "/"
- # The cwd will be changed because of false value.
- oldcwd = os.getcwd()
-
- # We must distinguish between provoked and erroneous exceptions. So, we
- # mock out, in a sense, the provoked assertion exception that we
- # expect. The variable got_here allows us to filter the provoked exception
- # away from unexpected ones.
- got_here = []
-
- def Expect2(txt, force, never):
-
- self_test.assert_(
- "Include server internal error: 'exceptions.AssertionError" in txt)
-
- self_test.assert_("""for translation unit 'parse.c'""" in txt)
-
- # This email should be sent.
- self_test.assertEqual(never, False)
-
- got_here.append(True)
-
- mock_email_sender.expect = Expect2
- try:
- include_handler.handle()
- except AssertionError:
- os.chdir(oldcwd)
- # Make sure that we're catching the induced AssertionError, not one
- # produced in Except2.
- self.assert_(got_here)
- else:
- raise AssertionError
-
- # Exercise 3: provoke a NotCoveredError due to an absolute #include.
-
- distcc_pump_c_extensions.RArgv = lambda self: [ "gcc",
- "test_data/contains_abs_include.c" ]
- distcc_pump_c_extensions.RCwd = lambda self: os.getcwd()
-
- def Expect3(txt, force, never):
- self_test.assert_(
- "Filepath must be relative but isn't: '/love/of/my/life'."
- in txt)
- # Now check that this email is scheduled to not be sent.
- self_test.assertEqual(never, True)
-
- mock_email_sender.expect = Expect3
- try:
- include_handler.handle()
- except NotCoveredError:
- pass
-
- finally:
- distcc_pump_c_extensions.RWcd = old_RWcd
- distcc_pump_c_extensions.RArgv = old_RArgv
- distcc_pump_c_extensions.XArgv = old_XArgv
- include_server.SocketServer.StreamRequestHandler = (
- old_StreamRequestHandler)
+ include_handler.handle()
+ except NotCoveredError:
+ pass
+ else:
+ raise AssertionError
+
+ # Exercise 2: provoke assertion error in cache_basics by providing an
+ # entirely false value of current directory as provided in RCwd.
+
+ distcc_pump_c_extensions.RArgv = lambda self: [ "gcc", "parse.c" ]
+ distcc_pump_c_extensions.RCwd = lambda self: "/"
+ # The cwd will be changed because of false value.
+ oldcwd = os.getcwd()
+
+ # We must distinguish between provoked and erroneous exceptions. So, we
+ # mock out, in a sense, the provoked assertion exception that we
+ # expect. The variable got_here allows us to filter the provoked exception
+ # away from unexpected ones.
+ got_here = []
+
+ def Expect2(txt, force, never):
+
+ self_test.assert_(
+ "Include server internal error: 'exceptions.AssertionError" in txt)
+
+ self_test.assert_("""for translation unit 'parse.c'""" in txt)
+
+ # This email should be sent.
+ self_test.assertEqual(never, False)
+
+ got_here.append(True)
+
+ mock_email_sender.expect = Expect2
+ try:
+ include_handler.handle()
+ except AssertionError:
+ os.chdir(oldcwd)
+ # Make sure that we're catching the induced AssertionError, not one
+      # produced in Expect2.
+ self.assert_(got_here)
+ else:
+ raise AssertionError
+
+ # Exercise 3: provoke a NotCoveredError due to an absolute #include.
+
+ distcc_pump_c_extensions.RArgv = lambda self: [ "gcc",
+ "test_data/contains_abs_include.c" ]
+ distcc_pump_c_extensions.RCwd = lambda self: os.getcwd()
+
+ def Expect3(txt, force, never):
+ self_test.assert_(
+ "Filepath must be relative but isn't: '/love/of/my/life'."
+ in txt)
+ # Now check that this email is scheduled to not be sent.
+ self_test.assertEqual(never, True)
+
+ mock_email_sender.expect = Expect3
+ try:
+ include_handler.handle()
+ except NotCoveredError:
+ pass
+
+    distcc_pump_c_extensions.RCwd = old_RCwd
+ distcc_pump_c_extensions.RArgv = old_RArgv
+ distcc_pump_c_extensions.XArgv = old_XArgv
+ include_server.SocketServer.StreamRequestHandler = (
+ old_StreamRequestHandler)
unittest.main()
diff --git a/include_server/parse_file_test.py b/include_server/parse_file_test.py
index b42e7f8..1f53950 100755
--- a/include_server/parse_file_test.py
+++ b/include_server/parse_file_test.py
@@ -33,9 +33,9 @@ class parse_file_Test(unittest.TestCase):
def setUp(self):
include_server.print_statistics = False
- basics.InitializeClientTmp()
+ client_root_keeper = basics.ClientRootKeeper()
include_server.write_include_closure_file = True
- self.include_analyzer = include_analyzer.IncludeAnalyzer()
+ self.include_analyzer = include_analyzer.IncludeAnalyzer(client_root_keeper)
def tearDown(self):
pass