author     Ian Clatworthy <ian.clatworthy@canonical.com>  2009-03-08 08:55:09 +1000
committer  Ian Clatworthy <ian.clatworthy@canonical.com>  2009-03-08 08:55:09 +1000
commit     5ac4324a68f9fbf83935c434b525d9bc39961e6f (patch)
tree       154fc84ce0f8bca0be2f7995e02ff8b5862e002c
parent     3bdca8582fa461da28ec4ba6acef70c9013a571c (diff)
download   bzr-fastimport-5ac4324a68f9fbf83935c434b525d9bc39961e6f.tar.gz
make groupcompress _FAST during import
-rw-r--r--  processors/generic_processor.py  11
1 file changed, 10 insertions(+), 1 deletion(-)
diff --git a/processors/generic_processor.py b/processors/generic_processor.py
index e7c0463..1c940a1 100644
--- a/processors/generic_processor.py
+++ b/processors/generic_processor.py
@@ -147,7 +147,16 @@ class GenericProcessor(processor.ImportProcessor):
                 _max_pack_count_for_import
         else:
             self._original_max_pack_count = None
-
+
+        # Make groupcompress use the fast algorithm during importing.
+        # We want to repack at the end anyhow when more information
+        # is available to do a better job of saving space.
+        try:
+            from bzrlib.plugins.groupcompress import groupcompress
+            groupcompress._FAST = True
+        except ImportError:
+            pass
+
         # Create a write group. This is committed at the end of the import.
         # Checkpointing closes the current one and starts a new one.
         self.repo.start_write_group()
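
For context on the change above: the pattern is to flip an optional plugin's module-level flag for the duration of a long-running import, tolerating the plugin's absence, and to rely on a later repack to recover compression quality. Below is a minimal sketch of the same idea written as a restoring context manager; the fast_groupcompress helper and the trailing repo.pack() call are illustrative assumptions for this sketch, not code from this commit.

    # Hypothetical sketch (not part of the patch above): enable the
    # groupcompress fast path only while an import runs, then restore
    # the previous setting. The groupcompress import mirrors the patch;
    # the wrapper and the repack step are assumptions.
    from contextlib import contextmanager

    @contextmanager
    def fast_groupcompress():
        """Enable groupcompress fast mode if the plugin is installed."""
        try:
            from bzrlib.plugins.groupcompress import groupcompress
        except ImportError:
            # Plugin not installed; nothing to toggle.
            yield
            return
        previous = getattr(groupcompress, '_FAST', False)
        groupcompress._FAST = True
        try:
            yield
        finally:
            # Restore the original setting once the import is done.
            groupcompress._FAST = previous

    # Usage sketch: run the import with fast compression, then repack so
    # the final pack files are compressed with full effort.
    # with fast_groupcompress():
    #     run_import(repo)
    # repo.pack()   # assumes a bzrlib Repository exposing pack()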