author     Sebastian Thiel <byronimo@gmail.com>  2011-06-07 21:36:42 +0200
committer  Sebastian Thiel <byronimo@gmail.com>  2011-06-07 21:36:42 +0200
commit     58a930a632c867b65b9a3802e2f4190cf32e33ee (patch)
tree       95b1311a3a4bfcdf4c2dba66f360e6985184013e /git/test
parent     a98e0af511b728030c12bf8633b077866bb74e47 (diff)
parent     f6897c78be5a5530129df50742cb6cabfb8609c9 (diff)
download   gitpython-58a930a632c867b65b9a3802e2f4190cf32e33ee.tar.gz
Merge branch 'gitdbmerger'
Diffstat (limited to 'git/test')
-rw-r--r--  git/test/__init__.py | 9
-rw-r--r--  git/test/db/__init__.py | 4
-rw-r--r--  git/test/db/base.py (renamed from git/test/test_repo.py) | 67
-rw-r--r--  git/test/db/cmd/__init__.py | 4
-rw-r--r--  git/test/db/cmd/test_base.py | 32
-rw-r--r--  git/test/db/lib.py | 246
-rw-r--r--  git/test/db/py/__init__.py | 4
-rw-r--r--  git/test/db/py/test_base.py | 16
-rw-r--r--  git/test/db/py/test_git.py | 51
-rw-r--r--  git/test/db/py/test_loose.py | 36
-rw-r--r--  git/test/db/py/test_mem.py | 30
-rw-r--r--  git/test/db/py/test_pack.py | 76
-rw-r--r--  git/test/db/py/test_ref.py | 62
-rw-r--r--  git/test/db/test_base.py | 20
-rw-r--r--  git/test/fixtures/git_config | 16
-rw-r--r--  git/test/fixtures/objects/7b/b839852ed5e3a069966281bb08d50012fb309b | bin 0 -> 446 bytes
-rw-r--r--  git/test/fixtures/packs/pack-11fdfa9e156ab73caae3b6da867192221f2089c2.idx | bin 0 -> 1912 bytes
-rw-r--r--  git/test/fixtures/packs/pack-11fdfa9e156ab73caae3b6da867192221f2089c2.pack | bin 0 -> 51875 bytes
-rw-r--r--  git/test/fixtures/packs/pack-a2bf8e71d8c18879e499335762dd95119d93d9f1.idx | bin 0 -> 2248 bytes
-rw-r--r--  git/test/fixtures/packs/pack-a2bf8e71d8c18879e499335762dd95119d93d9f1.pack | bin 0 -> 3732 bytes
-rw-r--r--  git/test/fixtures/packs/pack-c0438c19fb16422b6bbcce24387b3264416d485b.idx | bin 0 -> 2672 bytes
-rw-r--r--  git/test/fixtures/packs/pack-c0438c19fb16422b6bbcce24387b3264416d485b.pack | bin 0 -> 49113 bytes
-rw-r--r--  git/test/lib/__init__.py | 5
-rw-r--r--  git/test/lib/base.py | 200
-rw-r--r--  git/test/lib/helper.py | 75
-rw-r--r--  git/test/objects/__init__.py | 1
-rw-r--r--  git/test/objects/lib.py | 14
-rw-r--r--  git/test/objects/test_blob.py (renamed from git/test/test_blob.py) | 8
-rw-r--r--  git/test/objects/test_commit.py (renamed from git/test/test_commit.py) | 14
-rw-r--r--  git/test/objects/test_submodule.py (renamed from git/test/test_submodule.py) | 63
-rw-r--r--  git/test/objects/test_tree.py (renamed from git/test/test_tree.py) | 10
-rw-r--r--  git/test/performance/__init__.py | 2
-rw-r--r--  git/test/performance/db/__init__.py | 1
-rw-r--r--  git/test/performance/db/looseodb_impl.py | 132
-rw-r--r--  git/test/performance/db/odb_impl.py | 72
-rw-r--r--  git/test/performance/db/packedodb_impl.py | 107
-rw-r--r--  git/test/performance/db/test_looseodb_cmd.py | 11
-rw-r--r--  git/test/performance/db/test_looseodb_pure.py | 6
-rw-r--r--  git/test/performance/db/test_odb_cmd.py | 6
-rw-r--r--  git/test/performance/db/test_odb_pure.py | 6
-rw-r--r--  git/test/performance/db/test_packedodb_pure.py | 90
-rw-r--r--  git/test/performance/lib.py | 38
-rw-r--r--  git/test/performance/objects/__init__.py | 1
-rw-r--r--  git/test/performance/objects/test_commit.py (renamed from git/test/performance/test_commit.py) | 21
-rw-r--r--  git/test/performance/test_odb.py | 70
-rw-r--r--  git/test/performance/test_streams.py | 131
-rw-r--r--  git/test/test_actor.py | 36
-rw-r--r--  git/test/test_base.py | 122
-rw-r--r--  git/test/test_config.py | 12
-rw-r--r--  git/test/test_db.py | 25
-rw-r--r--  git/test/test_diff.py | 11
-rw-r--r--  git/test/test_example.py | 64
-rw-r--r--  git/test/test_fun.py | 8
-rw-r--r--  git/test/test_git.py | 148
-rw-r--r--  git/test/test_import.py | 58
-rw-r--r--  git/test/test_index.py | 8
-rw-r--r--  git/test/test_pack.py | 247
-rw-r--r--  git/test/test_reflog.py | 2
-rw-r--r--  git/test/test_refs.py | 76
-rw-r--r--  git/test/test_remote.py | 857
-rw-r--r--  git/test/test_stats.py | 8
-rw-r--r--  git/test/test_stream.py | 155
-rw-r--r--  git/test/test_util.py | 129
63 files changed, 2779 insertions, 944 deletions
diff --git a/git/test/__init__.py b/git/test/__init__.py
index 757cbad1..a29d96a7 100644
--- a/git/test/__init__.py
+++ b/git/test/__init__.py
@@ -3,3 +3,12 @@
#
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+
+import git.util
+
+def _init_pool():
+ """Assure the pool is actually threaded"""
+ size = 2
+ print "Setting ThreadPool to %i" % size
+ git.util.pool.set_size(size)
+
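
For context, git.util.pool is the module-level thread pool used by the async layer, and _init_pool merely enlarges it so that the threaded code paths in the tests are actually exercised. A minimal sketch of invoking it (everything here comes straight from the hunk above):

import git.util
import git.test

# runs the initializer added above; after this, the async thread pool
# has two workers, so multi-threaded code paths are really taken
git.test._init_pool()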
diff --git a/git/test/db/__init__.py b/git/test/db/__init__.py
new file mode 100644
index 00000000..8a681e42
--- /dev/null
+++ b/git/test/db/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
+#
+# This module is part of GitDB and is released under
+# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
diff --git a/git/test/test_repo.py b/git/test/db/base.py
index deadbe9a..5291ba03 100644
--- a/git/test/test_repo.py
+++ b/git/test/db/base.py
@@ -3,30 +3,41 @@
#
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+from lib import TestDBBase
from git.test.lib import *
-from git import *
+from git.cmd import Git
+from git.objects import *
+from git.exc import *
+from git.index import *
+from git.refs import *
from git.util import join_path_native
from git.exc import BadObject
-from gitdb.util import hex_to_bin, bin_to_hex
+from git.util import hex_to_bin, bin_to_hex
import os, sys
import tempfile
import shutil
from cStringIO import StringIO
+from git.db.compat import RepoCompatibilityInterface
-class TestRepo(TestBase):
+
+class RepoGlobalsItemDeletorMetaCls(GlobalsItemDeletorMetaCls):
+ ModuleToDelete = 'RepoBase'
+
+
+class RepoBase(TestDBBase):
+ """Basic test for everything a fully implemented repository should support"""
+ __metaclass__ = RepoGlobalsItemDeletorMetaCls
- @raises(InvalidGitRepositoryError)
def test_new_should_raise_on_invalid_repo_location(self):
- Repo(tempfile.gettempdir())
+ self.failUnlessRaises(InvalidGitRepositoryError, self.RepoCls, tempfile.gettempdir())
- @raises(NoSuchPathError)
def test_new_should_raise_on_non_existant_path(self):
- Repo("repos/foobar")
+ self.failUnlessRaises(NoSuchPathError, self.RepoCls, "repos/foobar")
def test_repo_creation_from_different_paths(self):
- r_from_gitdir = Repo(self.rorepo.git_dir)
+ r_from_gitdir = self.RepoCls(self.rorepo.git_dir)
assert r_from_gitdir.git_dir == self.rorepo.git_dir
assert r_from_gitdir.git_dir.endswith('.git')
assert not self.rorepo.git.working_dir.endswith('.git')
@@ -133,8 +144,8 @@ class TestRepo(TestBase):
try:
# with specific path
for path in (git_dir_rela, git_dir_abs):
- r = Repo.init(path=path, bare=True)
- assert isinstance(r, Repo)
+ r = self.RepoCls.init(path=path, bare=True)
+ assert isinstance(r, self.RepoCls)
assert r.bare == True
assert os.path.isdir(r.git_dir)
@@ -155,7 +166,7 @@ class TestRepo(TestBase):
# END exception handling
# try again, this time with the absolute version
- rc = Repo.clone_from(r.git_dir, clone_path)
+ rc = self.RepoCls.clone_from(r.git_dir, clone_path)
self._assert_empty_repo(rc)
shutil.rmtree(git_dir_abs)
@@ -171,7 +182,7 @@ class TestRepo(TestBase):
os.makedirs(git_dir_rela)
os.chdir(git_dir_rela)
- r = Repo.init(bare=False)
+ r = self.RepoCls.init(bare=False)
r.bare == False
self._assert_empty_repo(r)
@@ -184,7 +195,10 @@ class TestRepo(TestBase):
# END restore previous state
def test_bare_property(self):
- self.rorepo.bare
+ if isinstance(self.rorepo, RepoCompatibilityInterface):
+ self.rorepo.bare
+ #END handle compatibility
+ self.rorepo.is_bare
def test_daemon_export(self):
orig_val = self.rorepo.daemon_export
@@ -204,8 +218,7 @@ class TestRepo(TestBase):
self.rorepo.alternates = cur_alternates
def test_repr(self):
- path = os.path.join(os.path.abspath(GIT_REPO), '.git')
- assert_equal('<git.Repo "%s">' % path, repr(self.rorepo))
+ assert_equal('<git.Repo "%s">' % rorepo_dir(), repr(self.rorepo))
def test_is_dirty_with_bare_repository(self):
orig_value = self.rorepo._bare
@@ -235,6 +248,7 @@ class TestRepo(TestBase):
assert isinstance(index, IndexFile)
def test_tag(self):
+ assert self.rorepo.tag('0.1.5').commit
assert self.rorepo.tag('refs/tags/0.1.5').commit
def test_archive(self):
@@ -570,8 +584,15 @@ class TestRepo(TestBase):
assert rev_parse(refspec+":CHANGES").type == 'blob'
#END operate on non-detached head
- # the last position
- assert rev_parse('@{1}') != head.commit
+ # the most recent previous position of the currently checked out branch
+
+ try:
+ assert rev_parse('@{1}') != head.commit
+ except IndexError:
+ # on new checkouts, there isn't even a single past branch position
+ # in the log
+ pass
+ #END handle fresh checkouts
# position doesn't exist
self.failUnlessRaises(IndexError, rev_parse, '@{10000}')
@@ -579,17 +600,13 @@ class TestRepo(TestBase):
# currently, nothing more is supported
self.failUnlessRaises(NotImplementedError, rev_parse, "@{1 week ago}")
- def test_repo_odbtype(self):
- target_type = GitDB
- if sys.version_info[1] < 5:
- target_type = GitCmdObjectDB
- assert isinstance(self.rorepo.odb, target_type)
-
def test_submodules(self):
assert len(self.rorepo.submodules) == 1 # non-recursive
- assert len(list(self.rorepo.iter_submodules())) == 2
+ # in previous configurations, submodules were nested recursively, so this compared to 2
+ # now only one is left, as gitdb was merged into the main repository
+ assert len(list(self.rorepo.iter_submodules())) == 1
- assert isinstance(self.rorepo.submodule("gitdb"), Submodule)
+ assert isinstance(self.rorepo.submodule("git/ext/async"), Submodule)
self.failUnlessRaises(ValueError, self.rorepo.submodule, "doesn't exist")
@with_rw_repo('HEAD', bare=False)
diff --git a/git/test/db/cmd/__init__.py b/git/test/db/cmd/__init__.py
new file mode 100644
index 00000000..8a681e42
--- /dev/null
+++ b/git/test/db/cmd/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
+#
+# This module is part of GitDB and is released under
+# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
diff --git a/git/test/db/cmd/test_base.py b/git/test/db/cmd/test_base.py
new file mode 100644
index 00000000..959be16b
--- /dev/null
+++ b/git/test/db/cmd/test_base.py
@@ -0,0 +1,32 @@
+# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
+#
+# This module is part of GitDB and is released under
+# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
+from git.test.lib import rorepo_dir
+from git.test.db.base import RepoBase
+
+from git.util import bin_to_hex
+from git.exc import BadObject
+
+from git.db.complex import CmdCompatibilityGitDB
+from git.db.cmd.base import *
+
+class TestBase(RepoBase):
+ RepoCls = CmdCompatibilityGitDB
+
+ def test_basics(self):
+ gdb = self.rorepo
+
+ # partial to complete - works with everything
+ hexsha = bin_to_hex(gdb.partial_to_complete_sha_hex("0.1.6"))
+ assert len(hexsha) == 40
+
+ assert bin_to_hex(gdb.partial_to_complete_sha_hex(hexsha[:20])) == hexsha
+
+ # fails with BadObject
+ for invalid_rev in ("0000", "bad/ref", "super bad"):
+ self.failUnlessRaises(BadObject, gdb.partial_to_complete_sha_hex, invalid_rev)
+
+ def test_fetch_info(self):
+ self.failUnlessRaises(ValueError, CmdFetchInfo._from_line, self.rorepo, "nonsense", '')
+ self.failUnlessRaises(ValueError, CmdFetchInfo._from_line, self.rorepo, "? [up to date] 0.1.7RC -> origin/0.1.7RC", '')
diff --git a/git/test/db/lib.py b/git/test/db/lib.py
new file mode 100644
index 00000000..499ca252
--- /dev/null
+++ b/git/test/db/lib.py
@@ -0,0 +1,246 @@
+# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
+#
+# This module is part of GitDB and is released under
+# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
+"""Base classes for object db testing"""
+from git.test.lib import (
+ with_rw_directory,
+ with_packs_rw,
+ ZippedStoreShaWriter,
+ fixture_path,
+ TestBase,
+ rorepo_dir,
+ )
+
+from git.stream import Sha1Writer
+from git.base import (
+ IStream,
+ OStream,
+ OInfo
+ )
+
+from git.exc import BadObject
+from git.typ import str_blob_type
+
+from async import IteratorReader
+from cStringIO import StringIO
+from struct import pack
+
+
+__all__ = ('TestDBBase', 'with_rw_directory', 'with_packs_rw', 'fixture_path')
+
+class TestDBBase(TestBase):
+ """Base Class providing default functionality to all tests such as:
+
+ - Utility functions provided by the unittest.TestCase base, such as::
+ self.fail("todo")
+ self.failUnlessRaises(...)
+
+ - Class level repository which is considered read-only as it is shared among
+ all test cases in your type.
+ Access it using::
+ self.rorepo # 'ro' stands for read-only
+
+ The rorepo is in fact your current project's git repo. If you refer to specific
+ shas for your objects, be sure you choose some that are part of the immutable portion
+ of the project history ( to assure tests don't fail for others ).
+
+ Derived types can override the default repository type to create a different
+ read-only repo, allowing them to test their specific type
+ """
+
+ # data
+ two_lines = "1234\nhello world"
+ all_data = (two_lines, )
+
+ #{ Configuration
+ # The repository type to instantiate. It takes at least a path to operate upon
+ # during instantiation.
+ RepoCls = None
+
+ # if True, a read-only repo will be provided and RepoCls must be set.
+ # Otherwise it may remain unset
+ needs_ro_repo = True
+ #} END configuration
+
+ @classmethod
+ def setUpAll(cls):
+ """
+ Dynamically add a read-only repository to our actual type. This way
+ each test type has its own repository
+ """
+ if cls.needs_ro_repo:
+ assert cls.RepoCls is not None, "RepoCls class member must be set"
+ cls.rorepo = cls.RepoCls(rorepo_dir())
+ #END handle rorepo
+
+ def _assert_object_writing_simple(self, db):
+ # write a bunch of objects and query their streams and info
+ null_objs = db.size()
+ ni = 250
+ for i in xrange(ni):
+ data = pack(">L", i)
+ istream = IStream(str_blob_type, len(data), StringIO(data))
+ new_istream = db.store(istream)
+ assert new_istream is istream
+ assert db.has_object(istream.binsha)
+
+ info = db.info(istream.binsha)
+ assert isinstance(info, OInfo)
+ assert info.type == istream.type and info.size == istream.size
+
+ stream = db.stream(istream.binsha)
+ assert isinstance(stream, OStream)
+ assert stream.binsha == info.binsha and stream.type == info.type
+ assert stream.read() == data
+ # END for each item
+
+ assert db.size() == null_objs + ni
+ shas = list(db.sha_iter())
+ assert len(shas) == db.size()
+ assert len(shas[0]) == 20
+
+
+ def _assert_object_writing(self, db):
+ """General tests to verify object writing, compatible to ObjectDBW
+ :note: requires write access to the database"""
+ # start in 'dry-run' mode, using a simple sha1 writer
+ ostreams = (ZippedStoreShaWriter, None)
+ for ostreamcls in ostreams:
+ for data in self.all_data:
+ dry_run = ostreamcls is not None
+ ostream = None
+ if ostreamcls is not None:
+ ostream = ostreamcls()
+ assert isinstance(ostream, Sha1Writer)
+ # END create ostream
+
+ prev_ostream = db.set_ostream(ostream)
+ assert type(prev_ostream) in ostreams or prev_ostream in ostreams
+
+ istream = IStream(str_blob_type, len(data), StringIO(data))
+
+ # store returns same istream instance, with new sha set
+ my_istream = db.store(istream)
+ sha = istream.binsha
+ assert my_istream is istream
+ assert db.has_object(sha) != dry_run
+ assert len(sha) == 20
+
+ # verify data - the slow way, we want to run code
+ if not dry_run:
+ info = db.info(sha)
+ assert str_blob_type == info.type
+ assert info.size == len(data)
+
+ ostream = db.stream(sha)
+ assert ostream.read() == data
+ assert ostream.type == str_blob_type
+ assert ostream.size == len(data)
+ else:
+ self.failUnlessRaises(BadObject, db.info, sha)
+ self.failUnlessRaises(BadObject, db.stream, sha)
+
+ # DIRECT STREAM COPY
+ # our data has been written in object format to the StringIO
+ # we passed as output stream. No physical database representation
+ # was created.
+ # Test direct stream copy of object streams, the result must be
+ # identical to what we fed in
+ ostream.seek(0)
+ istream.stream = ostream
+ assert istream.binsha is not None
+ prev_sha = istream.binsha
+
+ db.set_ostream(ZippedStoreShaWriter())
+ db.store(istream)
+ assert istream.binsha == prev_sha
+ new_ostream = db.ostream()
+
+ # note: only works as long our store write uses the same compression
+ # level, which is zip_best
+ assert ostream.getvalue() == new_ostream.getvalue()
+ # END for each data set
+ # END for each dry_run mode
+
+ def _assert_object_writing_async(self, db):
+ """Test generic object writing using asynchronous access"""
+ ni = 5000
+ def istream_generator(offset=0, ni=ni):
+ for data_src in xrange(ni):
+ data = str(data_src + offset)
+ yield IStream(str_blob_type, len(data), StringIO(data))
+ # END for each item
+ # END generator utility
+
+ # for now, we are very trusting here, as we expect it to work if it worked
+ # in the single-stream case
+
+ # write objects
+ reader = IteratorReader(istream_generator())
+ istream_reader = db.store_async(reader)
+ istreams = istream_reader.read() # read all
+ assert istream_reader.task().error() is None
+ assert len(istreams) == ni
+
+ for stream in istreams:
+ assert stream.error is None
+ assert len(stream.binsha) == 20
+ assert isinstance(stream, IStream)
+ # END assert each stream
+
+ # test has-object-async - we must have all previously added ones
+ reader = IteratorReader( istream.binsha for istream in istreams )
+ hasobject_reader = db.has_object_async(reader)
+ count = 0
+ for sha, has_object in hasobject_reader:
+ assert has_object
+ count += 1
+ # END for each sha
+ assert count == ni
+
+ # read the objects we have just written
+ reader = IteratorReader( istream.binsha for istream in istreams )
+ ostream_reader = db.stream_async(reader)
+
+ # read items individually to prevent hitting possible sys-limits
+ count = 0
+ for ostream in ostream_reader:
+ assert isinstance(ostream, OStream)
+ count += 1
+ # END for each ostream
+ assert ostream_reader.task().error() is None
+ assert count == ni
+
+ # get info about our items
+ reader = IteratorReader( istream.binsha for istream in istreams )
+ info_reader = db.info_async(reader)
+
+ count = 0
+ for oinfo in info_reader:
+ assert isinstance(oinfo, OInfo)
+ count += 1
+ # END for each oinfo instance
+ assert count == ni
+
+
+ # combined read-write using a converter
+ # add 2500 items, and obtain their output streams
+ nni = 2500
+ reader = IteratorReader(istream_generator(offset=ni, ni=nni))
+ istream_to_sha = lambda istreams: [ istream.binsha for istream in istreams ]
+
+ istream_reader = db.store_async(reader)
+ istream_reader.set_post_cb(istream_to_sha)
+
+ ostream_reader = db.stream_async(istream_reader)
+
+ count = 0
+ # read it individually, otherwise we might run into the ulimit
+ for ostream in ostream_reader:
+ assert isinstance(ostream, OStream)
+ count += 1
+ # END for each ostream
+ assert count == nni
+
+
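
As the docstring above says, derived types override RepoCls to test their specific database. A minimal sketch of such a subclass, assuming PureCompatibilityGitDB (the class the concrete subclasses in this commit bind) and that the resulting repo object exposes a size() method as the ODB tests above use:

from git.test.db.lib import TestDBBase
from git.db.complex import PureCompatibilityGitDB

class TestMyBackend(TestDBBase):
    # needs_ro_repo defaults to True, so setUpAll() builds
    # cls.rorepo = RepoCls(rorepo_dir()) from this member
    RepoCls = PureCompatibilityGitDB

    def test_reading(self):
        # self.rorepo is an instance of RepoCls created in setUpAll()
        assert self.rorepo.size() > 0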
diff --git a/git/test/db/py/__init__.py b/git/test/db/py/__init__.py
new file mode 100644
index 00000000..8a681e42
--- /dev/null
+++ b/git/test/db/py/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
+#
+# This module is part of GitDB and is released under
+# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
diff --git a/git/test/db/py/test_base.py b/git/test/db/py/test_base.py
new file mode 100644
index 00000000..6b06bbe9
--- /dev/null
+++ b/git/test/db/py/test_base.py
@@ -0,0 +1,16 @@
+# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
+#
+# This module is part of GitDB and is released under
+# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
+from git.test.lib import rorepo_dir
+from git.test.db.base import RepoBase
+
+from git.db.complex import PureCompatibilityGitDB
+
+class TestPyDBBase(RepoBase):
+
+ RepoCls = PureCompatibilityGitDB
+
+ def test_basics(self):
+ pass
+
diff --git a/git/test/db/py/test_git.py b/git/test/db/py/test_git.py
new file mode 100644
index 00000000..ecaa5c8f
--- /dev/null
+++ b/git/test/db/py/test_git.py
@@ -0,0 +1,51 @@
+# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
+#
+# This module is part of GitDB and is released under
+# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
+from git.test.lib import rorepo_dir
+from git.test.db.lib import TestDBBase, with_rw_directory
+from git.exc import BadObject
+from git.db.py.complex import PureGitODB
+from git.base import OStream, OInfo
+from git.util import hex_to_bin, bin_to_hex
+
+import os
+
+class TestGitDB(TestDBBase):
+ needs_ro_repo = False
+
+ def test_reading(self):
+ gdb = PureGitODB(os.path.join(rorepo_dir(), 'objects'))
+
+ # we have packs and loose objects, alternates doesn't necessarily exist
+ assert 1 < len(gdb.databases()) < 4
+
+ # access should be possible
+ git_sha = hex_to_bin("5aebcd5cb3340fb31776941d7e4d518a712a8655")
+ assert isinstance(gdb.info(git_sha), OInfo)
+ assert isinstance(gdb.stream(git_sha), OStream)
+ assert gdb.size() > 200
+ sha_list = list(gdb.sha_iter())
+ assert len(sha_list) == gdb.size()
+
+
+ # This is actually a test for compound functionality, but it doesn't
+ # have a separate test module
+ # test partial shas
+ # this one is uneven and quite short
+ assert gdb.partial_to_complete_sha_hex('5aebcd') == hex_to_bin("5aebcd5cb3340fb31776941d7e4d518a712a8655")
+
+ # mix even/uneven hexshas
+ for i, binsha in enumerate(sha_list[:50]):
+ assert gdb.partial_to_complete_sha_hex(bin_to_hex(binsha)[:8-(i%2)]) == binsha
+ # END for each sha
+
+ self.failUnlessRaises(BadObject, gdb.partial_to_complete_sha_hex, "0000")
+
+ @with_rw_directory
+ def test_writing(self, path):
+ gdb = PureGitODB(path)
+
+ # it's possible to write objects
+ self._assert_object_writing(gdb)
+ self._assert_object_writing_async(gdb)
diff --git a/git/test/db/py/test_loose.py b/git/test/db/py/test_loose.py
new file mode 100644
index 00000000..0c9b4831
--- /dev/null
+++ b/git/test/db/py/test_loose.py
@@ -0,0 +1,36 @@
+# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
+#
+# This module is part of GitDB and is released under
+# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
+from git.test.db.lib import TestDBBase, with_rw_directory
+from git.db.py.loose import PureLooseObjectODB
+from git.exc import BadObject
+from git.util import bin_to_hex
+
+class TestLooseDB(TestDBBase):
+
+ needs_ro_repo = False
+
+ @with_rw_directory
+ def test_basics(self, path):
+ ldb = PureLooseObjectODB(path)
+
+ # write data
+ self._assert_object_writing(ldb)
+ self._assert_object_writing_async(ldb)
+
+ # verify sha iteration and size
+ shas = list(ldb.sha_iter())
+ assert shas and len(shas[0]) == 20
+
+ assert len(shas) == ldb.size()
+
+ # verify find short object
+ long_sha = bin_to_hex(shas[-1])
+ for short_sha in (long_sha[:20], long_sha[:5]):
+ assert bin_to_hex(ldb.partial_to_complete_sha_hex(short_sha)) == long_sha
+ # END for each sha
+
+ self.failUnlessRaises(BadObject, ldb.partial_to_complete_sha_hex, '0000')
+ # raises if no object could be found
+
diff --git a/git/test/db/py/test_mem.py b/git/test/db/py/test_mem.py
new file mode 100644
index 00000000..bc98dc56
--- /dev/null
+++ b/git/test/db/py/test_mem.py
@@ -0,0 +1,30 @@
+# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
+#
+# This module is part of GitDB and is released under
+# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
+from git.test.db.lib import TestDBBase, with_rw_directory
+from git.db.py.mem import PureMemoryDB
+from git.db.py.loose import PureLooseObjectODB
+
+class TestPureMemoryDB(TestDBBase):
+
+ needs_ro_repo = False
+
+ @with_rw_directory
+ def test_writing(self, path):
+ mdb = PureMemoryDB()
+
+ # write data
+ self._assert_object_writing_simple(mdb)
+
+ # test stream copy
+ ldb = PureLooseObjectODB(path)
+ assert ldb.size() == 0
+ num_streams_copied = mdb.stream_copy(mdb.sha_iter(), ldb)
+ assert num_streams_copied == mdb.size()
+
+ assert ldb.size() == mdb.size()
+ for sha in mdb.sha_iter():
+ assert ldb.has_object(sha)
+ assert ldb.stream(sha).read() == mdb.stream(sha).read()
+ # END verify objects were copied and are equal
diff --git a/git/test/db/py/test_pack.py b/git/test/db/py/test_pack.py
new file mode 100644
index 00000000..5043f446
--- /dev/null
+++ b/git/test/db/py/test_pack.py
@@ -0,0 +1,76 @@
+# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
+#
+# This module is part of GitDB and is released under
+# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
+from git.test.db.lib import TestDBBase, with_packs_rw
+
+from git.db.py.pack import PurePackedODB
+from git.test.lib import fixture_path
+
+from git.exc import BadObject, AmbiguousObjectName
+
+import os
+import random
+
+class TestPackDB(TestDBBase):
+
+ needs_ro_repo = False
+
+ @with_packs_rw
+ def test_writing(self, path):
+ pdb = PurePackedODB(path)
+
+ # on demand, we init our pack cache
+ num_packs = len(pdb.entities())
+ assert num_packs
+ assert pdb._st_mtime != 0
+
+ # test pack directory changed:
+ # packs removed - rename a file, should affect the glob
+ pack_path = pdb.entities()[0].pack().path()
+ new_pack_path = pack_path + "renamed"
+ os.rename(pack_path, new_pack_path)
+
+ pdb.update_cache(force=True)
+ assert len(pdb.entities()) == num_packs - 1
+
+ # packs added
+ os.rename(new_pack_path, pack_path)
+ pdb.update_cache(force=True)
+ assert len(pdb.entities()) == num_packs
+
+ # bang on the cache
+ # access the Entities directly, as there is no iteration interface
+ # yet ( nor is one required for now )
+ sha_list = list(pdb.sha_iter())
+ assert len(sha_list) == pdb.size()
+
+ # hit all packs in random order
+ random.shuffle(sha_list)
+
+ for sha in sha_list:
+ info = pdb.info(sha)
+ stream = pdb.stream(sha)
+ # END for each sha to query
+
+
+ # test short finding - be a bit more brutal here
+ max_bytes = 19
+ min_bytes = 2
+ num_ambiguous = 0
+ for i, sha in enumerate(sha_list):
+ short_sha = sha[:max((i % max_bytes), min_bytes)]
+ try:
+ assert pdb.partial_to_complete_sha(short_sha, len(short_sha)*2) == sha
+ except AmbiguousObjectName:
+ num_ambiguous += 1
+ pass # valid, short sha prefixes can be ambiguous
+ # END exception handling
+ # END for each sha to find
+
+ # we should have at least one ambiguous, considering the small sizes
+ # but in our pack, there is no ambiguous one ...
+ # assert num_ambiguous
+
+ # non-existing
+ self.failUnlessRaises(BadObject, pdb.partial_to_complete_sha, "\0\0", 4)
diff --git a/git/test/db/py/test_ref.py b/git/test/db/py/test_ref.py
new file mode 100644
index 00000000..c5374dc9
--- /dev/null
+++ b/git/test/db/py/test_ref.py
@@ -0,0 +1,62 @@
+# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
+#
+# This module is part of GitDB and is released under
+# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
+from git.test.db.lib import *
+from git.db.py.ref import PureReferenceDB
+
+from git.util import (
+ NULL_BIN_SHA,
+ hex_to_bin
+ )
+
+import os
+
+class TestPureReferenceDB(TestDBBase):
+
+ needs_ro_repo = False
+
+ def make_alt_file(self, alt_path, alt_list):
+ """Create an alternates file which contains the given alternates.
+ The list can be empty"""
+ alt_file = open(alt_path, "wb")
+ for alt in alt_list:
+ alt_file.write(alt + "\n")
+ alt_file.close()
+
+ @with_rw_directory
+ def test_writing(self, path):
+ NULL_BIN_SHA = '\0' * 20
+
+ alt_path = os.path.join(path, 'alternates')
+ rdb = PureReferenceDB(alt_path)
+ assert len(rdb.databases()) == 0
+ assert rdb.size() == 0
+ assert len(list(rdb.sha_iter())) == 0
+
+ # try empty, non-existing
+ assert not rdb.has_object(NULL_BIN_SHA)
+
+
+ # setup alternate file
+ # add two, one is invalid
+ own_repo_path = fixture_path('../../../.git/objects') # use own repo
+ self.make_alt_file(alt_path, [own_repo_path, "invalid/path"])
+ rdb.update_cache()
+ assert len(rdb.databases()) == 1
+
+ # we should now find a default revision of ours
+ git_sha = hex_to_bin("5aebcd5cb3340fb31776941d7e4d518a712a8655")
+ assert rdb.has_object(git_sha)
+
+ # remove valid
+ self.make_alt_file(alt_path, ["just/one/invalid/path"])
+ rdb.update_cache()
+ assert len(rdb.databases()) == 0
+
+ # add valid
+ self.make_alt_file(alt_path, [own_repo_path])
+ rdb.update_cache()
+ assert len(rdb.databases()) == 1
+
+
diff --git a/git/test/db/test_base.py b/git/test/db/test_base.py
new file mode 100644
index 00000000..2a882d0a
--- /dev/null
+++ b/git/test/db/test_base.py
@@ -0,0 +1,20 @@
+# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
+#
+# This module is part of GitDB and is released under
+# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
+from lib import *
+from git.db import RefSpec
+
+class TestBase(TestDBBase):
+
+ needs_ro_repo = False
+
+ @with_rw_directory
+ def test_basics(self, path):
+ self.failUnlessRaises(ValueError, RefSpec, None, None)
+ rs = RefSpec(None, "something")
+ assert rs.force == False
+ assert rs.delete_destination()
+ assert rs.source is None
+ assert rs.destination == "something"
+
diff --git a/git/test/fixtures/git_config b/git/test/fixtures/git_config
index 3c91985f..ff8e7114 100644
--- a/git/test/fixtures/git_config
+++ b/git/test/fixtures/git_config
@@ -1,22 +1,28 @@
[core]
repositoryformatversion = 0
filemode = true
- bare = false
- logallrefupdates = true
+ bare = false
+ logallrefupdates = true
[remote "origin"]
fetch = +refs/heads/*:refs/remotes/origin/*
url = git://gitorious.org/~byron/git-python/byrons-clone.git
pushurl = git@gitorious.org:~byron/git-python/byrons-clone.git
-[branch "master"]
+# a tab indented section header
+ [branch "master"]
remote = origin
merge = refs/heads/master
-[remote "mainline"]
+# a space indented section header
+ [remote "mainline"]
+ # space indented comment
url = git://gitorious.org/git-python/mainline.git
fetch = +refs/heads/*:refs/remotes/mainline/*
+
[remote "MartinMarcher"]
+ # tab indented comment
url = git://gitorious.org/~martin.marcher/git-python/serverhorror.git
fetch = +refs/heads/*:refs/remotes/MartinMarcher/*
-[gui]
+ # can handle comments - the section name is supposed to be stripped
+[ gui ]
geometry = 1316x820+219+243 207 192
[branch "mainline_performance"]
remote = mainline
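
The new fixture lines deliberately exercise a config parser that tolerates tab- or space-indented section headers, indented comments, and whitespace inside the brackets (the "[ gui ]" case). Illustratively, and only as our own sketch rather than GitPython's actual parser, the section-header cases reduce to a whitespace-stripping match:

import re

# strips leading whitespace before the header and whitespace inside the
# brackets, covering all three new cases in the fixture above
SECTION_RE = re.compile(r'^\s*\[\s*(?P<name>[^\]]+?)\s*\]')

assert SECTION_RE.match('\t[branch "master"]').group('name') == 'branch "master"'
assert SECTION_RE.match('[ gui ]').group('name') == 'gui'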
diff --git a/git/test/fixtures/objects/7b/b839852ed5e3a069966281bb08d50012fb309b b/git/test/fixtures/objects/7b/b839852ed5e3a069966281bb08d50012fb309b
new file mode 100644
index 00000000..021c2db3
--- /dev/null
+++ b/git/test/fixtures/objects/7b/b839852ed5e3a069966281bb08d50012fb309b
Binary files differ
diff --git a/git/test/fixtures/packs/pack-11fdfa9e156ab73caae3b6da867192221f2089c2.idx b/git/test/fixtures/packs/pack-11fdfa9e156ab73caae3b6da867192221f2089c2.idx
new file mode 100644
index 00000000..fda5969b
--- /dev/null
+++ b/git/test/fixtures/packs/pack-11fdfa9e156ab73caae3b6da867192221f2089c2.idx
Binary files differ
diff --git a/git/test/fixtures/packs/pack-11fdfa9e156ab73caae3b6da867192221f2089c2.pack b/git/test/fixtures/packs/pack-11fdfa9e156ab73caae3b6da867192221f2089c2.pack
new file mode 100644
index 00000000..a3209d2b
--- /dev/null
+++ b/git/test/fixtures/packs/pack-11fdfa9e156ab73caae3b6da867192221f2089c2.pack
Binary files differ
diff --git a/git/test/fixtures/packs/pack-a2bf8e71d8c18879e499335762dd95119d93d9f1.idx b/git/test/fixtures/packs/pack-a2bf8e71d8c18879e499335762dd95119d93d9f1.idx
new file mode 100644
index 00000000..a7d6c717
--- /dev/null
+++ b/git/test/fixtures/packs/pack-a2bf8e71d8c18879e499335762dd95119d93d9f1.idx
Binary files differ
diff --git a/git/test/fixtures/packs/pack-a2bf8e71d8c18879e499335762dd95119d93d9f1.pack b/git/test/fixtures/packs/pack-a2bf8e71d8c18879e499335762dd95119d93d9f1.pack
new file mode 100644
index 00000000..955c424c
--- /dev/null
+++ b/git/test/fixtures/packs/pack-a2bf8e71d8c18879e499335762dd95119d93d9f1.pack
Binary files differ
diff --git a/git/test/fixtures/packs/pack-c0438c19fb16422b6bbcce24387b3264416d485b.idx b/git/test/fixtures/packs/pack-c0438c19fb16422b6bbcce24387b3264416d485b.idx
new file mode 100644
index 00000000..87c635f4
--- /dev/null
+++ b/git/test/fixtures/packs/pack-c0438c19fb16422b6bbcce24387b3264416d485b.idx
Binary files differ
diff --git a/git/test/fixtures/packs/pack-c0438c19fb16422b6bbcce24387b3264416d485b.pack b/git/test/fixtures/packs/pack-c0438c19fb16422b6bbcce24387b3264416d485b.pack
new file mode 100644
index 00000000..a69b28ac
--- /dev/null
+++ b/git/test/fixtures/packs/pack-c0438c19fb16422b6bbcce24387b3264416d485b.pack
Binary files differ
diff --git a/git/test/lib/__init__.py b/git/test/lib/__init__.py
index 77512794..a0656438 100644
--- a/git/test/lib/__init__.py
+++ b/git/test/lib/__init__.py
@@ -5,9 +5,14 @@
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
import inspect
+# TODO: Separate names - they unfortunately repeat. Also deduplicate this code and
+# redesign decorators to support multiple database types in succession.
+from base import *
+
from mock import *
from asserts import *
from helper import *
+
__all__ = [ name for name, obj in locals().items()
if not (name.startswith('_') or inspect.ismodule(obj)) ]
diff --git a/git/test/lib/base.py b/git/test/lib/base.py
new file mode 100644
index 00000000..bc160783
--- /dev/null
+++ b/git/test/lib/base.py
@@ -0,0 +1,200 @@
+# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
+#
+# This module is part of GitDB and is released under
+# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
+"""Utilities used in ODB testing"""
+from git.base import OStream
+from git.stream import (
+ Sha1Writer,
+ ZippedStoreShaWriter
+ )
+
+from git.util import (
+ zlib,
+ dirname
+ )
+
+import sys
+import random
+from array import array
+from cStringIO import StringIO
+
+import glob
+import unittest
+import tempfile
+import shutil
+import os
+import gc
+
+
+#{ Decorators
+
+def with_rw_directory(func):
+ """Create a temporary directory which can be written to, remove it if the
+ test succeeds, but leave it otherwise to aid additional debugging"""
+ def wrapper(self):
+ path = maketemp(prefix=func.__name__)
+ os.mkdir(path)
+ keep = False
+ try:
+ try:
+ return func(self, path)
+ except Exception:
+ print >> sys.stderr, "Test %s.%s failed, output is at %r" % (type(self).__name__, func.__name__, path)
+ keep = True
+ raise
+ finally:
+ # Need to collect here to be sure all handles have been closed. It appears
+ # a windows-only issue. In fact things should be deleted, as well as
+ # memory maps closed, once objects go out of scope. For some reason
+ # though this is not the case here unless we collect explicitly.
+ if not keep:
+ gc.collect()
+ shutil.rmtree(path)
+ # END handle exception
+ # END wrapper
+
+ wrapper.__name__ = func.__name__
+ return wrapper
+
+
+def with_rw_repo(func):
+ """Create a copy of our repository and put it into a writable location. It will
+ be removed if the test doesn't result in an error.
+ As we can currently only copy the full working tree, tests must not rely on
+ being on a certain branch, nor on anything else except for the default tags
+ that should exist.
+ The wrapped function obtains a git repository. """
+ def wrapper(self, path):
+ src_dir = dirname(dirname(dirname(__file__)))
+ assert(os.path.isdir(path))
+ os.rmdir(path) # created by wrapper, but must not exist for copy operation
+ shutil.copytree(src_dir, path)
+ target_gitdir = os.path.join(path, '.git')
+ assert os.path.isdir(target_gitdir)
+ return func(self, self.RepoCls(target_gitdir))
+ #END wrapper
+ wrapper.__name__ = func.__name__
+ return with_rw_directory(wrapper)
+
+
+
+def with_packs_rw(func):
+ """Function that provides a path into which the packs for testing should be
+ copied. Will pass on the path to the actual function afterwards
+
+ :note: needs with_rw_directory wrapped around it"""
+ def wrapper(self, path):
+ src_pack_glob = fixture_path('packs/*')
+ print src_pack_glob
+ copy_files_globbed(src_pack_glob, path, hard_link_ok=True)
+ return func(self, path)
+ # END wrapper
+
+ wrapper.__name__ = func.__name__
+ return with_rw_directory(wrapper)
+
+#} END decorators
+
+#{ Routines
+
+def rorepo_dir():
+ """:return: path to our own repository, being our own .git directory.
+ :note: doesn't work in bare repositories"""
+ base = os.path.join(dirname(dirname(dirname(dirname(__file__)))), '.git')
+ assert os.path.isdir(base)
+ return base
+
+def maketemp(*args, **kwargs):
+ """Wrapper around default tempfile.mktemp to fix an osx issue"""
+ tdir = tempfile.mktemp(*args, **kwargs)
+ if sys.platform == 'darwin':
+ tdir = '/private' + tdir
+ return tdir
+
+def fixture_path(relapath=''):
+ """:return: absolute path into the fixture directory
+ :param relapath: relative path into the fixtures directory, or ''
+ to obtain the fixture directory itself"""
+ test_dir = os.path.dirname(os.path.dirname(__file__))
+ return os.path.join(test_dir, "fixtures", relapath)
+
+def fixture(name):
+ return open(fixture_path(name), 'rb').read()
+
+def absolute_project_path():
+ return os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
+
+def copy_files_globbed(source_glob, target_dir, hard_link_ok=False):
+ """Copy all files found according to the given source glob into the target directory
+ :param hard_link_ok: if True, hard links will be created if possible. Otherwise
+ the files will be copied"""
+ for src_file in glob.glob(source_glob):
+ if hard_link_ok and hasattr(os, 'link'):
+ target = os.path.join(target_dir, os.path.basename(src_file))
+ try:
+ os.link(src_file, target)
+ except OSError:
+ shutil.copy(src_file, target_dir)
+ # END handle cross device links ( and resulting failure )
+ else:
+ shutil.copy(src_file, target_dir)
+ # END try hard link
+ # END for each file to copy
+
+
+def make_bytes(size_in_bytes, randomize=False):
+ """:return: string with given size in bytes
+ :param randomize: try to produce a very random stream"""
+ actual_size = size_in_bytes / 4
+ producer = xrange(actual_size)
+ if randomize:
+ producer = list(producer)
+ random.shuffle(producer)
+ # END randomize
+ a = array('i', producer)
+ return a.tostring()
+
+def make_object(type, data):
+ """:return: bytes resembling an uncompressed object"""
+ odata = "blob %i\0" % len(data)
+ return odata + data
+
+def make_memory_file(size_in_bytes, randomize=False):
+ """:return: tuple(size_of_stream, stream)
+ :param randomize: try to produce a very random stream"""
+ d = make_bytes(size_in_bytes, randomize)
+ return len(d), StringIO(d)
+
+#} END routines
+
+#{ Stream Utilities
+
+class DummyStream(object):
+ def __init__(self):
+ self.was_read = False
+ self.bytes = 0
+ self.closed = False
+
+ def read(self, size):
+ self.was_read = True
+ self.bytes = size
+
+ def close(self):
+ self.closed = True
+
+ def _assert(self):
+ assert self.was_read
+
+
+class DeriveTest(OStream):
+ def __init__(self, sha, type, size, stream, *args, **kwargs):
+ self.myarg = kwargs.pop('myarg')
+ self.args = args
+
+ def _assert(self):
+ assert self.args
+ assert self.myarg
+
+#} END stream utilities
+
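
A short usage sketch for the with_rw_directory decorator defined above (TestBase is the one re-exported by git.test.lib; the probe file is purely illustrative):

import os
from git.test.lib import with_rw_directory, TestBase

class TestScratchDir(TestBase):
    @with_rw_directory
    def test_uses_scratch_dir(self, path):
        # path is a fresh temporary directory; it is removed when the
        # test passes and kept for inspection (with a note on stderr)
        # when it fails
        open(os.path.join(path, 'probe'), 'wb').close()
        assert os.path.exists(os.path.join(path, 'probe'))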
diff --git a/git/test/lib/helper.py b/git/test/lib/helper.py
index 76aaaa38..5776f526 100644
--- a/git/test/lib/helper.py
+++ b/git/test/lib/helper.py
@@ -12,26 +12,18 @@ import tempfile
import shutil
import cStringIO
-GIT_REPO = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
+from base import (
+ maketemp,
+ rorepo_dir
+ )
-__all__ = (
- 'fixture_path', 'fixture', 'absolute_project_path', 'StringProcessAdapter',
- 'with_rw_repo', 'with_rw_and_rw_remote_repo', 'TestBase', 'TestCase', 'GIT_REPO'
- )
-
-#{ Routines
-
-def fixture_path(name):
- test_dir = os.path.dirname(os.path.dirname(__file__))
- return os.path.join(test_dir, "fixtures", name)
-def fixture(name):
- return open(fixture_path(name), 'rb').read()
+__all__ = (
+ 'StringProcessAdapter', 'GlobalsItemDeletorMetaCls',
+ 'with_rw_repo', 'with_rw_and_rw_remote_repo', 'TestBase', 'TestCase',
+ )
-def absolute_project_path():
- return os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
-#} END routines
#{ Adapters
@@ -52,13 +44,6 @@ class StringProcessAdapter(object):
#{ Decorators
-def _mktemp(*args):
- """Wrapper around default tempfile.mktemp to fix an osx issue"""
- tdir = tempfile.mktemp(*args)
- if sys.platform == 'darwin':
- tdir = '/private' + tdir
- return tdir
-
def _rmtree_onerror(osremove, fullpath, exec_info):
"""
Handle the case on windows that read-only files cannot be deleted by
@@ -87,7 +72,7 @@ def with_rw_repo(working_tree_ref, bare=False):
if bare:
prefix = ''
#END handle prefix
- repo_dir = _mktemp("%sbare_%s" % (prefix, func.__name__))
+ repo_dir = maketemp("%sbare_%s" % (prefix, func.__name__))
rw_repo = self.rorepo.clone(repo_dir, shared=True, bare=bare, n=True)
rw_repo.head.commit = rw_repo.commit(working_tree_ref)
@@ -143,8 +128,8 @@ def with_rw_and_rw_remote_repo(working_tree_ref):
assert isinstance(working_tree_ref, basestring), "Decorator requires ref name for working tree checkout"
def argument_passer(func):
def remote_repo_creator(self):
- remote_repo_dir = _mktemp("remote_repo_%s" % func.__name__)
- repo_dir = _mktemp("remote_clone_non_bare_repo")
+ remote_repo_dir = maketemp("remote_repo_%s" % func.__name__)
+ repo_dir = maketemp("remote_clone_non_bare_repo")
rw_remote_repo = self.rorepo.clone(remote_repo_dir, shared=True, bare=True)
rw_repo = rw_remote_repo.clone(repo_dir, shared=True, bare=False, n=True) # recursive alternates info ?
@@ -205,32 +190,40 @@ def with_rw_and_rw_remote_repo(working_tree_ref):
return argument_passer
#} END decorators
+
+#{ Meta Classes
+class GlobalsItemDeletorMetaCls(type):
+ """Utiltiy to prevent the RepoBase to be picked up by nose as the metacls
+ will delete the instance from the globals"""
+ #{ Configuration
+ # Set this to a string name of the module to delete
+ ModuleToDelete = None
+ #} END configuration
+
+ def __new__(metacls, name, bases, clsdict):
+ assert metacls.ModuleToDelete is not None, "Invalid metaclass configuration"
+ new_type = super(GlobalsItemDeletorMetaCls, metacls).__new__(metacls, name, bases, clsdict)
+ if name != metacls.ModuleToDelete:
+ mod = __import__(new_type.__module__, globals(), locals(), new_type.__module__)
+ delattr(mod, metacls.ModuleToDelete)
+ #END handle deletion
+ return new_type
+
+#} END meta classes
class TestBase(TestCase):
"""
Base Class providing default functionality to all tests such as:
-
- Utility functions provided by the TestCase base of the unittest method such as::
self.fail("todo")
self.failUnlessRaises(...)
-
- - Class level repository which is considered read-only as it is shared among
- all test cases in your type.
- Access it using::
- self.rorepo # 'ro' stands for read-only
-
- The rorepo is in fact your current project's git repo. If you refer to specific
- shas for your objects, be sure you choose some that are part of the immutable portion
- of the project history ( to assure tests don't fail for others ).
"""
@classmethod
def setUpAll(cls):
- """
- Dynamically add a read-only repository to our actual type. This way
- each test type has its own repository
- """
- cls.rorepo = Repo(GIT_REPO)
+ """This method is only called to provide the most basic functionality
+ Subclasses may just override it or implement it differently"""
+ cls.rorepo = Repo(rorepo_dir())
def _make_file(self, rela_path, data, repo=None):
"""
diff --git a/git/test/objects/__init__.py b/git/test/objects/__init__.py
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/git/test/objects/__init__.py
@@ -0,0 +1 @@
+
diff --git a/git/test/objects/lib.py b/git/test/objects/lib.py
new file mode 100644
index 00000000..fe1d9f9d
--- /dev/null
+++ b/git/test/objects/lib.py
@@ -0,0 +1,14 @@
+"""Provide customized obhject testing facilities"""
+
+from git.test.lib import (
+ rorepo_dir,
+ TestBase,
+ assert_equal,
+ assert_not_equal,
+ with_rw_repo,
+ StringProcessAdapter,
+ )
+
+class TestObjectBase(TestBase):
+ """Provides a default read-only repository in the rorepo member"""
+ pass
diff --git a/git/test/test_blob.py b/git/test/objects/test_blob.py
index 661c0501..58ac25b7 100644
--- a/git/test/test_blob.py
+++ b/git/test/objects/test_blob.py
@@ -4,11 +4,11 @@
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
-from git.test.lib import *
-from git import *
-from gitdb.util import hex_to_bin
+from lib import *
+from git.objects.blob import *
+from git.util import hex_to_bin
-class TestBlob(TestBase):
+class TestBlob(TestObjectBase):
def test_mime_type_should_return_mime_type_for_known_types(self):
blob = Blob(self.rorepo, **{'binsha': Blob.NULL_BIN_SHA, 'path': 'foo.png'})
diff --git a/git/test/test_commit.py b/git/test/objects/test_commit.py
index 4a8d8b87..80326fe9 100644
--- a/git/test/test_commit.py
+++ b/git/test/objects/test_commit.py
@@ -5,10 +5,14 @@
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
-from git.test.lib import *
-from git import *
-from gitdb import IStream
-from gitdb.util import hex_to_bin
+from lib import *
+from git.objects.commit import *
+from git.base import IStream
+
+from git.util import (
+ hex_to_bin,
+ Actor,
+ )
from cStringIO import StringIO
import time
@@ -65,7 +69,7 @@ def assert_commit_serialization(rwrepo, commit_id, print_performance_info=False)
# END handle performance info
-class TestCommit(TestBase):
+class TestCommit(TestObjectBase):
def test_bake(self):
diff --git a/git/test/test_submodule.py b/git/test/objects/test_submodule.py
index adb4fb82..14cb074c 100644
--- a/git/test/test_submodule.py
+++ b/git/test/objects/test_submodule.py
@@ -1,29 +1,51 @@
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
-from git.test.lib import *
+from lib import *
from git.exc import *
from git.objects.submodule.base import Submodule
from git.objects.submodule.root import RootModule, RootUpdateProgress
from git.util import to_native_path_linux, join_path_native
+
import shutil
import git
import os
+import sys
class TestRootProgress(RootUpdateProgress):
"""Just prints messages, for now without checking the correctness of the states"""
- def update(self, op, index, max_count, message=''):
+ def update(self, op, index, max_count, message='', input=''):
print message
prog = TestRootProgress()
-class TestSubmodule(TestBase):
+class TestSubmodule(TestObjectBase):
k_subm_current = "468cad66ff1f80ddaeee4123c24e4d53a032c00d"
k_subm_changed = "394ed7006ee5dc8bddfd132b64001d5dfc0ffdd3"
k_no_subm_tag = "0.1.6"
+ k_github_gitdb_url = 'git://github.com/gitpython-developers/gitdb.git'
+ env_gitdb_local_path = "GITPYTHON_TEST_GITDB_LOCAL_PATH"
+ def _generate_async_local_path(self):
+ return to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, 'git/ext/async'))
+
+ def _rewrite_gitdb_to_local_path(self, smgitdb):
+ """Rewrites the given submodule to point to the local path of the gitdb repository, if possible.
+ Otherwise it leaves it unchanged
+ :return: new clone path, or None if no new path was set"""
+ new_smclone_path = os.environ.get(self.env_gitdb_local_path)
+ if new_smclone_path is not None:
+ writer = smgitdb.config_writer()
+ writer.set_value('url', new_smclone_path)
+ del(writer)
+ assert smgitdb.config_reader().get_value('url') == new_smclone_path
+ assert smgitdb.url == new_smclone_path
+ else:
+ sys.stderr.write("Submodule tests need the gitdb repository. You can specify a local source setting the %s environment variable. Otherwise it will be downloaded from the internet" % self.env_gitdb_local_path)
+ #END handle submodule path
+ return new_smclone_path
def _do_base_tests(self, rwrepo):
"""Perform all tests in the given repository, it may be bare or nonbare"""
@@ -42,7 +64,7 @@ class TestSubmodule(TestBase):
assert sm.path == 'git/ext/gitdb'
assert sm.path != sm.name # in our case, we have ids there, which don't equal the path
- assert sm.url == 'git://github.com/gitpython-developers/gitdb.git'
+ assert sm.url == self.k_github_gitdb_url
assert sm.branch_path == 'refs/heads/master' # the default ...
assert sm.branch_name == 'master'
assert sm.parent_commit == rwrepo.head.commit
@@ -73,13 +95,11 @@ class TestSubmodule(TestBase):
if rwrepo.bare:
self.failUnlessRaises(InvalidGitRepositoryError, sm.config_writer)
else:
- writer = sm.config_writer()
# for faster checkout, set the url to the local path
- new_smclone_path = to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, sm.path))
- writer.set_value('url', new_smclone_path)
- del(writer)
- assert sm.config_reader().get_value('url') == new_smclone_path
- assert sm.url == new_smclone_path
+ # Note: This is nice, but no longer works with the latest git-python
+ # version. It would also require internet access for this to work,
+ # which is why we allow an override using an environment variable
+ new_smclone_path = self._rewrite_gitdb_to_local_path(sm)
# END handle bare repo
smold.config_reader()
@@ -175,7 +195,8 @@ class TestSubmodule(TestBase):
csm_repopath = csm.path
# adjust the path of the submodules module to point to the local destination
- new_csmclone_path = to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, sm.path, csm.path))
+ # In the current gitpython version, async is used directly by gitpython
+ new_csmclone_path = self._generate_async_local_path()
csm.config_writer().set_value('url', new_csmclone_path)
assert csm.url == new_csmclone_path
@@ -247,6 +268,10 @@ class TestSubmodule(TestBase):
self.failUnlessRaises(InvalidGitRepositoryError, sm.remove, dry_run=True)
sm.module().index.reset(working_tree=True)
+ # make sure sub-submodule is not modified by forcing it to update
+ # to the revision it is supposed to point to.
+ csm.update()
+
# this would work
assert sm.remove(dry_run=True) is sm
assert sm.module_exists()
@@ -384,9 +409,9 @@ class TestSubmodule(TestBase):
rm.config_reader()
rm.config_writer()
- # deep traversal gitdb / async
+ # deep traversal git / async
rsmsp = [sm.path for sm in rm.traverse()]
- assert len(rsmsp) == 2 # gitdb and async, async being a child of gitdb
+ assert len(rsmsp) == 1 # git and async, async being a child of git
# cannot set the parent commit as root module's path didn't exist
self.failUnlessRaises(ValueError, rm.set_parent_commit, 'HEAD')
@@ -406,8 +431,8 @@ class TestSubmodule(TestBase):
prep = sm.path
assert not sm.module_exists() # was never updated after rwrepo's clone
- # assure we clone from a local source
- sm.config_writer().set_value('url', to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, sm.path)))
+ # assure we clone from a local source
+ self._rewrite_gitdb_to_local_path(sm)
# dry-run does nothing
sm.update(recursive=False, dry_run=True, progress=prog)
@@ -440,7 +465,7 @@ class TestSubmodule(TestBase):
#================
nsmn = "newsubmodule"
nsmp = "submrepo"
- async_url = to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, rsmsp[0], rsmsp[1]))
+ async_url = self._generate_async_local_path()
nsm = Submodule.add(rwrepo, nsmn, nsmp, url=async_url)
csmadded = rwrepo.index.commit("Added submodule").hexsha # make sure we don't keep the repo reference
nsm.set_parent_commit(csmadded)
@@ -482,7 +507,11 @@ class TestSubmodule(TestBase):
# to the first repository, this way we have a fast checkout, and a completely different
# repository at the different url
nsm.set_parent_commit(csmremoved)
- nsmurl = to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, rsmsp[0]))
+ nsmurl = os.environ.get(self.env_gitdb_local_path, self.k_github_gitdb_url)
+
+ # Note: We would have liked to have a different url, but we cannot
+ # provoke this case
+ assert nsm.url != nsmurl
nsm.config_writer().set_value('url', nsmurl)
csmpathchange = rwrepo.index.commit("changed url")
nsm.set_parent_commit(csmpathchange)
diff --git a/git/test/test_tree.py b/git/test/objects/test_tree.py
index ec10e962..bc8d3f97 100644
--- a/git/test/test_tree.py
+++ b/git/test/objects/test_tree.py
@@ -4,16 +4,18 @@
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
-import os
-from git.test.lib import *
-from git import *
+
+from lib import *
from git.objects.fun import (
traverse_tree_recursive,
traverse_trees_recursive
)
+from git.objects.blob import Blob
+from git.objects.tree import Tree
from cStringIO import StringIO
+import os
-class TestTree(TestBase):
+class TestTree(TestObjectBase):
def test_serializable(self):
# tree at the given commit contains a submodule as well
diff --git a/git/test/performance/__init__.py b/git/test/performance/__init__.py
new file mode 100644
index 00000000..6bd117b9
--- /dev/null
+++ b/git/test/performance/__init__.py
@@ -0,0 +1,2 @@
+"""Note: This init file makes the performance tests an integral part of the test suite
+as nose will now pick them up. Previously the init file was intentionally omitted"""
diff --git a/git/test/performance/db/__init__.py b/git/test/performance/db/__init__.py
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/git/test/performance/db/__init__.py
@@ -0,0 +1 @@
+
diff --git a/git/test/performance/db/looseodb_impl.py b/git/test/performance/db/looseodb_impl.py
new file mode 100644
index 00000000..6d3c1fa6
--- /dev/null
+++ b/git/test/performance/db/looseodb_impl.py
@@ -0,0 +1,132 @@
+"""Performance data streaming performance"""
+from git.db.py import *
+from git.base import *
+from git.stream import *
+from async import ChannelThreadTask
+from git.util import (
+ pool,
+ bin_to_hex
+ )
+import os
+import sys
+from time import time
+
+from git.test.lib import (
+ GlobalsItemDeletorMetaCls,
+ make_memory_file,
+ with_rw_repo
+ )
+
+from git.test.performance.lib import TestBigRepoR
+
+
+#{ Utilities
+
+def read_chunked_stream(stream):
+ total = 0
+ while True:
+ chunk = stream.read(chunk_size)
+ total += len(chunk)
+ if len(chunk) < chunk_size:
+ break
+ # END read stream loop
+ assert total == stream.size
+ return stream
+
+
+class TestStreamReader(ChannelThreadTask):
+ """Expects input streams and reads them in chunks. It will read one at a time,
+ requiring a queue chunk of size 1"""
+ def __init__(self, *args):
+ super(TestStreamReader, self).__init__(*args)
+ self.fun = read_chunked_stream
+ self.max_chunksize = 1
+
+
+#} END utilities
+
+class PerfBaseDeletorMetaClass(GlobalsItemDeletorMetaCls):
+ ModuleToDelete = 'TestLooseDBWPerformanceBase'
+
+
+class TestLooseDBWPerformanceBase(TestBigRepoR):
+ __metaclass__ = PerfBaseDeletorMetaClass
+
+ large_data_size_bytes = 1000*1000*10 # some MiB should do it
+ moderate_data_size_bytes = 1000*1000*1 # just 1 MiB
+
+ #{ Configuration
+ LooseODBCls = None
+ #} END configuration
+
+ @classmethod
+ def setUpAll(cls):
+ super(TestLooseDBWPerformanceBase, cls).setUpAll()
+ if cls.LooseODBCls is None:
+ raise AssertionError("LooseODBCls must be set in subtype")
+ #END assert configuration
+ # currently there is no additional configuration
+
+ @with_rw_repo("HEAD")
+ def test_large_data_streaming(self, rwrepo):
+ # TODO: This part overlaps with the same file in git.test.performance.test_stream
+ # It should be shared if possible
+ objects_path = rwrepo.db_path('')
+ ldb = self.LooseODBCls(objects_path)
+
+ for randomize in range(2):
+ desc = (randomize and 'random ') or ''
+ print >> sys.stderr, "Creating %s data ..." % desc
+ st = time()
+ size, stream = make_memory_file(self.large_data_size_bytes, randomize)
+ elapsed = time() - st
+ print >> sys.stderr, "Done (in %f s)" % elapsed
+
+ # writing - due to the compression it will seem faster than it is
+ st = time()
+ binsha = ldb.store(IStream('blob', size, stream)).binsha
+ elapsed_add = time() - st
+ assert ldb.has_object(binsha)
+ hexsha = bin_to_hex(binsha)
+ db_file = os.path.join(objects_path, hexsha[:2], hexsha[2:])
+ fsize_kib = os.path.getsize(db_file) / 1000
+
+
+ size_kib = size / 1000
+ print >> sys.stderr, "%s: Added %i KiB (filesize = %i KiB) of %s data to loose odb in %f s ( %f Write KiB / s)" % (self.LooseODBCls.__name__, size_kib, fsize_kib, desc, elapsed_add, size_kib / elapsed_add)
+
+ # reading all at once
+ st = time()
+ ostream = ldb.stream(binsha)
+ shadata = ostream.read()
+ elapsed_readall = time() - st
+
+ stream.seek(0)
+ assert shadata == stream.getvalue()
+ print >> sys.stderr, "%s: Read %i KiB of %s data at once from loose odb in %f s ( %f Read KiB / s)" % (self.LooseODBCls.__name__, size_kib, desc, elapsed_readall, size_kib / elapsed_readall)
+
+
+ # reading in chunks of 1 MiB
+ cs = 512*1000
+ chunks = list()
+ st = time()
+ ostream = ldb.stream(binsha)
+ while True:
+ data = ostream.read(cs)
+ chunks.append(data)
+ if len(data) < cs:
+ break
+ # END read in chunks
+ elapsed_readchunks = time() - st
+
+ stream.seek(0)
+ assert ''.join(chunks) == stream.getvalue()
+
+ cs_kib = cs / 1000
+ print >> sys.stderr, "%s: Read %i KiB of %s data in %i KiB chunks from loose odb in %f s ( %f Read KiB / s)" % (self.LooseODBCls.__name__, size_kib, desc, cs_kib, elapsed_readchunks, size_kib / elapsed_readchunks)
+
+ # del db file so git has something to do
+ os.remove(db_file)
+ # END for each randomization factor
+
+
diff --git a/git/test/performance/db/odb_impl.py b/git/test/performance/db/odb_impl.py
new file mode 100644
index 00000000..677cf6a8
--- /dev/null
+++ b/git/test/performance/db/odb_impl.py
@@ -0,0 +1,72 @@
+"""Performance tests for object store"""
+
+from time import time
+import sys
+
+from git.test.performance.lib import (
+ TestBigRepoR,
+ GlobalsItemDeletorMetaCls
+ )
+
+class PerfBaseDeletorMetaClass(GlobalsItemDeletorMetaCls):
+ ModuleToDelete = 'TestObjDBPerformanceBase'
+
+
+class TestObjDBPerformanceBase(TestBigRepoR):
+ __metaclass__ = PerfBaseDeletorMetaClass
+
+ #{ Configuration
+ RepoCls = None # to be set by subclass
+ #} END configuration
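+ # Concrete tests set RepoCls to the repository type under test, e.g.
+ # CmdCompatibilityGitDB (test_odb_cmd.py) or PureCompatibilityGitDB
+ # (test_odb_pure.py)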
+
+ def test_random_access(self):
+ repo = self.rorepo
+
+ # GET COMMITS
+ st = time()
+ root_commit = repo.commit(self.head_sha_2k)
+ commits = list(root_commit.traverse())
+ nc = len(commits)
+ elapsed = time() - st
+
+ print >> sys.stderr, "%s: Retrieved %i commits from ObjectStore in %g s ( %f commits / s )" % (type(repo.odb), nc, elapsed, nc / elapsed)
+
+ # GET TREES
+ # walk all trees of all commits
+ st = time()
+ blobs_per_commit = list()
+ nt = 0
+ for commit in commits:
+ tree = commit.tree
+ blobs = list()
+ for item in tree.traverse():
+ nt += 1
+ if item.type == 'blob':
+ blobs.append(item)
+ # direct access for speed
+ # END while trees are there for walking
+ blobs_per_commit.append(blobs)
+ # END for each commit
+ elapsed = time() - st
+
+ print >> sys.stderr, "%s: Retrieved %i objects from %i commits in %g s ( %f objects / s )" % (type(repo.odb), nt, len(commits), elapsed, nt / elapsed)
+
+ # GET BLOBS
+ st = time()
+ nb = 0
+ too_many = 15000
+ data_bytes = 0
+ for blob_list in blobs_per_commit:
+ for blob in blob_list:
+ data_bytes += len(blob.data_stream.read())
+ # END for each blobsha
+ nb += len(blob_list)
+ if nb > too_many:
+ break
+ # END for each bloblist
+ elapsed = time() - st
+
+ print >> sys.stderr, "%s: Retrieved %i blob (%i KiB) and their data in %g s ( %f blobs / s, %f KiB / s )" % (type(repo.odb), nb, data_bytes/1000, elapsed, nb / elapsed, (data_bytes / 1000) / elapsed)
+
+
diff --git a/git/test/performance/db/packedodb_impl.py b/git/test/performance/db/packedodb_impl.py
new file mode 100644
index 00000000..b95a8d13
--- /dev/null
+++ b/git/test/performance/db/packedodb_impl.py
@@ -0,0 +1,107 @@
+# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
+#
+# This module is part of GitDB and is released under
+# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
+"""Performance tests for object store"""
+from git.test.performance.lib import (
+ TestBigRepoR,
+ GlobalsItemDeletorMetaCls
+ )
+
+from git.exc import UnsupportedOperation
+
+import sys
+import os
+from time import time
+import random
+
+
+class PerfBaseDeletorMetaClass(GlobalsItemDeletorMetaCls):
+ ModuleToDelete = 'TestPurePackedODBPerformanceBase'
+
+class TestPurePackedODBPerformanceBase(TestBigRepoR):
+ __metaclass__ = PerfBaseDeletorMetaClass
+
+ #{ Configuration
+ PackedODBCls = None
+ #} END configuration
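+ # Subclasses provide the packed database implementation, e.g.
+ # PackedODBCls = PurePackedODB (see test_packedodb_pure.py)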
+
+ @classmethod
+ def setUpAll(cls):
+ super(TestPurePackedODBPerformanceBase, cls).setUpAll()
+ if cls.PackedODBCls is None:
+ raise AssertionError("PackedODBCls must be set in subclass")
+ #END assert configuration
+ cls.ropdb = cls.PackedODBCls(cls.rorepo.db_path("pack"))
+
+ def test_pack_random_access(self):
+ pdb = self.ropdb
+
+ # sha lookup
+ st = time()
+ sha_list = list(pdb.sha_iter())
+ elapsed = time() - st
+ ns = len(sha_list)
+ print >> sys.stderr, "PDB: looked up %i shas by index in %f s ( %f shas/s )" % (ns, elapsed, ns / elapsed)
+
+ # sha lookup: best-case and worst-case access
+ pdb_pack_info = pdb._pack_info
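+ # cache the bound method so attribute lookup stays out of the timed loop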
+ st = time()
+ for sha in sha_list:
+ pdb_pack_info(sha)
+ # END for each sha to look up
+ elapsed = time() - st
+
+ # discard cache
+ del(pdb._entities)
+ pdb.entities()
+ print >> sys.stderr, "PDB: looked up %i sha in %i packs in %f s ( %f shas/s )" % (ns, len(pdb.entities()), elapsed, ns / elapsed)
+
+ # query info and streams only
+ max_items = 10000 # can wait longer when testing memory
+ for pdb_fun in (pdb.info, pdb.stream):
+ st = time()
+ for sha in sha_list[:max_items]:
+ pdb_fun(sha)
+ elapsed = time() - st
+ print >> sys.stderr, "PDB: Obtained %i object %s by sha in %f s ( %f items/s )" % (max_items, pdb_fun.__name__.upper(), elapsed, max_items / elapsed)
+ # END for each function
+
+ # retrieve stream and read all
+ max_items = 5000
+ pdb_stream = pdb.stream
+ total_size = 0
+ st = time()
+ for sha in sha_list[:max_items]:
+ stream = pdb_stream(sha)
+ stream.read()
+ total_size += stream.size
+ elapsed = time() - st
+ total_kib = total_size / 1000
+ print >> sys.stderr, "PDB: Obtained %i streams by sha and read all bytes totallying %i KiB ( %f KiB / s ) in %f s ( %f streams/s )" % (max_items, total_kib, total_kib/elapsed , elapsed, max_items / elapsed)
+
+ def test_correctness(self):
+ pdb = self.ropdb
+ # this check used to be disabled, as it worked reliably and verifying big repositories takes a long time
+ print >> sys.stderr, "Endurance run: verify streaming of objects (crc and sha)"
+ for crc in range(2):
+ count = 0
+ st = time()
+ for entity in pdb.entities():
+ pack_verify = entity.is_valid_stream
+ sha_by_index = entity.index().sha
+ for index in xrange(entity.index().size()):
+ try:
+ assert pack_verify(sha_by_index(index), use_crc=crc)
+ count += 1
+ except UnsupportedOperation:
+ pass
+ # END ignore old indices
+ # END for each index
+ # END for each entity
+ elapsed = time() - st
+ print >> sys.stderr, "PDB: verified %i objects (crc=%i) in %f s ( %f objects/s )" % (count, crc, elapsed, count / elapsed)
+ # END for each verify mode
+
diff --git a/git/test/performance/db/test_looseodb_cmd.py b/git/test/performance/db/test_looseodb_cmd.py
new file mode 100644
index 00000000..9738278c
--- /dev/null
+++ b/git/test/performance/db/test_looseodb_cmd.py
@@ -0,0 +1,11 @@
+from git.db.complex import CmdCompatibilityGitDB
+from looseodb_impl import TestLooseDBWPerformanceBase
+
+import sys
+
+class TestCmdLooseDB(TestLooseDBWPerformanceBase):
+ LooseODBCls = CmdCompatibilityGitDB
+
+ def test_info(self):
+ sys.stderr.write("This test does not check the write performance of the git command as it is implemented in pure python")
+
diff --git a/git/test/performance/db/test_looseodb_pure.py b/git/test/performance/db/test_looseodb_pure.py
new file mode 100644
index 00000000..46f39d5e
--- /dev/null
+++ b/git/test/performance/db/test_looseodb_pure.py
@@ -0,0 +1,6 @@
+from git.db.py.loose import PureLooseObjectODB
+from looseodb_impl import TestLooseDBWPerformanceBase
+
+class TestPureLooseDB(TestLooseDBWPerformanceBase):
+ LooseODBCls = PureLooseObjectODB
+
diff --git a/git/test/performance/db/test_odb_cmd.py b/git/test/performance/db/test_odb_cmd.py
new file mode 100644
index 00000000..acd55cc9
--- /dev/null
+++ b/git/test/performance/db/test_odb_cmd.py
@@ -0,0 +1,6 @@
+from git.db.complex import CmdCompatibilityGitDB
+from odb_impl import TestObjDBPerformanceBase
+
+class TestCmdDB(TestObjDBPerformanceBase):
+ RepoCls = CmdCompatibilityGitDB
+
diff --git a/git/test/performance/db/test_odb_pure.py b/git/test/performance/db/test_odb_pure.py
new file mode 100644
index 00000000..6ed3585d
--- /dev/null
+++ b/git/test/performance/db/test_odb_pure.py
@@ -0,0 +1,6 @@
+from git.db.complex import PureCompatibilityGitDB
+from odb_impl import TestObjDBPerformanceBase
+
+class TestPureDB(TestObjDBPerformanceBase):
+ RepoCls = PureCompatibilityGitDB
+
diff --git a/git/test/performance/db/test_packedodb_pure.py b/git/test/performance/db/test_packedodb_pure.py
new file mode 100644
index 00000000..4ea09779
--- /dev/null
+++ b/git/test/performance/db/test_packedodb_pure.py
@@ -0,0 +1,90 @@
+# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
+#
+# This module is part of GitDB and is released under
+# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
+from packedodb_impl import TestPurePackedODBPerformanceBase
+from git.db.py.pack import PurePackedODB
+
+from git.stream import NullStream
+
+from git.pack import PackEntity
+
+import os
+import sys
+
+from time import time
+from nose import SkipTest
+
+
+class CountedNullStream(NullStream):
+ __slots__ = '_bw'
+ def __init__(self):
+ self._bw = 0
+
+ def bytes_written(self):
+ return self._bw
+
+ def write(self, d):
+ self._bw += NullStream.write(self, d)
+
+
+class TestPurePackedODB(TestPurePackedODBPerformanceBase):
+ #{ Configuration
+ PackedODBCls = PurePackedODB
+ #} END configuration
+
+ def test_pack_writing_note(self):
+ sys.stderr.write("test_pack_writing should be adjusted to support different databases to read from - see test for more info")
+ raise SkipTest()
+
+ def test_pack_writing(self):
+ # see how fast we can write a pack from object streams.
+ # This will not be fast, as we take time for decompressing the streams as well
+ # For now we test the fast streaming and slow streaming versions manually
+ ostream = CountedNullStream()
+ # NOTE: We use the same repo twice to see whether OS caching helps
+ for rorepo in (self.rorepo, self.rorepo, self.ropdb):
+
+ ni = 5000
+ count = 0
+ total_size = 0
+ st = time()
+ objs = list()
+ for sha in rorepo.sha_iter():
+ count += 1
+ objs.append(rorepo.stream(sha))
+ if count == ni:
+ break
+ #END gather objects for pack-writing
+ elapsed = time() - st
+ print >> sys.stderr, "PDB Streaming: Got %i streams from %s by sha in in %f s ( %f streams/s )" % (ni, rorepo.__class__.__name__, elapsed, ni / elapsed)
+
+ st = time()
+ PackEntity.write_pack(objs, ostream.write)
+ elapsed = time() - st
+ total_kb = ostream.bytes_written() / 1000
+ print >> sys.stderr, "PDB Streaming: Wrote pack of size %i kb in %f s (%f kb/s)" % (total_kb, elapsed, total_kb/elapsed)
+ #END for each rorepo
+
+
+ def test_stream_reading(self):
+ raise SkipTest("This test was only used for --with-profile runs")
+ pdb = self.ropdb
+
+ # streaming only, meant for --with-profile runs
+ ni = 5000
+ count = 0
+ pdb_stream = pdb.stream
+ total_size = 0
+ st = time()
+ for sha in pdb.sha_iter():
+ if count == ni:
+ break
+ stream = pdb_stream(sha)
+ stream.read()
+ total_size += stream.size
+ count += 1
+ elapsed = time() - st
+ total_kib = total_size / 1000
+ print >> sys.stderr, "PDB Streaming: Got %i streams by sha and read all bytes totallying %i KiB ( %f KiB / s ) in %f s ( %f streams/s )" % (ni, total_kib, total_kib/elapsed , elapsed, ni / elapsed)
+
diff --git a/git/test/performance/lib.py b/git/test/performance/lib.py
index d0727b60..758d402d 100644
--- a/git/test/performance/lib.py
+++ b/git/test/performance/lib.py
@@ -1,17 +1,13 @@
"""Contains library functions"""
import os
-from git.test.lib import *
+from git.test.lib import (
+ TestBase,
+ GlobalsItemDeletorMetaCls
+ )
import shutil
import tempfile
-from git.db import (
- GitCmdObjectDB,
- GitDB
- )
-
-from git import (
- Repo
- )
+from git import Repo
#{ Invariants
k_env_git_repo = "GIT_PYTHON_TEST_GIT_REPO_BASE"
@@ -38,11 +34,7 @@ class TestBigRepoR(TestBase):
* gitrorepo
- * Read-Only git repository - actually the repo of git itself
-
- * puregitrorepo
-
- * As gitrepo, but uses pure python implementation
+ * a big read-only git repository
"""
#{ Invariants
@@ -50,29 +42,33 @@ class TestBigRepoR(TestBase):
head_sha_50 = '32347c375250fd470973a5d76185cac718955fd5'
#} END invariants
+ #{ Configuration
+ RepoCls = Repo
+ #} END configuration
+
@classmethod
def setUpAll(cls):
super(TestBigRepoR, cls).setUpAll()
- repo_path = resolve_or_fail(k_env_git_repo)
- cls.gitrorepo = Repo(repo_path, odbt=GitCmdObjectDB)
- cls.puregitrorepo = Repo(repo_path, odbt=GitDB)
+ if cls.RepoCls is None:
+ raise AssertionError("Require RepoCls in class %s to be set" % cls)
+ #END assert configuration
+ cls.rorepo = cls.RepoCls(resolve_or_fail(k_env_git_repo))
class TestBigRepoRW(TestBigRepoR):
"""As above, but provides a big repository that we can write to.
- Provides ``self.gitrwrepo`` and ``self.puregitrwrepo``"""
+ Provides ``self.rwrepo``"""
@classmethod
def setUpAll(cls):
super(TestBigRepoRW, cls).setUpAll()
dirname = tempfile.mktemp()
os.mkdir(dirname)
- cls.gitrwrepo = cls.gitrorepo.clone(dirname, shared=True, bare=True, odbt=GitCmdObjectDB)
- cls.puregitrwrepo = Repo(dirname, odbt=GitDB)
+ cls.rwrepo = cls.rorepo.clone(dirname, shared=True, bare=True)
@classmethod
def tearDownAll(cls):
- shutil.rmtree(cls.gitrwrepo.working_dir)
+ shutil.rmtree(cls.rwrepo.working_dir)
#} END base classes
diff --git a/git/test/performance/objects/__init__.py b/git/test/performance/objects/__init__.py
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/git/test/performance/objects/__init__.py
@@ -0,0 +1 @@
+
diff --git a/git/test/performance/test_commit.py b/git/test/performance/objects/test_commit.py
index 80421aa2..685fba2f 100644
--- a/git/test/performance/test_commit.py
+++ b/git/test/performance/objects/test_commit.py
@@ -4,18 +4,18 @@
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
-from lib import *
+from git.test.performance.lib import TestBigRepoRW
from git import *
-from gitdb import IStream
-from git.test.test_commit import assert_commit_serialization
+from git.base import IStream
+from git.test.objects.test_commit import assert_commit_serialization
from cStringIO import StringIO
from time import time
import sys
class TestPerformance(TestBigRepoRW):
-
+
# ref with about 100 commits in its history
- ref_100 = '0.1.6'
+ ref_100 = 'v0.99'
def _query_commit_info(self, c):
c.author
@@ -45,13 +45,14 @@ class TestPerformance(TestBigRepoRW):
# END for each object
# END for each commit
elapsed_time = time() - st
+ assert no, "Should have traversed a few objects"
print >> sys.stderr, "Traversed %i Trees and a total of %i unchached objects in %s [s] ( %f objs/s )" % (nc, no, elapsed_time, no/elapsed_time)
def test_commit_traversal(self):
# bound to cat-file parsing performance
nc = 0
st = time()
- for c in self.gitrorepo.commit(self.head_sha_2k).traverse(branch_first=False):
+ for c in self.rorepo.commit(self.head_sha_2k).traverse(branch_first=False):
nc += 1
self._query_commit_info(c)
# END for each traversed commit
@@ -62,7 +63,7 @@ class TestPerformance(TestBigRepoRW):
# bound to stream parsing performance
nc = 0
st = time()
- for c in Commit.iter_items(self.gitrorepo, self.head_sha_2k):
+ for c in Commit.iter_items(self.rorepo, self.head_sha_2k):
nc += 1
self._query_commit_info(c)
# END for each traversed commit
@@ -70,10 +71,10 @@ class TestPerformance(TestBigRepoRW):
print >> sys.stderr, "Iterated %i Commits in %s [s] ( %f commits/s )" % (nc, elapsed_time, nc/elapsed_time)
def test_commit_serialization(self):
- assert_commit_serialization(self.gitrwrepo, self.head_sha_2k, True)
+ assert_commit_serialization(self.rwrepo, self.head_sha_2k, True)
- rwrepo = self.gitrwrepo
- make_object = rwrepo.odb.store
+ rwrepo = self.rwrepo
+ make_object = rwrepo.store
# direct serialization - deserialization can be tested afterwards
# serialization is probably limited on IO
hc = rwrepo.commit(self.head_sha_2k)
diff --git a/git/test/performance/test_odb.py b/git/test/performance/test_odb.py
deleted file mode 100644
index 32b70f69..00000000
--- a/git/test/performance/test_odb.py
+++ /dev/null
@@ -1,70 +0,0 @@
-"""Performance tests for object store"""
-
-from time import time
-import sys
-import stat
-
-from lib import (
- TestBigRepoR
- )
-
-
-class TestObjDBPerformance(TestBigRepoR):
-
- def test_random_access(self):
- results = [ ["Iterate Commits"], ["Iterate Blobs"], ["Retrieve Blob Data"] ]
- for repo in (self.gitrorepo, self.puregitrorepo):
- # GET COMMITS
- st = time()
- root_commit = repo.commit(self.head_sha_2k)
- commits = list(root_commit.traverse())
- nc = len(commits)
- elapsed = time() - st
-
- print >> sys.stderr, "%s: Retrieved %i commits from ObjectStore in %g s ( %f commits / s )" % (type(repo.odb), nc, elapsed, nc / elapsed)
- results[0].append(elapsed)
-
- # GET TREES
- # walk all trees of all commits
- st = time()
- blobs_per_commit = list()
- nt = 0
- for commit in commits:
- tree = commit.tree
- blobs = list()
- for item in tree.traverse():
- nt += 1
- if item.type == 'blob':
- blobs.append(item)
- # direct access for speed
- # END while trees are there for walking
- blobs_per_commit.append(blobs)
- # END for each commit
- elapsed = time() - st
-
- print >> sys.stderr, "%s: Retrieved %i objects from %i commits in %g s ( %f objects / s )" % (type(repo.odb), nt, len(commits), elapsed, nt / elapsed)
- results[1].append(elapsed)
-
- # GET BLOBS
- st = time()
- nb = 0
- too_many = 15000
- data_bytes = 0
- for blob_list in blobs_per_commit:
- for blob in blob_list:
- data_bytes += len(blob.data_stream.read())
- # END for each blobsha
- nb += len(blob_list)
- if nb > too_many:
- break
- # END for each bloblist
- elapsed = time() - st
-
- print >> sys.stderr, "%s: Retrieved %i blob (%i KiB) and their data in %g s ( %f blobs / s, %f KiB / s )" % (type(repo.odb), nb, data_bytes/1000, elapsed, nb / elapsed, (data_bytes / 1000) / elapsed)
- results[2].append(elapsed)
- # END for each repo type
-
- # final results
- for test_name, a, b in results:
- print >> sys.stderr, "%s: %f s vs %f s, pure is %f times slower" % (test_name, a, b, b / a)
- # END for each result
diff --git a/git/test/performance/test_streams.py b/git/test/performance/test_streams.py
deleted file mode 100644
index 7f17d722..00000000
--- a/git/test/performance/test_streams.py
+++ /dev/null
@@ -1,131 +0,0 @@
-"""Performance data streaming performance"""
-
-from git.test.lib import *
-from gitdb import *
-from gitdb.util import bin_to_hex
-
-from time import time
-import os
-import sys
-import stat
-import subprocess
-
-from gitdb.test.lib import make_memory_file
-
-from lib import (
- TestBigRepoR
- )
-
-
-class TestObjDBPerformance(TestBigRepoR):
-
- large_data_size_bytes = 1000*1000*10 # some MiB should do it
- moderate_data_size_bytes = 1000*1000*1 # just 1 MiB
-
- @with_rw_repo('HEAD', bare=True)
- def test_large_data_streaming(self, rwrepo):
- # TODO: This part overlaps with the same file in gitdb.test.performance.test_stream
- # It should be shared if possible
- ldb = LooseObjectDB(os.path.join(rwrepo.git_dir, 'objects'))
-
- for randomize in range(2):
- desc = (randomize and 'random ') or ''
- print >> sys.stderr, "Creating %s data ..." % desc
- st = time()
- size, stream = make_memory_file(self.large_data_size_bytes, randomize)
- elapsed = time() - st
- print >> sys.stderr, "Done (in %f s)" % elapsed
-
- # writing - due to the compression it will seem faster than it is
- st = time()
- binsha = ldb.store(IStream('blob', size, stream)).binsha
- elapsed_add = time() - st
- assert ldb.has_object(binsha)
- db_file = ldb.readable_db_object_path(bin_to_hex(binsha))
- fsize_kib = os.path.getsize(db_file) / 1000
-
-
- size_kib = size / 1000
- print >> sys.stderr, "Added %i KiB (filesize = %i KiB) of %s data to loose odb in %f s ( %f Write KiB / s)" % (size_kib, fsize_kib, desc, elapsed_add, size_kib / elapsed_add)
-
- # reading all at once
- st = time()
- ostream = ldb.stream(binsha)
- shadata = ostream.read()
- elapsed_readall = time() - st
-
- stream.seek(0)
- assert shadata == stream.getvalue()
- print >> sys.stderr, "Read %i KiB of %s data at once from loose odb in %f s ( %f Read KiB / s)" % (size_kib, desc, elapsed_readall, size_kib / elapsed_readall)
-
-
- # reading in chunks of 1 MiB
- cs = 512*1000
- chunks = list()
- st = time()
- ostream = ldb.stream(binsha)
- while True:
- data = ostream.read(cs)
- chunks.append(data)
- if len(data) < cs:
- break
- # END read in chunks
- elapsed_readchunks = time() - st
-
- stream.seek(0)
- assert ''.join(chunks) == stream.getvalue()
-
- cs_kib = cs / 1000
- print >> sys.stderr, "Read %i KiB of %s data in %i KiB chunks from loose odb in %f s ( %f Read KiB / s)" % (size_kib, desc, cs_kib, elapsed_readchunks, size_kib / elapsed_readchunks)
-
- # del db file so git has something to do
- os.remove(db_file)
-
- # VS. CGIT
- ##########
- # CGIT ! Can using the cgit programs be faster ?
- proc = rwrepo.git.hash_object('-w', '--stdin', as_process=True, istream=subprocess.PIPE)
-
- # write file - pump everything in at once to be a fast as possible
- data = stream.getvalue() # cache it
- st = time()
- proc.stdin.write(data)
- proc.stdin.close()
- gitsha = proc.stdout.read().strip()
- proc.wait()
- gelapsed_add = time() - st
- del(data)
- assert gitsha == bin_to_hex(binsha) # we do it the same way, right ?
-
- # as its the same sha, we reuse our path
- fsize_kib = os.path.getsize(db_file) / 1000
- print >> sys.stderr, "Added %i KiB (filesize = %i KiB) of %s data to using git-hash-object in %f s ( %f Write KiB / s)" % (size_kib, fsize_kib, desc, gelapsed_add, size_kib / gelapsed_add)
-
- # compare ...
- print >> sys.stderr, "Git-Python is %f %% faster than git when adding big %s files" % (100.0 - (elapsed_add / gelapsed_add) * 100, desc)
-
-
- # read all
- st = time()
- s, t, size, data = rwrepo.git.get_object_data(gitsha)
- gelapsed_readall = time() - st
- print >> sys.stderr, "Read %i KiB of %s data at once using git-cat-file in %f s ( %f Read KiB / s)" % (size_kib, desc, gelapsed_readall, size_kib / gelapsed_readall)
-
- # compare
- print >> sys.stderr, "Git-Python is %f %% faster than git when reading big %sfiles" % (100.0 - (elapsed_readall / gelapsed_readall) * 100, desc)
-
-
- # read chunks
- st = time()
- s, t, size, stream = rwrepo.git.stream_object_data(gitsha)
- while True:
- data = stream.read(cs)
- if len(data) < cs:
- break
- # END read stream
- gelapsed_readchunks = time() - st
- print >> sys.stderr, "Read %i KiB of %s data in %i KiB chunks from git-cat-file in %f s ( %f Read KiB / s)" % (size_kib, desc, cs_kib, gelapsed_readchunks, size_kib / gelapsed_readchunks)
-
- # compare
- print >> sys.stderr, "Git-Python is %f %% faster than git when reading big %s files in chunks" % (100.0 - (elapsed_readchunks / gelapsed_readchunks) * 100, desc)
- # END for each randomization factor
diff --git a/git/test/test_actor.py b/git/test/test_actor.py
deleted file mode 100644
index b8e5ba3b..00000000
--- a/git/test/test_actor.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# test_actor.py
-# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
-#
-# This module is part of GitPython and is released under
-# the BSD License: http://www.opensource.org/licenses/bsd-license.php
-
-import os
-from git.test.lib import *
-from git import *
-
-class TestActor(object):
- def test_from_string_should_separate_name_and_email(self):
- a = Actor._from_string("Michael Trier <mtrier@example.com>")
- assert_equal("Michael Trier", a.name)
- assert_equal("mtrier@example.com", a.email)
-
- # base type capabilities
- assert a == a
- assert not ( a != a )
- m = set()
- m.add(a)
- m.add(a)
- assert len(m) == 1
-
- def test_from_string_should_handle_just_name(self):
- a = Actor._from_string("Michael Trier")
- assert_equal("Michael Trier", a.name)
- assert_equal(None, a.email)
-
- def test_should_display_representation(self):
- a = Actor._from_string("Michael Trier <mtrier@example.com>")
- assert_equal('<git.Actor "Michael Trier <mtrier@example.com>">', repr(a))
-
- def test_str_should_alias_name(self):
- a = Actor._from_string("Michael Trier <mtrier@example.com>")
- assert_equal(a.name, str(a)) \ No newline at end of file
diff --git a/git/test/test_base.py b/git/test/test_base.py
index e630d151..7488ac6b 100644
--- a/git/test/test_base.py
+++ b/git/test/test_base.py
@@ -3,18 +3,48 @@
#
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
+from lib import (
+ TestBase,
+ with_rw_repo,
+ DummyStream,
+ DeriveTest,
+ with_rw_and_rw_remote_repo
+ )
import git.objects.base as base
+from git.objects import (
+ Blob,
+ Tree,
+ Commit,
+ TagObject
+ )
import git.refs as refs
-import os
-from git.test.lib import *
-from git import *
+
from itertools import chain
from git.objects.util import get_object_type_by_name
-from gitdb.util import hex_to_bin
+from git.util import hex_to_bin
import tempfile
+##################
+
+from git.util import (
+ NULL_BIN_SHA
+ )
+
+from git.typ import str_blob_type
+from git.base import (
+ OInfo,
+ OPackInfo,
+ ODeltaPackInfo,
+ OStream,
+ OPackStream,
+ ODeltaPackStream,
+ IStream,
+ )
+
+import os
+
class TestBase(TestBase):
type_tuples = ( ("blob", "8741fc1d09d61f02ffd8cded15ff603eff1ec070", "blob.py"),
@@ -77,7 +107,7 @@ class TestBase(TestBase):
assert base.Object in get_object_type_by_name(tname).mro()
# END for each known type
- assert_raises( ValueError, get_object_type_by_name, "doesntexist" )
+ self.failUnlessRaises(ValueError, get_object_type_by_name, "doesntexist")
def test_object_resolution(self):
# objects must be resolved to shas so they compare equal
@@ -98,3 +128,85 @@ class TestBase(TestBase):
assert not rw_repo.config_reader("repository").getboolean("core", "bare")
assert rw_remote_repo.config_reader("repository").getboolean("core", "bare")
assert os.path.isdir(os.path.join(rw_repo.working_tree_dir,'lib'))
+
+
+
+class TestBaseTypes(TestBase):
+
+ def test_streams(self):
+ # test info
+ sha = NULL_BIN_SHA
+ s = 20
+ blob_id = 3
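+ # 3 is the pack object-type id for blobs (commit=1, tree=2, blob=3, tag=4)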
+
+ info = OInfo(sha, str_blob_type, s)
+ assert info.binsha == sha
+ assert info.type == str_blob_type
+ assert info.type_id == blob_id
+ assert info.size == s
+
+ # test pack info
+ # provides type_id
+ pinfo = OPackInfo(0, blob_id, s)
+ assert pinfo.type == str_blob_type
+ assert pinfo.type_id == blob_id
+ assert pinfo.pack_offset == 0
+
+ dpinfo = ODeltaPackInfo(0, blob_id, s, sha)
+ assert dpinfo.type == str_blob_type
+ assert dpinfo.type_id == blob_id
+ assert dpinfo.delta_info == sha
+ assert dpinfo.pack_offset == 0
+
+
+ # test ostream
+ stream = DummyStream()
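+ # OInfo is a tuple subtype, so appending the stream yields exactly the OStream constructor args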
+ ostream = OStream(*(info + (stream, )))
+ assert ostream.stream is stream
+ ostream.read(15)
+ stream._assert()
+ assert stream.bytes == 15
+ ostream.read(20)
+ assert stream.bytes == 20
+
+ # test packstream
+ postream = OPackStream(*(pinfo + (stream, )))
+ assert postream.stream is stream
+ postream.read(10)
+ stream._assert()
+ assert stream.bytes == 10
+
+ # test deltapackstream
+ dpostream = ODeltaPackStream(*(dpinfo + (stream, )))
+ assert dpostream.stream is stream
+ dpostream.read(5)
+ stream._assert()
+ assert stream.bytes == 5
+
+ # derive with own args
+ DeriveTest(sha, str_blob_type, s, stream, 'mine', myarg=3)._assert()
+
+ # test istream
+ istream = IStream(str_blob_type, s, stream)
+ assert istream.binsha is None
+ istream.binsha = sha
+ assert istream.binsha == sha
+
+ assert len(istream.binsha) == 20
+ assert len(istream.hexsha) == 40
+
+ assert istream.size == s
+ istream.size = s * 2
+ assert istream.size == s * 2
+ assert istream.type == str_blob_type
+ istream.type = "something"
+ assert istream.type == "something"
+ assert istream.stream is stream
+ istream.stream = None
+ assert istream.stream is None
+
+ assert istream.error is None
+ istream.error = Exception()
+ assert isinstance(istream.error, Exception)
+
+
diff --git a/git/test/test_config.py b/git/test/test_config.py
index 173e380c..d2e199e3 100644
--- a/git/test/test_config.py
+++ b/git/test/test_config.py
@@ -4,13 +4,13 @@
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
-from git.test.lib import *
-from git import *
+from git.test.lib import TestBase, fixture_path
import StringIO
+from git.config import *
from copy import copy
from ConfigParser import NoSectionError
-class TestBase(TestCase):
+class TestConfig(TestBase):
def _to_memcache(self, file_path):
fp = open(file_path, "r")
@@ -30,7 +30,9 @@ class TestBase(TestCase):
w_config.read() # enforce reading
assert w_config._sections
w_config.write() # enforce writing
- assert file_obj.getvalue() == file_obj_orig.getvalue()
+
+ # we stripped lines when reading, so the results differ
+ assert file_obj.getvalue() != file_obj_orig.getvalue()
# creating an additional config writer must fail due to exclusive access
self.failUnlessRaises(IOError, GitConfigParser, file_obj, read_only = False)
@@ -56,10 +58,10 @@ class TestBase(TestCase):
file_obj.seek(0)
r_config = GitConfigParser(file_obj, read_only=True)
assert r_config.has_section(sname)
assert r_config.has_option(sname, oname)
assert r_config.get(sname, oname) == val
-
# END for each filename
def test_base(self):
diff --git a/git/test/test_db.py b/git/test/test_db.py
deleted file mode 100644
index db2d7983..00000000
--- a/git/test/test_db.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# test_repo.py
-# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
-#
-# This module is part of GitPython and is released under
-# the BSD License: http://www.opensource.org/licenses/bsd-license.php
-from git.test.lib import *
-from git.db import *
-from gitdb.util import bin_to_hex
-from git.exc import BadObject
-import os
-
-class TestDB(TestBase):
-
- def test_base(self):
- gdb = GitCmdObjectDB(os.path.join(self.rorepo.git_dir, 'objects'), self.rorepo.git)
-
- # partial to complete - works with everything
- hexsha = bin_to_hex(gdb.partial_to_complete_sha_hex("0.1.6"))
- assert len(hexsha) == 40
-
- assert bin_to_hex(gdb.partial_to_complete_sha_hex(hexsha[:20])) == hexsha
-
- # fails with BadObject
- for invalid_rev in ("0000", "bad/ref", "super bad"):
- self.failUnlessRaises(BadObject, gdb.partial_to_complete_sha_hex, invalid_rev)
diff --git a/git/test/test_diff.py b/git/test/test_diff.py
index 83db2df6..79f038e8 100644
--- a/git/test/test_diff.py
+++ b/git/test/test_diff.py
@@ -4,8 +4,15 @@
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
-from git.test.lib import *
-from git import *
+from git.test.lib import (
+ TestBase,
+ StringProcessAdapter,
+ fixture,
+ assert_equal,
+ assert_true
+ )
+
+from git.diff import *
class TestDiff(TestBase):
diff --git a/git/test/test_example.py b/git/test/test_example.py
new file mode 100644
index 00000000..dbab3118
--- /dev/null
+++ b/git/test/test_example.py
@@ -0,0 +1,64 @@
+# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
+#
+# This module is part of GitDB and is released under
+# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
+"""Module with examples from the tutorial section of the docs"""
+from lib import TestBase, fixture_path
+from git.base import IStream
+from git.db.py.loose import PureLooseObjectODB
+from git.util import pool
+
+from cStringIO import StringIO
+
+from async import IteratorReader
+
+class TestExamples(TestBase):
+
+ def test_base(self):
+ ldb = PureLooseObjectODB(fixture_path("../../../.git/objects"))
+
+ for sha1 in ldb.sha_iter():
+ oinfo = ldb.info(sha1)
+ ostream = ldb.stream(sha1)
+ assert oinfo[:3] == ostream[:3]
+
+ assert len(ostream.read()) == ostream.size
+ assert ldb.has_object(oinfo.binsha)
+ # END for each sha in database
+ # ensure we close all files
+ try:
+ del(ostream)
+ del(oinfo)
+ except UnboundLocalError:
+ pass
+ # END ignore exception if there are no loose objects
+
+ data = "my data"
+ istream = IStream("blob", len(data), StringIO(data))
+
+ # the object does not yet have a sha
+ assert istream.binsha is None
+ ldb.store(istream)
+ # now the sha is set
+ assert len(istream.binsha) == 20
+ assert ldb.has_object(istream.binsha)
+
+
+ # async operation
+ # Create a reader from an iterator
+ reader = IteratorReader(ldb.sha_iter())
+
+ # get reader for object streams
+ info_reader = ldb.stream_async(reader)
+
+ # read one
+ info = info_reader.read(1)[0]
+
+ # read all the rest until depletion
+ ostreams = info_reader.read()
+
+ # set the pool to use two threads
+ pool.set_size(2)
+
+ # set size to 0 to switch back to synchronous operation
+ pool.set_size(0)
diff --git a/git/test/test_fun.py b/git/test/test_fun.py
index b7991cdb..ed069912 100644
--- a/git/test/test_fun.py
+++ b/git/test/test_fun.py
@@ -1,4 +1,4 @@
-from git.test.lib import *
+from git.test.lib import TestBase, with_rw_repo
from git.objects.fun import (
traverse_tree_recursive,
traverse_trees_recursive,
@@ -9,9 +9,9 @@ from git.index.fun import (
aggressive_tree_merge
)
-from gitdb.util import bin_to_hex
-from gitdb.base import IStream
-from gitdb.typ import str_tree_type
+from git.util import bin_to_hex
+from git.base import IStream
+from git.typ import str_tree_type
from stat import (
S_IFDIR,
diff --git a/git/test/test_git.py b/git/test/test_git.py
index c92a642b..b9a0b617 100644
--- a/git/test/test_git.py
+++ b/git/test/test_git.py
@@ -5,80 +5,96 @@
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
import os, sys
-from git.test.lib import *
+from git.test.lib import (
+ TestBase,
+ patch_object,
+ raises,
+ assert_equal,
+ assert_true,
+ assert_match,
+ fixture_path
+ )
from git import Git, GitCommandError
-class TestGit(TestCase):
-
- @classmethod
- def setUpAll(cls):
- cls.git = Git(GIT_REPO)
+class TestGit(TestBase):
+
+ @classmethod
+ def setUpAll(cls):
+ super(TestGit, cls).setUpAll()
+ cls.git = Git(cls.rorepo.working_dir)
- @patch_object(Git, 'execute')
- def test_call_process_calls_execute(self, git):
- git.return_value = ''
- self.git.version()
- assert_true(git.called)
- assert_equal(git.call_args, ((['git', 'version'],), {}))
+ @patch_object(Git, 'execute')
+ def test_call_process_calls_execute(self, git):
+ git.return_value = ''
+ self.git.version()
+ assert_true(git.called)
+ assert_equal(git.call_args, ((['git', 'version'],), {}))
- @raises(GitCommandError)
- def test_it_raises_errors(self):
- self.git.this_does_not_exist()
+ @raises(GitCommandError)
+ def test_it_raises_errors(self):
+ self.git.this_does_not_exist()
- def test_it_transforms_kwargs_into_git_command_arguments(self):
- assert_equal(["-s"], self.git.transform_kwargs(**{'s': True}))
- assert_equal(["-s5"], self.git.transform_kwargs(**{'s': 5}))
+ def test_it_transforms_kwargs_into_git_command_arguments(self):
+ assert_equal(["-s"], self.git.transform_kwargs(**{'s': True}))
+ assert_equal(["-s5"], self.git.transform_kwargs(**{'s': 5}))
- assert_equal(["--max-count"], self.git.transform_kwargs(**{'max_count': True}))
- assert_equal(["--max-count=5"], self.git.transform_kwargs(**{'max_count': 5}))
+ assert_equal(["--max-count"], self.git.transform_kwargs(**{'max_count': True}))
+ assert_equal(["--max-count=5"], self.git.transform_kwargs(**{'max_count': 5}))
- assert_equal(["-s", "-t"], self.git.transform_kwargs(**{'s': True, 't': True}))
+ assert_equal(["-s", "-t"], self.git.transform_kwargs(**{'s': True, 't': True}))
- def test_it_executes_git_to_shell_and_returns_result(self):
- assert_match('^git version [\d\.]{2}.*$', self.git.execute(["git","version"]))
+ def test_it_executes_git_to_shell_and_returns_result(self):
+ assert_match('^git version [\d\.]{2}.*$', self.git.execute(["git","version"]))
- def test_it_accepts_stdin(self):
- filename = fixture_path("cat_file_blob")
- fh = open(filename, 'r')
- assert_equal("70c379b63ffa0795fdbfbc128e5a2818397b7ef8",
- self.git.hash_object(istream=fh, stdin=True))
- fh.close()
+ def test_it_accepts_stdin(self):
+ filename = fixture_path("cat_file_blob")
+ fh = open(filename, 'r')
+ assert_equal("70c379b63ffa0795fdbfbc128e5a2818397b7ef8",
+ self.git.hash_object(istream=fh, stdin=True))
+ fh.close()
- @patch_object(Git, 'execute')
- def test_it_ignores_false_kwargs(self, git):
- # this_should_not_be_ignored=False implies it *should* be ignored
- output = self.git.version(pass_this_kwarg=False)
- assert_true("pass_this_kwarg" not in git.call_args[1])
-
- def test_persistent_cat_file_command(self):
- # read header only
- import subprocess as sp
- hexsha = "b2339455342180c7cc1e9bba3e9f181f7baa5167"
- g = self.git.cat_file(batch_check=True, istream=sp.PIPE,as_process=True)
- g.stdin.write("b2339455342180c7cc1e9bba3e9f181f7baa5167\n")
- g.stdin.flush()
- obj_info = g.stdout.readline()
-
- # read header + data
- g = self.git.cat_file(batch=True, istream=sp.PIPE,as_process=True)
- g.stdin.write("b2339455342180c7cc1e9bba3e9f181f7baa5167\n")
- g.stdin.flush()
- obj_info_two = g.stdout.readline()
- assert obj_info == obj_info_two
-
- # read data - have to read it in one large chunk
- size = int(obj_info.split()[2])
- data = g.stdout.read(size)
- terminating_newline = g.stdout.read(1)
-
- # now we should be able to read a new object
- g.stdin.write("b2339455342180c7cc1e9bba3e9f181f7baa5167\n")
- g.stdin.flush()
- assert g.stdout.readline() == obj_info
-
-
- # same can be achived using the respective command functions
- hexsha, typename, size = self.git.get_object_header(hexsha)
- hexsha, typename_two, size_two, data = self.git.get_object_data(hexsha)
- assert typename == typename_two and size == size_two
+ @patch_object(Git, 'execute')
+ def test_it_ignores_false_kwargs(self, git):
+ # pass_this_kwarg=False implies the kwarg *should* be ignored
+ output = self.git.version(pass_this_kwarg=False)
+ assert_true("pass_this_kwarg" not in git.call_args[1])
+
+ def test_persistent_cat_file_command(self):
+ # read header only
+ import subprocess as sp
+ hexsha = "b2339455342180c7cc1e9bba3e9f181f7baa5167"
+ g = self.git.cat_file(batch_check=True, istream=sp.PIPE,as_process=True)
+ g.stdin.write("b2339455342180c7cc1e9bba3e9f181f7baa5167\n")
+ g.stdin.flush()
+ obj_info = g.stdout.readline()
+
+ # read header + data
+ g = self.git.cat_file(batch=True, istream=sp.PIPE,as_process=True)
+ g.stdin.write("b2339455342180c7cc1e9bba3e9f181f7baa5167\n")
+ g.stdin.flush()
+ obj_info_two = g.stdout.readline()
+ assert obj_info == obj_info_two
+
+ # read data - have to read it in one large chunk
+ size = int(obj_info.split()[2])
+ data = g.stdout.read(size)
+ terminating_newline = g.stdout.read(1)
+
+ # now we should be able to read a new object
+ g.stdin.write("b2339455342180c7cc1e9bba3e9f181f7baa5167\n")
+ g.stdin.flush()
+ assert g.stdout.readline() == obj_info
+
+
+ # the same can be achieved using the respective command functions
+ hexsha, typename, size = self.git.get_object_header(hexsha)
+ hexsha, typename_two, size_two, data = self.git.get_object_data(hexsha)
+ assert typename == typename_two and size == size_two
+
+ def test_version(self):
+ v = self.git.version_info
+ assert isinstance(v, tuple)
+ for n in v:
+ assert isinstance(n, int)
+ #END verify number types
diff --git a/git/test/test_import.py b/git/test/test_import.py
new file mode 100644
index 00000000..a5a1d11b
--- /dev/null
+++ b/git/test/test_import.py
@@ -0,0 +1,58 @@
+# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
+#
+# This module is part of GitDB and is released under
+# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
+"""This module's whole purpose is to verify the __all__ descriptions in the respective
+module, by importing using from x import *"""
+
+# perform the actual imports
+import os
+
+from git import *
+
+def import_all(topdir, topmodule='git', skip = "test"):
+ base = os.path.basename
+ join = os.path.join
+ init_script = '__init__.py'
+ prev_cwd = os.getcwd()
+ try:
+ os.chdir(os.path.dirname(topdir))
+ for root, dirs, files in os.walk(base(topdir)):
+ if init_script not in files:
+ del(dirs[:])
+ continue
+ #END ignore non-packages
+
+ if skip in root:
+ continue
+ #END handle ignores
+
+ for relafile in files:
+ if not relafile.endswith('.py'):
+ continue
+ if relafile == init_script:
+ continue
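+ # turn the relative file path into a dotted module path (handles / and \ separators)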
+ module_path = join(root, os.path.splitext(relafile)[0]).replace("/", ".").replace("\\", ".")
+
+ m = __import__(module_path, globals(), locals(), [""])
+ try:
+ attrlist = m.__all__
+ for attr in attrlist:
+ assert hasattr(m, attr), "Invalid item in %s.__all__: %s" % (module_path, attr)
+ #END verify
+ except AttributeError:
+ pass
+ # END try each listed attribute
+ #END for each file in dir
+ #END for each item
+ finally:
+ os.chdir(prev_cwd)
+ #END handle previous currentdir
+
+
+
+class TestDummy(object):
+ def test_base(self):
+ dn = os.path.dirname
+ # NOTE: I don't think this is working, as the __all__ variable is not used in this case
+ import_all(dn(dn(__file__)))
diff --git a/git/test/test_index.py b/git/test/test_index.py
index 5d227897..7d65cb9b 100644
--- a/git/test/test_index.py
+++ b/git/test/test_index.py
@@ -4,7 +4,12 @@
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
-from git.test.lib import *
+from git.test.lib import (
+ TestBase,
+ with_rw_repo,
+ fixture_path,
+ fixture
+ )
from git import *
import inspect
import os
@@ -12,6 +17,7 @@ import sys
import tempfile
import glob
import shutil
+import time
from stat import *
class TestIndex(TestBase):
diff --git a/git/test/test_pack.py b/git/test/test_pack.py
new file mode 100644
index 00000000..c398fc56
--- /dev/null
+++ b/git/test/test_pack.py
@@ -0,0 +1,247 @@
+# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
+#
+# This module is part of GitDB and is released under
+# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
+"""Test everything about packs reading and writing"""
+from lib import (
+ TestBase,
+ with_rw_directory,
+ with_packs_rw,
+ fixture_path
+ )
+from git.stream import DeltaApplyReader
+
+from git.pack import (
+ PackEntity,
+ PackIndexFile,
+ PackFile
+ )
+
+from git.base import (
+ OInfo,
+ OStream,
+ )
+
+from git.fun import delta_types
+from git.exc import UnsupportedOperation
+from git.util import to_bin_sha
+from itertools import izip, chain
+from nose import SkipTest
+
+import os
+import sys
+import tempfile
+
+
+#{ Utilities
+def bin_sha_from_filename(filename):
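+ # fixture packs are named pack-<hexsha>.<ext>; skip the 'pack-' prefix and the extension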
+ return to_bin_sha(os.path.splitext(os.path.basename(filename))[0][5:])
+#} END utilities
+
+class TestPack(TestBase):
+
+ packindexfile_v1 = (fixture_path('packs/pack-c0438c19fb16422b6bbcce24387b3264416d485b.idx'), 1, 67)
+ packindexfile_v2 = (fixture_path('packs/pack-11fdfa9e156ab73caae3b6da867192221f2089c2.idx'), 2, 30)
+ packindexfile_v2_3_ascii = (fixture_path('packs/pack-a2bf8e71d8c18879e499335762dd95119d93d9f1.idx'), 2, 42)
+ packfile_v2_1 = (fixture_path('packs/pack-c0438c19fb16422b6bbcce24387b3264416d485b.pack'), 2, packindexfile_v1[2])
+ packfile_v2_2 = (fixture_path('packs/pack-11fdfa9e156ab73caae3b6da867192221f2089c2.pack'), 2, packindexfile_v2[2])
+ packfile_v2_3_ascii = (fixture_path('packs/pack-a2bf8e71d8c18879e499335762dd95119d93d9f1.pack'), 2, packindexfile_v2_3_ascii[2])
+
+
+ def _assert_index_file(self, index, version, size):
+ assert index.packfile_checksum() != index.indexfile_checksum()
+ assert len(index.packfile_checksum()) == 20
+ assert len(index.indexfile_checksum()) == 20
+ assert index.version() == version
+ assert index.size() == size
+ assert len(index.offsets()) == size
+
+ # get all data of all objects
+ for oidx in xrange(index.size()):
+ sha = index.sha(oidx)
+ assert oidx == index.sha_to_index(sha)
+
+ entry = index.entry(oidx)
+ assert len(entry) == 3
+
+ assert entry[0] == index.offset(oidx)
+ assert entry[1] == sha
+ assert entry[2] == index.crc(oidx)
+
+ # verify partial sha
+ for l in (4,8,11,17,20):
+ assert index.partial_sha_to_index(sha[:l], l*2) == oidx
+
+ # END for each object index in indexfile
+ self.failUnlessRaises(ValueError, index.partial_sha_to_index, "\0", 2)
+
+
+ def _assert_pack_file(self, pack, version, size):
+ assert pack.version() == 2
+ assert pack.size() == size
+ assert len(pack.checksum()) == 20
+
+ num_obj = 0
+ for obj in pack.stream_iter():
+ num_obj += 1
+ info = pack.info(obj.pack_offset)
+ stream = pack.stream(obj.pack_offset)
+
+ assert info.pack_offset == stream.pack_offset
+ assert info.type_id == stream.type_id
+ assert hasattr(stream, 'read')
+
+ # it should be possible to read from both streams
+ assert obj.read() == stream.read()
+
+ streams = pack.collect_streams(obj.pack_offset)
+ assert streams
+
+ # read the stream
+ try:
+ dstream = DeltaApplyReader.new(streams)
+ except ValueError:
+ # ignore these, as old git versions use only ref deltas,
+ # which we haven't resolved (as we are without an index).
+ # Also ignore non-delta streams
+ continue
+ # END get deltastream
+
+ # read all
+ data = dstream.read()
+ assert len(data) == dstream.size
+
+ # test seek
+ dstream.seek(0)
+ assert dstream.read() == data
+
+
+ # read chunks
+ # NOTE: the current implementation is safe, it basically transfers
+ # all calls to the underlying memory map
+
+ # END for each object
+ assert num_obj == size
+
+
+ def test_pack_index(self):
+ # check version 1 and 2
+ for indexfile, version, size in (self.packindexfile_v1, self.packindexfile_v2):
+ index = PackIndexFile(indexfile)
+ self._assert_index_file(index, version, size)
+ # END run tests
+
+ def test_pack(self):
+ # there is this special version 3, but apparently it's just like 2 ...
+ for packfile, version, size in (self.packfile_v2_3_ascii, self.packfile_v2_1, self.packfile_v2_2):
+ pack = PackFile(packfile)
+ self._assert_pack_file(pack, version, size)
+ # END for each pack to test
+
+ @with_rw_directory
+ def test_pack_entity(self, rw_dir):
+ pack_objs = list()
+ for packinfo, indexinfo in ( (self.packfile_v2_1, self.packindexfile_v1),
+ (self.packfile_v2_2, self.packindexfile_v2),
+ (self.packfile_v2_3_ascii, self.packindexfile_v2_3_ascii)):
+ packfile, version, size = packinfo
+ indexfile, version, size = indexinfo
+ entity = PackEntity(packfile)
+ assert entity.pack().path() == packfile
+ assert entity.index().path() == indexfile
+ pack_objs.extend(entity.stream_iter())
+
+ count = 0
+ for info, stream in izip(entity.info_iter(), entity.stream_iter()):
+ count += 1
+ assert info.binsha == stream.binsha
+ assert len(info.binsha) == 20
+ assert info.type_id == stream.type_id
+ assert info.size == stream.size
+
+ # we return fully resolved items, which is implied by the sha centric access
+ assert not info.type_id in delta_types
+
+ # try all calls
+ assert len(entity.collect_streams(info.binsha))
+ oinfo = entity.info(info.binsha)
+ assert isinstance(oinfo, OInfo)
+ assert oinfo.binsha is not None
+ ostream = entity.stream(info.binsha)
+ assert isinstance(ostream, OStream)
+ assert ostream.binsha is not None
+
+ # verify the stream
+ try:
+ assert entity.is_valid_stream(info.binsha, use_crc=True)
+ except UnsupportedOperation:
+ pass
+ # END ignore version issues
+ assert entity.is_valid_stream(info.binsha, use_crc=False)
+ # END for each info, stream tuple
+ assert count == size
+
+ # END for each entity
+
+ # pack writing - write all packs into one
+ # index path can be None
+ pack_path = tempfile.mktemp('', "pack", rw_dir)
+ index_path = tempfile.mktemp('', 'index', rw_dir)
+ iteration = 0
+ def rewind_streams():
+ for obj in pack_objs:
+ obj.stream.seek(0)
+ #END utility
+ for ppath, ipath, num_obj in zip((pack_path, )*2, (index_path, None), (len(pack_objs), None)):
+ pfile = open(ppath, 'wb')
+ iwrite = None
+ if ipath:
+ ifile = open(ipath, 'wb')
+ iwrite = ifile.write
+ #END handle ip
+
+ # make sure we rewind the streams ... we work on the same objects over and over again
+ if iteration > 0:
+ rewind_streams()
+ #END rewind streams
+ iteration += 1
+
+ pack_sha, index_sha = PackEntity.write_pack(pack_objs, pfile.write, iwrite, object_count=num_obj)
+ pfile.close()
+ assert os.path.getsize(ppath) > 100
+
+ # verify pack
+ pf = PackFile(ppath)
+ assert pf.size() == len(pack_objs)
+ assert pf.version() == PackFile.pack_version_default
+ assert pf.checksum() == pack_sha
+
+ # verify index
+ if ipath is not None:
+ ifile.close()
+ assert os.path.getsize(ipath) > 100
+ idx = PackIndexFile(ipath)
+ assert idx.version() == PackIndexFile.index_version_default
+ assert idx.packfile_checksum() == pack_sha
+ assert idx.indexfile_checksum() == index_sha
+ assert idx.size() == len(pack_objs)
+ #END verify files exist
+ #END for each packpath, indexpath pair
+
+ # verify the packs thoroughly
+ rewind_streams()
+ entity = PackEntity.create(pack_objs, rw_dir)
+ count = 0
+ for info in entity.info_iter():
+ count += 1
+ for use_crc in range(2):
+ assert entity.is_valid_stream(info.binsha, use_crc)
+ # END for each crc mode
+ #END for each info
+ assert count == len(pack_objs)
+
+
+ def test_pack_64(self):
+ # TODO: hex-edit a pack helping us to verify that we can handle 64 byte offsets
+ # of course without really needing such a huge pack
+ raise SkipTest()
diff --git a/git/test/test_reflog.py b/git/test/test_reflog.py
index 3fdf1fae..271924aa 100644
--- a/git/test/test_reflog.py
+++ b/git/test/test_reflog.py
@@ -1,4 +1,4 @@
-from git.test.lib import *
+from git.test.lib import TestBase, fixture_path
from git.objects import IndexObject
from git.refs import *
from git.util import Actor
diff --git a/git/test/test_refs.py b/git/test/test_refs.py
index 2338b4e4..e49b23ab 100644
--- a/git/test/test_refs.py
+++ b/git/test/test_refs.py
@@ -4,20 +4,25 @@
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
-from mock import *
-from git.test.lib import *
-from git import *
-import git.refs as refs
+from git.test.lib import TestBase, with_rw_repo
+from git.refs import *
+import git.refs as ref
+
from git.util import Actor
from git.objects.tag import TagObject
+
+from git.exc import GitCommandError
+
from itertools import chain
import os
+from nose import SkipTest
+
class TestRefs(TestBase):
def test_from_path(self):
# should be able to create any reference directly
- for ref_type in ( Reference, Head, TagReference, RemoteReference ):
+ for ref_type in (Reference, Head, TagReference, RemoteReference):
for name in ('rela_name', 'path/rela_name'):
full_path = ref_type.to_full_path(name)
instance = ref_type.from_path(self.rorepo, full_path)
@@ -27,20 +32,20 @@ class TestRefs(TestBase):
def test_tag_base(self):
tag_object_refs = list()
- for tag in self.rorepo.tags:
+ for tag in TagReference.list_items(self.rorepo):
assert "refs/tags" in tag.path
assert tag.name
- assert isinstance( tag.commit, Commit )
+ assert isinstance(tag.commit, tag.CommitCls)
if tag.tag is not None:
- tag_object_refs.append( tag )
+ tag_object_refs.append(tag)
tagobj = tag.tag
# have no dict
self.failUnlessRaises(AttributeError, setattr, tagobj, 'someattr', 1)
- assert isinstance( tagobj, TagObject )
+ assert isinstance(tagobj, TagObject)
assert tagobj.tag == tag.name
- assert isinstance( tagobj.tagger, Actor )
- assert isinstance( tagobj.tagged_date, int )
- assert isinstance( tagobj.tagger_tz_offset, int )
+ assert isinstance(tagobj.tagger, Actor)
+ assert isinstance(tagobj.tagged_date, int)
+ assert isinstance(tagobj.tagger_tz_offset, int)
assert tagobj.message
assert tag.object == tagobj
# can't assign the object
@@ -48,15 +53,15 @@ class TestRefs(TestBase):
# END if we have a tag object
# END for tag in repo-tags
assert tag_object_refs
- assert isinstance(self.rorepo.tags['0.1.5'], TagReference)
+ assert isinstance(TagReference.list_items(self.rorepo)['0.1.6'], TagReference)
def test_tags(self):
# tag refs can point to tag objects or to commits
s = set()
ref_count = 0
- for ref in chain(self.rorepo.tags, self.rorepo.heads):
+ for ref in chain(TagReference.list_items(self.rorepo), Head.list_items(self.rorepo)):
ref_count += 1
- assert isinstance(ref, refs.Reference)
+ assert isinstance(ref, Reference)
assert str(ref) == ref.name
assert repr(ref)
assert ref == ref
@@ -66,9 +71,9 @@ class TestRefs(TestBase):
assert len(s) == ref_count
assert len(s|s) == ref_count
- @with_rw_repo('HEAD', bare=False)
- def test_heads(self, rwrepo):
- for head in rwrepo.heads:
+ @with_rw_repo("0.1.6")
+ def test_heads(self, rw_repo):
+ for head in Head.iter_items(rw_repo):
assert head.name
assert head.path
assert "refs/heads" in head.path
@@ -88,7 +93,7 @@ class TestRefs(TestBase):
# after the clone, we might still have a tracking branch setup
head.set_tracking_branch(None)
assert head.tracking_branch() is None
- remote_ref = rwrepo.remotes[0].refs[0]
+ remote_ref = RemoteReference.list_items(rw_repo)[0]
assert head.set_tracking_branch(remote_ref) is head
assert head.tracking_branch() == remote_ref
head.set_tracking_branch(None)
@@ -96,7 +101,7 @@ class TestRefs(TestBase):
# END for each head
# verify REFLOG gets altered
- head = rwrepo.head
+ head = HEAD(rw_repo)
cur_head = head.ref
cur_commit = cur_head.commit
pcommit = cur_head.commit.parents[0].parents[0]
@@ -130,7 +135,7 @@ class TestRefs(TestBase):
assert len(cur_head.log()) == blog_len+2
# a new branch has just a single entry
- other_head = Head.create(rwrepo, 'mynewhead', pcommit, logmsg='new head created')
+ other_head = Head.create(rw_repo, 'mynewhead', pcommit, logmsg='new head created')
log = other_head.log()
assert len(log) == 1
assert log[0].oldhexsha == pcommit.NULL_HEX_SHA
@@ -139,24 +144,25 @@ class TestRefs(TestBase):
def test_refs(self):
types_found = set()
- for ref in self.rorepo.refs:
+ for ref in Reference.list_items(self.rorepo):
types_found.add(type(ref))
assert len(types_found) >= 3
def test_is_valid(self):
assert Reference(self.rorepo, 'refs/doesnt/exist').is_valid() == False
- assert self.rorepo.head.is_valid()
- assert self.rorepo.head.reference.is_valid()
+ assert HEAD(self.rorepo).is_valid()
+ assert HEAD(self.rorepo).reference.is_valid()
assert SymbolicReference(self.rorepo, 'hellothere').is_valid() == False
def test_orig_head(self):
- assert type(self.rorepo.head.orig_head()) == SymbolicReference
+ assert type(HEAD(self.rorepo).orig_head()) == SymbolicReference
- @with_rw_repo('0.1.6')
+ @with_rw_repo("0.1.6")
def test_head_reset(self, rw_repo):
- cur_head = rw_repo.head
+ cur_head = HEAD(rw_repo)
old_head_commit = cur_head.commit
new_head_commit = cur_head.ref.commit.parents[0]
+
cur_head.reset(new_head_commit, index=True) # index only
assert cur_head.reference.commit == new_head_commit
@@ -176,10 +182,9 @@ class TestRefs(TestBase):
cur_head.reset(new_head_commit)
rw_repo.index.checkout(["lib"], force=True)#
-
# now that we have a writable repo, change the HEAD reference - it's
# like git-reset --soft
- heads = rw_repo.heads
+ heads = Head.list_items(rw_repo)
assert heads
for head in heads:
cur_head.reference = head
@@ -198,7 +203,7 @@ class TestRefs(TestBase):
self.failUnlessRaises(TypeError, getattr, cur_head, "reference")
# tags are references, hence we can point to them
- some_tag = rw_repo.tags[0]
+ some_tag = TagReference.list_items(rw_repo)[0]
cur_head.reference = some_tag
assert not cur_head.is_detached
assert cur_head.commit == some_tag.commit
@@ -231,7 +236,7 @@ class TestRefs(TestBase):
old_name = new_head.name
assert new_head.rename("hello").name == "hello"
- assert new_head.rename("hello/world").name == "hello/world"
+ assert new_head.rename("hello/world").name == "hello/world" # yes, this must work
assert new_head.rename(old_name).name == old_name and new_head.path == old_path
# rename with force
@@ -414,12 +419,11 @@ class TestRefs(TestBase):
symbol_ref_path = "refs/symbol_ref"
symref = SymbolicReference(rw_repo, symbol_ref_path)
assert symref.path == symbol_ref_path
- symbol_ref_abspath = os.path.join(rw_repo.git_dir, symref.path)
# set it
symref.reference = new_head
assert symref.reference == new_head
- assert os.path.isfile(symbol_ref_abspath)
+ assert os.path.isfile(symref.abspath)
assert symref.commit == new_head.commit
for name in ('absname','folder/rela_name'):
@@ -471,7 +475,7 @@ class TestRefs(TestBase):
rw_repo.head.reference = Head.create(rw_repo, "master")
# At least the head should still exist
- assert os.path.isfile(os.path.join(rw_repo.git_dir, 'HEAD'))
+ assert os.path.isfile(rw_repo.head.abspath)
refs = list(SymbolicReference.iter_items(rw_repo))
assert len(refs) == 1
@@ -517,5 +521,7 @@ class TestRefs(TestBase):
assert SymbolicReference.dereference_recursive(self.rorepo, 'HEAD')
def test_reflog(self):
- assert isinstance(self.rorepo.heads.master.log(), RefLog)
+ assert isinstance(Head.list_items(self.rorepo).master.log(), RefLog)
+ def test_pure_python_rename(self):
+ raise SkipTest("Pure python reference renames cannot properly handle refnames which become a directory after rename")
diff --git a/git/test/test_remote.py b/git/test/test_remote.py
index af6915a3..8ae9fe43 100644
--- a/git/test/test_remote.py
+++ b/git/test/test_remote.py
@@ -4,9 +4,23 @@
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
-from git.test.lib import *
-from git import *
+from git.test.lib import (
+ TestBase,
+ with_rw_and_rw_remote_repo,
+ with_rw_repo,
+ )
from git.util import IterableList
+from git.db.interface import PushInfo, FetchInfo, RemoteProgress
+from git.remote import *
+from git.exc import GitCommandError
+from git.refs import (
+ Reference,
+ TagReference,
+ RemoteReference,
+ Head,
+ SymbolicReference
+ )
+
import tempfile
import shutil
import os
@@ -16,430 +30,421 @@ import random
random.seed(0)
class TestRemoteProgress(RemoteProgress):
- __slots__ = ( "_seen_lines", "_stages_per_op", '_num_progress_messages' )
- def __init__(self):
- super(TestRemoteProgress, self).__init__()
- self._seen_lines = list()
- self._stages_per_op = dict()
- self._num_progress_messages = 0
-
- def _parse_progress_line(self, line):
- # we may remove the line later if it is dropped
- # Keep it for debugging
- self._seen_lines.append(line)
- rval = super(TestRemoteProgress, self)._parse_progress_line(line)
- assert len(line) > 1, "line %r too short" % line
- return rval
-
- def line_dropped(self, line):
- try:
- self._seen_lines.remove(line)
- except ValueError:
- pass
-
- def update(self, op_code, cur_count, max_count=None, message=''):
- # check each stage only comes once
- op_id = op_code & self.OP_MASK
- assert op_id in (self.COUNTING, self.COMPRESSING, self.WRITING)
-
- self._stages_per_op.setdefault(op_id, 0)
- self._stages_per_op[ op_id ] = self._stages_per_op[ op_id ] | (op_code & self.STAGE_MASK)
-
- if op_code & (self.WRITING|self.END) == (self.WRITING|self.END):
- assert message
- # END check we get message
-
- self._num_progress_messages += 1
-
-
- def make_assertion(self):
- # we don't always receive messages
- if not self._seen_lines:
- return
-
- # sometimes objects are not compressed which is okay
- assert len(self._seen_ops) in (2,3)
- assert self._stages_per_op
-
- # must have seen all stages
- for op, stages in self._stages_per_op.items():
- assert stages & self.STAGE_MASK == self.STAGE_MASK
- # END for each op/stage
+	__slots__ = ( "_seen_lines", "_stages_per_op", "_seen_ops", '_num_progress_messages')
+ def __init__(self):
+ super(TestRemoteProgress, self).__init__()
+ self._seen_lines = list()
+ self._stages_per_op = dict()
+ self._seen_ops = set()
+ self._num_progress_messages = 0
+
+ def line_dropped(self, line):
+ try:
+ self._seen_lines.remove(line)
+ except ValueError:
+ pass
+
+ def __call__(self, message, input=''):
+ pass
+
+ def update(self, op_code, cur_count, max_count=None, message='', input=''):
+ # check each stage only comes once
+ if input:
+ self._seen_lines.append(input)
+ #END handle input
+ op_id = op_code & self.OP_MASK
+ assert op_id in (self.COUNTING, self.COMPRESSING, self.WRITING)
+
+ self._stages_per_op.setdefault(op_id, 0)
+ self._stages_per_op[ op_id ] = self._stages_per_op[ op_id ] | (op_code & self.STAGE_MASK)
+
+ if op_code & (self.WRITING|self.END) == (self.WRITING|self.END):
+ assert message
+ # END check we get message
+
+ self._num_progress_messages += 1
+
+
+ def make_assertion(self):
+ # we don't always receive messages
+ if not self._seen_lines:
+ return
+
+ # sometimes objects are not compressed which is okay
+ assert len(self._stages_per_op.keys()) in (2,3)
+ assert self._stages_per_op
+
+ # must have seen all stages
+ for op, stages in self._stages_per_op.items():
+ assert stages & self.STAGE_MASK == self.STAGE_MASK
+ # END for each op/stage
- def assert_received_message(self):
- assert self._num_progress_messages
-
+ def assert_received_message(self):
+ assert self._num_progress_messages
+
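
The rewritten handler above decodes git's progress output through bitflags packed into op_code. A condensed sketch of such a consumer, using the flag constants the test relies on (BEGIN is assumed to exist alongside END as the other stage bit):

from git.db.interface import RemoteProgress

class PrintingProgress(RemoteProgress):
	# minimal consumer: op_code packs an operation id plus stage bits
	def update(self, op_code, cur_count, max_count=None, message='', input=''):
		op_id = op_code & self.OP_MASK      # one of COUNTING, COMPRESSING, WRITING
		stage = op_code & self.STAGE_MASK   # BEGIN and/or END bits
		if stage & self.END and message:
			print "done: %s" % message      # final line of this operation
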
class TestRemote(TestBase):
-
- def _print_fetchhead(self, repo):
- fp = open(os.path.join(repo.git_dir, "FETCH_HEAD"))
- fp.close()
-
-
- def _do_test_fetch_result(self, results, remote):
- # self._print_fetchhead(remote.repo)
- assert len(results) > 0 and isinstance(results[0], FetchInfo)
- for info in results:
- assert isinstance(info.note, basestring)
- if isinstance(info.ref, Reference):
- assert info.flags != 0
- # END reference type flags handling
- assert isinstance(info.ref, (SymbolicReference, Reference))
- if info.flags & (info.FORCED_UPDATE|info.FAST_FORWARD):
- assert isinstance(info.old_commit, Commit)
- else:
- assert info.old_commit is None
- # END forced update checking
- # END for each info
-
- def _do_test_push_result(self, results, remote):
- assert len(results) > 0 and isinstance(results[0], PushInfo)
- for info in results:
- assert info.flags
- assert isinstance(info.summary, basestring)
- if info.old_commit is not None:
- assert isinstance(info.old_commit, Commit)
- if info.flags & info.ERROR:
- has_one = False
- for bitflag in (info.REJECTED, info.REMOTE_REJECTED, info.REMOTE_FAILURE):
- has_one |= bool(info.flags & bitflag)
- # END for each bitflag
- assert has_one
- else:
- # there must be a remote commit
- if info.flags & info.DELETED == 0:
- assert isinstance(info.local_ref, Reference)
- else:
- assert info.local_ref is None
- assert type(info.remote_ref) in (TagReference, RemoteReference)
- # END error checking
- # END for each info
-
-
- def _do_test_fetch_info(self, repo):
- self.failUnlessRaises(ValueError, FetchInfo._from_line, repo, "nonsense", '')
- self.failUnlessRaises(ValueError, FetchInfo._from_line, repo, "? [up to date] 0.1.7RC -> origin/0.1.7RC", '')
-
- def _commit_random_file(self, repo):
- #Create a file with a random name and random data and commit it to repo.
- # Return the commited absolute file path
- index = repo.index
- new_file = self._make_file(os.path.basename(tempfile.mktemp()),str(random.random()), repo)
- index.add([new_file])
- index.commit("Committing %s" % new_file)
- return new_file
-
- def _do_test_fetch(self,remote, rw_repo, remote_repo):
- # specialized fetch testing to de-clutter the main test
- self._do_test_fetch_info(rw_repo)
-
- def fetch_and_test(remote, **kwargs):
- progress = TestRemoteProgress()
- kwargs['progress'] = progress
- res = remote.fetch(**kwargs)
- progress.make_assertion()
- self._do_test_fetch_result(res, remote)
- return res
- # END fetch and check
-
- def get_info(res, remote, name):
- return res["%s/%s"%(remote,name)]
-
- # put remote head to master as it is garantueed to exist
- remote_repo.head.reference = remote_repo.heads.master
-
- res = fetch_and_test(remote)
- # all uptodate
- for info in res:
- assert info.flags & info.HEAD_UPTODATE
-
- # rewind remote head to trigger rejection
- # index must be false as remote is a bare repo
- rhead = remote_repo.head
- remote_commit = rhead.commit
- rhead.reset("HEAD~2", index=False)
- res = fetch_and_test(remote)
- mkey = "%s/%s"%(remote,'master')
- master_info = res[mkey]
- assert master_info.flags & FetchInfo.FORCED_UPDATE and master_info.note is not None
-
- # normal fast forward - set head back to previous one
- rhead.commit = remote_commit
- res = fetch_and_test(remote)
- assert res[mkey].flags & FetchInfo.FAST_FORWARD
-
- # new remote branch
- new_remote_branch = Head.create(remote_repo, "new_branch")
- res = fetch_and_test(remote)
- new_branch_info = get_info(res, remote, new_remote_branch)
- assert new_branch_info.flags & FetchInfo.NEW_HEAD
-
- # remote branch rename ( causes creation of a new one locally )
- new_remote_branch.rename("other_branch_name")
- res = fetch_and_test(remote)
- other_branch_info = get_info(res, remote, new_remote_branch)
- assert other_branch_info.ref.commit == new_branch_info.ref.commit
-
- # remove new branch
- Head.delete(new_remote_branch.repo, new_remote_branch)
- res = fetch_and_test(remote)
- # deleted remote will not be fetched
- self.failUnlessRaises(IndexError, get_info, res, remote, new_remote_branch)
-
- # prune stale tracking branches
- stale_refs = remote.stale_refs
- assert len(stale_refs) == 2 and isinstance(stale_refs[0], RemoteReference)
- RemoteReference.delete(rw_repo, *stale_refs)
-
- # test single branch fetch with refspec including target remote
- res = fetch_and_test(remote, refspec="master:refs/remotes/%s/master"%remote)
- assert len(res) == 1 and get_info(res, remote, 'master')
-
- # ... with respec and no target
- res = fetch_and_test(remote, refspec='master')
- assert len(res) == 1
-
- # add new tag reference
- rtag = TagReference.create(remote_repo, "1.0-RV_hello.there")
- res = fetch_and_test(remote, tags=True)
- tinfo = res[str(rtag)]
- assert isinstance(tinfo.ref, TagReference) and tinfo.ref.commit == rtag.commit
- assert tinfo.flags & tinfo.NEW_TAG
-
- # adjust tag commit
- Reference.set_object(rtag, rhead.commit.parents[0].parents[0])
- res = fetch_and_test(remote, tags=True)
- tinfo = res[str(rtag)]
- assert tinfo.commit == rtag.commit
- assert tinfo.flags & tinfo.TAG_UPDATE
-
- # delete remote tag - local one will stay
- TagReference.delete(remote_repo, rtag)
- res = fetch_and_test(remote, tags=True)
- self.failUnlessRaises(IndexError, get_info, res, remote, str(rtag))
-
- # provoke to receive actual objects to see what kind of output we have to
- # expect. For that we need a remote transport protocol
- # Create a new UN-shared repo and fetch into it after we pushed a change
- # to the shared repo
- other_repo_dir = tempfile.mktemp("other_repo")
- # must clone with a local path for the repo implementation not to freak out
- # as it wants local paths only ( which I can understand )
- other_repo = remote_repo.clone(other_repo_dir, shared=False)
- remote_repo_url = "git://localhost%s"%remote_repo.git_dir
-
- # put origin to git-url
- other_origin = other_repo.remotes.origin
- other_origin.config_writer.set("url", remote_repo_url)
- # it automatically creates alternates as remote_repo is shared as well.
- # It will use the transport though and ignore alternates when fetching
- # assert not other_repo.alternates # this would fail
-
- # assure we are in the right state
- rw_repo.head.reset(remote.refs.master, working_tree=True)
- try:
- self._commit_random_file(rw_repo)
- remote.push(rw_repo.head.reference)
-
- # here I would expect to see remote-information about packing
- # objects and so on. Unfortunately, this does not happen
- # if we are redirecting the output - git explicitly checks for this
- # and only provides progress information to ttys
- res = fetch_and_test(other_origin)
- finally:
- shutil.rmtree(other_repo_dir)
- # END test and cleanup
-
- def _test_push_and_pull(self,remote, rw_repo, remote_repo):
- # push our changes
- lhead = rw_repo.head
- lindex = rw_repo.index
- # assure we are on master and it is checked out where the remote is
- try:
- lhead.reference = rw_repo.heads.master
- except AttributeError:
- # if the author is on a non-master branch, the clones might not have
- # a local master yet. We simply create it
- lhead.reference = rw_repo.create_head('master')
- # END master handling
- lhead.reset(remote.refs.master, working_tree=True)
-
- # push without spec should fail ( without further configuration )
- # well, works nicely
- # self.failUnlessRaises(GitCommandError, remote.push)
-
- # simple file push
- self._commit_random_file(rw_repo)
- progress = TestRemoteProgress()
- res = remote.push(lhead.reference, progress)
- assert isinstance(res, IterableList)
- self._do_test_push_result(res, remote)
- progress.make_assertion()
-
- # rejected - undo last commit
- lhead.reset("HEAD~1")
- res = remote.push(lhead.reference)
- assert res[0].flags & PushInfo.ERROR
- assert res[0].flags & PushInfo.REJECTED
- self._do_test_push_result(res, remote)
-
- # force rejected pull
- res = remote.push('+%s' % lhead.reference)
- assert res[0].flags & PushInfo.ERROR == 0
- assert res[0].flags & PushInfo.FORCED_UPDATE
- self._do_test_push_result(res, remote)
-
- # invalid refspec
- res = remote.push("hellothere")
- assert len(res) == 0
-
- # push new tags
- progress = TestRemoteProgress()
- to_be_updated = "my_tag.1.0RV"
- new_tag = TagReference.create(rw_repo, to_be_updated)
- other_tag = TagReference.create(rw_repo, "my_obj_tag.2.1aRV", message="my message")
- res = remote.push(progress=progress, tags=True)
- assert res[-1].flags & PushInfo.NEW_TAG
- progress.make_assertion()
- self._do_test_push_result(res, remote)
-
- # update push new tags
- # Rejection is default
- new_tag = TagReference.create(rw_repo, to_be_updated, ref='HEAD~1', force=True)
- res = remote.push(tags=True)
- self._do_test_push_result(res, remote)
- assert res[-1].flags & PushInfo.REJECTED and res[-1].flags & PushInfo.ERROR
-
- # push force this tag
- res = remote.push("+%s" % new_tag.path)
- assert res[-1].flags & PushInfo.ERROR == 0 and res[-1].flags & PushInfo.FORCED_UPDATE
-
- # delete tag - have to do it using refspec
- res = remote.push(":%s" % new_tag.path)
- self._do_test_push_result(res, remote)
- assert res[0].flags & PushInfo.DELETED
- # Currently progress is not properly transferred, especially not using
- # the git daemon
- # progress.assert_received_message()
-
- # push new branch
- new_head = Head.create(rw_repo, "my_new_branch")
- progress = TestRemoteProgress()
- res = remote.push(new_head, progress)
- assert res[0].flags & PushInfo.NEW_HEAD
- progress.make_assertion()
- self._do_test_push_result(res, remote)
-
- # delete new branch on the remote end and locally
- res = remote.push(":%s" % new_head.path)
- self._do_test_push_result(res, remote)
- Head.delete(rw_repo, new_head)
- assert res[-1].flags & PushInfo.DELETED
-
- # --all
- res = remote.push(all=True)
- self._do_test_push_result(res, remote)
-
- remote.pull('master')
-
- # cleanup - delete created tags and branches as we are in an innerloop on
- # the same repository
- TagReference.delete(rw_repo, new_tag, other_tag)
- remote.push(":%s" % other_tag.path)
-
- @with_rw_and_rw_remote_repo('0.1.6')
- def test_base(self, rw_repo, remote_repo):
- num_remotes = 0
- remote_set = set()
- ran_fetch_test = False
-
- for remote in rw_repo.remotes:
- num_remotes += 1
- assert remote == remote
- assert str(remote) != repr(remote)
- remote_set.add(remote)
- remote_set.add(remote) # should already exist
-
- # REFS
- refs = remote.refs
- assert refs
- for ref in refs:
- assert ref.remote_name == remote.name
- assert ref.remote_head
- # END for each ref
-
- # OPTIONS
- # cannot use 'fetch' key anymore as it is now a method
- for opt in ("url", ):
- val = getattr(remote, opt)
- reader = remote.config_reader
- assert reader.get(opt) == val
- assert reader.get_value(opt, None) == val
-
- # unable to write with a reader
- self.failUnlessRaises(IOError, reader.set, opt, "test")
-
- # change value
- writer = remote.config_writer
- new_val = "myval"
- writer.set(opt, new_val)
- assert writer.get(opt) == new_val
- writer.set(opt, val)
- assert writer.get(opt) == val
- del(writer)
- assert getattr(remote, opt) == val
- # END for each default option key
-
- # RENAME
- other_name = "totally_other_name"
- prev_name = remote.name
- assert remote.rename(other_name) == remote
- assert prev_name != remote.name
- # multiple times
- for time in range(2):
- assert remote.rename(prev_name).name == prev_name
- # END for each rename ( back to prev_name )
-
- # PUSH/PULL TESTING
- self._test_push_and_pull(remote, rw_repo, remote_repo)
-
- # FETCH TESTING
- # Only for remotes - local cases are the same or less complicated
- # as additional progress information will never be emitted
- if remote.name == "daemon_origin":
- self._do_test_fetch(remote, rw_repo, remote_repo)
- ran_fetch_test = True
- # END fetch test
-
- remote.update()
- # END for each remote
-
- assert ran_fetch_test
- assert num_remotes
- assert num_remotes == len(remote_set)
-
- origin = rw_repo.remote('origin')
- assert origin == rw_repo.remotes.origin
-
- @with_rw_repo('HEAD', bare=True)
- def test_creation_and_removal(self, bare_rw_repo):
- new_name = "test_new_one"
- arg_list = (new_name, "git@server:hello.git")
- remote = Remote.create(bare_rw_repo, *arg_list )
- assert remote.name == "test_new_one"
- assert remote in bare_rw_repo.remotes
-
- # create same one again
- self.failUnlessRaises(GitCommandError, Remote.create, bare_rw_repo, *arg_list)
-
- Remote.remove(bare_rw_repo, new_name)
-
- for remote in bare_rw_repo.remotes:
- if remote.name == new_name:
- raise AssertionError("Remote removal failed")
- # END if deleted remote matches existing remote's name
- # END for each remote
-
-
-
+
+ def _print_fetchhead(self, repo):
+ fp = open(os.path.join(repo.git_dir, "FETCH_HEAD"))
+ fp.close()
+
+
+ def _do_test_fetch_result(self, results, remote):
+ # self._print_fetchhead(remote.repo)
+ assert len(results) > 0 and isinstance(results[0], FetchInfo)
+ for info in results:
+ assert isinstance(info.note, basestring)
+ if isinstance(info.ref, Reference):
+ assert info.flags != 0
+ # END reference type flags handling
+ assert isinstance(info.ref, (SymbolicReference, Reference))
+ if info.flags & (info.FORCED_UPDATE|info.FAST_FORWARD):
+ assert isinstance(info.old_commit_binsha, str) and len(info.old_commit_binsha) == 20
+ else:
+ assert info.old_commit_binsha is None
+ # END forced update checking
+ # END for each info
+
+ def _do_test_push_result(self, results, remote):
+ assert len(results) > 0 and isinstance(results[0], PushInfo)
+ for info in results:
+ assert info.flags
+ assert isinstance(info.summary, basestring)
+ if info.old_commit_binsha is not None:
+ assert isinstance(info.old_commit_binsha, str) and len(info.old_commit_binsha) == 20
+ if info.flags & info.ERROR:
+ has_one = False
+ for bitflag in (info.REJECTED, info.REMOTE_REJECTED, info.REMOTE_FAILURE):
+ has_one |= bool(info.flags & bitflag)
+ # END for each bitflag
+ assert has_one
+ else:
+ # there must be a remote commit
+ if info.flags & info.DELETED == 0:
+ assert isinstance(info.local_ref, Reference)
+ else:
+ assert info.local_ref is None
+ assert type(info.remote_ref) in (TagReference, RemoteReference)
+ # END error checking
+ # END for each info
+
+ def _commit_random_file(self, repo):
+	# Create a file with a random name and random data and commit it to repo.
+	# Return the committed absolute file path
+ index = repo.index
+ new_file = self._make_file(os.path.basename(tempfile.mktemp()),str(random.random()), repo)
+ index.add([new_file])
+ index.commit("Committing %s" % new_file)
+ return new_file
+
+ def _do_test_fetch(self,remote, rw_repo, remote_repo):
+ def fetch_and_test(remote, **kwargs):
+ progress = TestRemoteProgress()
+ kwargs['progress'] = progress
+ res = remote.fetch(**kwargs)
+ progress.make_assertion()
+ self._do_test_fetch_result(res, remote)
+ return res
+ # END fetch and check
+
+ def get_info(res, remote, name):
+ return res["%s/%s"%(remote,name)]
+
+		# put remote head to master as it is guaranteed to exist
+ remote_repo.head.reference = remote_repo.heads.master
+
+ res = fetch_and_test(remote)
+ # all uptodate
+ for info in res:
+ assert info.flags & info.HEAD_UPTODATE
+
+ # rewind remote head to trigger rejection
+ # index must be false as remote is a bare repo
+ rhead = remote_repo.head
+ remote_commit = rhead.commit
+ rhead.reset("HEAD~2", index=False)
+ res = fetch_and_test(remote)
+ mkey = "%s/%s"%(remote,'master')
+ master_info = res[mkey]
+ assert master_info.flags & FetchInfo.FORCED_UPDATE and master_info.note is not None
+
+ # normal fast forward - set head back to previous one
+ rhead.commit = remote_commit
+ res = fetch_and_test(remote)
+ assert res[mkey].flags & FetchInfo.FAST_FORWARD
+
+ # new remote branch
+ new_remote_branch = Head.create(remote_repo, "new_branch")
+ res = fetch_and_test(remote)
+ new_branch_info = get_info(res, remote, new_remote_branch)
+ assert new_branch_info.flags & FetchInfo.NEW_HEAD
+
+ # remote branch rename ( causes creation of a new one locally )
+ new_remote_branch.rename("other_branch_name")
+ res = fetch_and_test(remote)
+ other_branch_info = get_info(res, remote, new_remote_branch)
+ assert other_branch_info.ref.commit == new_branch_info.ref.commit
+
+ # remove new branch
+ Head.delete(new_remote_branch.repo, new_remote_branch)
+ res = fetch_and_test(remote)
+		# the deleted remote branch will not be fetched
+ self.failUnlessRaises(IndexError, get_info, res, remote, new_remote_branch)
+
+ # prune stale tracking branches
+ stale_refs = remote.stale_refs
+ assert len(stale_refs) == 2 and isinstance(stale_refs[0], RemoteReference)
+ RemoteReference.delete(rw_repo, *stale_refs)
+
+ # test single branch fetch with refspec including target remote
+ res = fetch_and_test(remote, refspec="master:refs/remotes/%s/master"%remote)
+ assert len(res) == 1 and get_info(res, remote, 'master')
+
+		# ... with refspec and no target
+ res = fetch_and_test(remote, refspec='master')
+ assert len(res) == 1
+
+ # add new tag reference
+ rtag = TagReference.create(remote_repo, "1.0-RV_hello.there")
+ res = fetch_and_test(remote, tags=True)
+ tinfo = res[str(rtag)]
+ assert isinstance(tinfo.ref, TagReference) and tinfo.ref.commit == rtag.commit
+ assert tinfo.flags & tinfo.NEW_TAG
+
+ # adjust tag commit
+ Reference.set_object(rtag, rhead.commit.parents[0].parents[0])
+ res = fetch_and_test(remote, tags=True)
+ tinfo = res[str(rtag)]
+ assert tinfo.commit == rtag.commit
+ assert tinfo.flags & tinfo.TAG_UPDATE
+
+ # delete remote tag - local one will stay
+ TagReference.delete(remote_repo, rtag)
+ res = fetch_and_test(remote, tags=True)
+ self.failUnlessRaises(IndexError, get_info, res, remote, str(rtag))
+
+		# Provoke the receipt of actual objects to see what kind of output we
+		# have to expect. For that we need a remote transport protocol:
+		# create a new un-shared repo and fetch into it after we pushed a change
+		# to the shared repo
+ other_repo_dir = tempfile.mktemp("other_repo")
+		# must clone with a local path, as the repo implementation
+		# accepts local paths only
+ other_repo = remote_repo.clone(other_repo_dir, shared=False)
+ remote_repo_url = "git://localhost%s"%remote_repo.git_dir
+
+ # put origin to git-url
+ other_origin = other_repo.remotes.origin
+ other_origin.config_writer.set("url", remote_repo_url)
+ # it automatically creates alternates as remote_repo is shared as well.
+ # It will use the transport though and ignore alternates when fetching
+ # assert not other_repo.alternates # this would fail
+
+ # assure we are in the right state
+ rw_repo.head.reset(remote.refs.master, working_tree=True)
+ try:
+ self._commit_random_file(rw_repo)
+ remote.push(rw_repo.head.reference)
+
+ # here I would expect to see remote-information about packing
+ # objects and so on. Unfortunately, this does not happen
+ # if we are redirecting the output - git explicitly checks for this
+ # and only provides progress information to ttys
+ res = fetch_and_test(other_origin)
+ finally:
+ shutil.rmtree(other_repo_dir)
+ # END test and cleanup
+
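
Condensed, the consumption pattern _do_test_fetch drives through all those remote states looks as follows; this is a sketch against the names used above, with 'origin' as a placeholder remote name.

# Sketch of consuming fetch results; not a runnable test on its own.
res = remote.fetch(progress=TestRemoteProgress())   # IterableList of FetchInfo
info = res["origin/master"]                         # indexable by remote ref name
if info.flags & (info.FORCED_UPDATE | info.FAST_FORWARD):
	assert len(info.old_commit_binsha) == 20        # binary sha of the old commit
if info.flags & info.HEAD_UPTODATE:
	pass                                            # nothing was transferred
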
+ def _test_push_and_pull(self,remote, rw_repo, remote_repo):
+ # push our changes
+ lhead = rw_repo.head
+ lindex = rw_repo.index
+ # assure we are on master and it is checked out where the remote is
+ try:
+ lhead.reference = rw_repo.heads.master
+ except AttributeError:
+ # if the author is on a non-master branch, the clones might not have
+ # a local master yet. We simply create it
+ lhead.reference = rw_repo.create_head('master')
+ # END master handling
+ lhead.reset(remote.refs.master, working_tree=True)
+
+		# push without spec should fail ( without further configuration ),
+		# but it actually works nicely, hence the check stays disabled
+ # self.failUnlessRaises(GitCommandError, remote.push)
+
+ # simple file push
+ self._commit_random_file(rw_repo)
+ progress = TestRemoteProgress()
+ res = remote.push(lhead.reference, progress)
+ assert isinstance(res, IterableList)
+ self._do_test_push_result(res, remote)
+ progress.make_assertion()
+
+ # rejected - undo last commit
+ lhead.reset("HEAD~1")
+ res = remote.push(lhead.reference)
+ assert res[0].flags & PushInfo.ERROR
+ assert res[0].flags & PushInfo.REJECTED
+ self._do_test_push_result(res, remote)
+
+		# force the previously rejected push
+ res = remote.push('+%s' % lhead.reference)
+ assert res[0].flags & PushInfo.ERROR == 0
+ assert res[0].flags & PushInfo.FORCED_UPDATE
+ self._do_test_push_result(res, remote)
+
+ # invalid refspec
+ res = remote.push("hellothere")
+ assert len(res) == 0
+
+ # push new tags
+ progress = TestRemoteProgress()
+ to_be_updated = "my_tag.1.0RV"
+ new_tag = TagReference.create(rw_repo, to_be_updated)
+ other_tag = TagReference.create(rw_repo, "my_obj_tag.2.1aRV", message="my message")
+ res = remote.push(progress=progress, tags=True)
+ assert res[-1].flags & PushInfo.NEW_TAG
+ progress.make_assertion()
+ self._do_test_push_result(res, remote)
+
+ # update push new tags
+ # Rejection is default
+ new_tag = TagReference.create(rw_repo, to_be_updated, ref='HEAD~1', force=True)
+ res = remote.push(tags=True)
+ self._do_test_push_result(res, remote)
+ assert res[-1].flags & PushInfo.REJECTED and res[-1].flags & PushInfo.ERROR
+
+ # push force this tag
+ res = remote.push("+%s" % new_tag.path)
+ assert res[-1].flags & PushInfo.ERROR == 0 and res[-1].flags & PushInfo.FORCED_UPDATE
+
+ # delete tag - have to do it using refspec
+ res = remote.push(":%s" % new_tag.path)
+ self._do_test_push_result(res, remote)
+ assert res[0].flags & PushInfo.DELETED
+ # Currently progress is not properly transferred, especially not using
+ # the git daemon
+ # progress.assert_received_message()
+
+ # push new branch
+ new_head = Head.create(rw_repo, "my_new_branch")
+ progress = TestRemoteProgress()
+ res = remote.push(new_head, progress)
+ assert res[0].flags & PushInfo.NEW_HEAD
+ progress.make_assertion()
+ self._do_test_push_result(res, remote)
+
+ # delete new branch on the remote end and locally
+ res = remote.push(":%s" % new_head.path)
+ self._do_test_push_result(res, remote)
+ Head.delete(rw_repo, new_head)
+ assert res[-1].flags & PushInfo.DELETED
+
+ # --all
+ res = remote.push(all=True)
+ self._do_test_push_result(res, remote)
+
+ remote.pull('master')
+
+		# cleanup - delete created tags and branches as we are in an inner loop on
+		# the same repository
+ TagReference.delete(rw_repo, new_tag, other_tag)
+ remote.push(":%s" % other_tag.path)
+
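
_test_push_and_pull exercises every refspec form in a single pass. Summarized as a sketch against the same names, the push grammar it relies on is:

remote.push(lhead.reference)           # plain push of the current branch
remote.push('+%s' % lhead.reference)   # leading '+' forces the update
remote.push(':%s' % new_tag.path)      # empty local side deletes the remote ref
remote.push(tags=True)                 # push all tags
remote.push(all=True)                  # push all branches
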
+ @with_rw_and_rw_remote_repo('0.1.6')
+ def test_base(self, rw_repo, remote_repo):
+ num_remotes = 0
+ remote_set = set()
+ ran_fetch_test = False
+
+ for remote in rw_repo.remotes:
+ num_remotes += 1
+ assert remote == remote
+ assert str(remote) != repr(remote)
+ remote_set.add(remote)
+ remote_set.add(remote) # should already exist
+
+ # REFS
+ refs = remote.refs
+ assert refs
+ for ref in refs:
+ assert ref.remote_name == remote.name
+ assert ref.remote_head
+ # END for each ref
+
+ # OPTIONS
+ # cannot use 'fetch' key anymore as it is now a method
+ for opt in ("url", ):
+ val = getattr(remote, opt)
+ reader = remote.config_reader
+ assert reader.get(opt) == val
+ assert reader.get_value(opt, None) == val
+
+ # unable to write with a reader
+ self.failUnlessRaises(IOError, reader.set, opt, "test")
+
+ # change value
+ writer = remote.config_writer
+ new_val = "myval"
+ writer.set(opt, new_val)
+ assert writer.get(opt) == new_val
+ writer.set(opt, val)
+ assert writer.get(opt) == val
+ del(writer)
+ assert getattr(remote, opt) == val
+ # END for each default option key
+
+ # RENAME
+ other_name = "totally_other_name"
+ prev_name = remote.name
+ assert remote.rename(other_name) == remote
+ assert prev_name != remote.name
+ # multiple times
+ for time in range(2):
+ assert remote.rename(prev_name).name == prev_name
+ # END for each rename ( back to prev_name )
+
+ # PUSH/PULL TESTING
+ self._test_push_and_pull(remote, rw_repo, remote_repo)
+
+ # FETCH TESTING
+ # Only for remotes - local cases are the same or less complicated
+ # as additional progress information will never be emitted
+ if remote.name == "daemon_origin":
+ self._do_test_fetch(remote, rw_repo, remote_repo)
+ ran_fetch_test = True
+ # END fetch test
+
+ remote.update()
+ # END for each remote
+
+ assert ran_fetch_test
+ assert num_remotes
+ assert num_remotes == len(remote_set)
+
+ origin = rw_repo.remote('origin')
+ assert origin == rw_repo.remotes.origin
+
+ @with_rw_repo('HEAD', bare=True)
+ def test_creation_and_removal(self, bare_rw_repo):
+ new_name = "test_new_one"
+ arg_list = (new_name, "git@server:hello.git")
+ remote = Remote.create(bare_rw_repo, *arg_list )
+ assert remote.name == "test_new_one"
+ assert remote in bare_rw_repo.remotes
+
+ # create same one again
+ self.failUnlessRaises(GitCommandError, Remote.create, bare_rw_repo, *arg_list)
+
+ Remote.remove(bare_rw_repo, new_name)
+
+ for remote in bare_rw_repo.remotes:
+ if remote.name == new_name:
+ raise AssertionError("Remote removal failed")
+ # END if deleted remote matches existing remote's name
+ # END for each remote
+
+
+
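
For reference, the lifecycle test_creation_and_removal walks through reduces to the following sketch; the name and URL are the test's own placeholder values.

from git.remote import Remote

remote = Remote.create(repo, "test_new_one", "git@server:hello.git")
assert remote in repo.remotes
assert remote.rename("other_name") == remote   # rename returns the remote itself
Remote.remove(repo, "other_name")              # class-level removal, as above
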
diff --git a/git/test/test_stats.py b/git/test/test_stats.py
index 2bdb0a89..27be6a77 100644
--- a/git/test/test_stats.py
+++ b/git/test/test_stats.py
@@ -4,8 +4,12 @@
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
-from git.test.lib import *
-from git import *
+from git.test.lib import (
+ TestBase,
+ fixture,
+ assert_equal
+ )
+from git.util import Stats
class TestStats(TestBase):
diff --git a/git/test/test_stream.py b/git/test/test_stream.py
new file mode 100644
index 00000000..8d7a5f9a
--- /dev/null
+++ b/git/test/test_stream.py
@@ -0,0 +1,155 @@
+# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
+#
+# This module is part of GitDB and is released under
+# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
+"""Tests for stream classes"""
+from lib import (
+ TestBase,
+ DummyStream,
+ Sha1Writer,
+ make_bytes,
+ make_object,
+ fixture_path
+ )
+
+from git.stream import *
+from git.util import (
+ NULL_HEX_SHA,
+ hex_to_bin
+ )
+
+from git.util import zlib
+from git.typ import (
+ str_blob_type
+ )
+
+from git.db.py.loose import PureLooseObjectODB
+import time
+import tempfile
+import os
+
+
+
+
+class TestStream(TestBase):
+ """Test stream classes"""
+
+ data_sizes = (15, 10000, 1000*1024+512)
+
+ def _assert_stream_reader(self, stream, cdata, rewind_stream=lambda s: None):
+		"""Make stream tests - the given stream is seekable, allowing it to be
+		rewound and reused
+ :param cdata: the data we expect to read from stream, the contents
+ :param rewind_stream: function called to rewind the stream to make it ready
+ for reuse"""
+ ns = 10
+ assert len(cdata) > ns-1, "Data must be larger than %i, was %i" % (ns, len(cdata))
+
+ # read in small steps
+ ss = len(cdata) / ns
+ for i in range(ns):
+ data = stream.read(ss)
+ chunk = cdata[i*ss:(i+1)*ss]
+ assert data == chunk
+ # END for each step
+ rest = stream.read()
+ if rest:
+ assert rest == cdata[-len(rest):]
+ # END handle rest
+
+ if isinstance(stream, DecompressMemMapReader):
+ assert len(stream.data()) == stream.compressed_bytes_read()
+ # END handle special type
+
+ rewind_stream(stream)
+
+ # read everything
+ rdata = stream.read()
+ assert rdata == cdata
+
+ if isinstance(stream, DecompressMemMapReader):
+ assert len(stream.data()) == stream.compressed_bytes_read()
+ # END handle special type
+
+ def test_decompress_reader(self):
+ for close_on_deletion in range(2):
+ for with_size in range(2):
+ for ds in self.data_sizes:
+ cdata = make_bytes(ds, randomize=False)
+
+ # zdata = zipped actual data
+ # cdata = original content data
+
+ # create reader
+ if with_size:
+ # need object data
+ zdata = zlib.compress(make_object(str_blob_type, cdata))
+ type, size, reader = DecompressMemMapReader.new(zdata, close_on_deletion)
+ assert size == len(cdata)
+ assert type == str_blob_type
+
+ # even if we don't set the size, it will be set automatically on first read
+ test_reader = DecompressMemMapReader(zdata, close_on_deletion=False)
+ assert test_reader._s == len(cdata)
+ else:
+ # here we need content data
+ zdata = zlib.compress(cdata)
+ reader = DecompressMemMapReader(zdata, close_on_deletion, len(cdata))
+ assert reader._s == len(cdata)
+ # END get reader
+
+ self._assert_stream_reader(reader, cdata, lambda r: r.seek(0))
+
+ # put in a dummy stream for closing
+ dummy = DummyStream()
+ reader._m = dummy
+
+ assert not dummy.closed
+ del(reader)
+ assert dummy.closed == close_on_deletion
+ # END for each datasize
+ # END whether size should be used
+ # END whether stream should be closed when deleted
+
+ def test_sha_writer(self):
+ writer = Sha1Writer()
+ assert 2 == writer.write("hi")
+ assert len(writer.sha(as_hex=1)) == 40
+ assert len(writer.sha(as_hex=0)) == 20
+
+ # make sure it does something ;)
+ prev_sha = writer.sha()
+ writer.write("hi again")
+ assert writer.sha() != prev_sha
+
+ def test_compressed_writer(self):
+ for ds in self.data_sizes:
+ fd, path = tempfile.mkstemp()
+ ostream = FDCompressedSha1Writer(fd)
+ data = make_bytes(ds, randomize=False)
+
+ # for now, just a single write, code doesn't care about chunking
+ assert len(data) == ostream.write(data)
+ ostream.close()
+
+		# it's closed already
+ self.failUnlessRaises(OSError, os.close, fd)
+
+ # read everything back, compare to data we zip
+ fd = os.open(path, os.O_RDONLY|getattr(os, 'O_BINARY', 0))
+ written_data = os.read(fd, os.path.getsize(path))
+ assert len(written_data) == os.path.getsize(path)
+ os.close(fd)
+ assert written_data == zlib.compress(data, 1) # best speed
+
+ os.remove(path)
+		# END for each data size
+
+ def test_decompress_reader_special_case(self):
+ odb = PureLooseObjectODB(fixture_path('objects'))
+ ostream = odb.stream(hex_to_bin('7bb839852ed5e3a069966281bb08d50012fb309b'))
+
+		# if there is a bug, we will be missing exactly one byte!
+ data = ostream.read()
+ assert len(data) == ostream.size
+
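
The object-stream round trip test_decompress_reader verifies can be sketched standalone. make_object is the fixture helper imported from the test's lib module above; its exact output format (object header plus payload) is assumed from how the test uses it.

from git.stream import DecompressMemMapReader
from git.util import zlib
from git.typ import str_blob_type

cdata = "x" * 1000                                        # content data
zdata = zlib.compress(make_object(str_blob_type, cdata))  # header + payload, zipped
typ, size, reader = DecompressMemMapReader.new(zdata, False)  # don't close on deletion
assert typ == str_blob_type and size == len(cdata)
assert reader.read() == cdata
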
diff --git a/git/test/test_util.py b/git/test/test_util.py
index e55a6d15..f737e660 100644
--- a/git/test/test_util.py
+++ b/git/test/test_util.py
@@ -7,7 +7,7 @@
import os
import tempfile
-from git.test.lib import *
+from lib import TestBase
from git.util import *
from git.objects.util import *
from git import *
@@ -15,6 +15,14 @@ from git.cmd import dashify
import time
+from git.util import (
+ to_hex_sha,
+ to_bin_sha,
+ NULL_HEX_SHA,
+ LockedFD,
+ Actor
+ )
+
class TestUtils(TestBase):
def setup(self):
@@ -25,8 +33,8 @@ class TestUtils(TestBase):
}
def test_it_should_dashify(self):
- assert_equal('this-is-my-argument', dashify('this_is_my_argument'))
- assert_equal('foo', dashify('foo'))
+ assert 'this-is-my-argument' == dashify('this_is_my_argument')
+ assert 'foo' == dashify('foo')
def test_lock_file(self):
@@ -107,3 +115,118 @@ class TestUtils(TestBase):
assert isinstance(Actor.committer(cr), Actor)
assert isinstance(Actor.author(cr), Actor)
#END assure config reader is handled
+
+ def test_basics(self):
+ assert to_hex_sha(NULL_HEX_SHA) == NULL_HEX_SHA
+ assert len(to_bin_sha(NULL_HEX_SHA)) == 20
+ assert to_hex_sha(to_bin_sha(NULL_HEX_SHA)) == NULL_HEX_SHA
+
+ def _cmp_contents(self, file_path, data):
+		# raise if the data in the file at file_path
+		# does not match the given data string
+ fp = open(file_path, "rb")
+ try:
+ assert fp.read() == data
+ finally:
+ fp.close()
+
+ def test_lockedfd(self):
+ my_file = tempfile.mktemp()
+ orig_data = "hello"
+ new_data = "world"
+ my_file_fp = open(my_file, "wb")
+ my_file_fp.write(orig_data)
+ my_file_fp.close()
+
+ try:
+ lfd = LockedFD(my_file)
+ lockfilepath = lfd._lockfilepath()
+
+ # cannot end before it was started
+ self.failUnlessRaises(AssertionError, lfd.rollback)
+ self.failUnlessRaises(AssertionError, lfd.commit)
+
+ # open for writing
+ assert not os.path.isfile(lockfilepath)
+ wfd = lfd.open(write=True)
+ assert lfd._fd is wfd
+ assert os.path.isfile(lockfilepath)
+
+ # write data and fail
+ os.write(wfd, new_data)
+ lfd.rollback()
+ assert lfd._fd is None
+ self._cmp_contents(my_file, orig_data)
+ assert not os.path.isfile(lockfilepath)
+
+		# an additional call doesn't fail
+ lfd.commit()
+ lfd.rollback()
+
+ # test reading
+ lfd = LockedFD(my_file)
+ rfd = lfd.open(write=False)
+ assert os.read(rfd, len(orig_data)) == orig_data
+
+ assert os.path.isfile(lockfilepath)
+ # deletion rolls back
+ del(lfd)
+ assert not os.path.isfile(lockfilepath)
+
+
+ # write data - concurrently
+ lfd = LockedFD(my_file)
+ olfd = LockedFD(my_file)
+ assert not os.path.isfile(lockfilepath)
+ wfdstream = lfd.open(write=True, stream=True) # this time as stream
+ assert os.path.isfile(lockfilepath)
+ # another one fails
+ self.failUnlessRaises(IOError, olfd.open)
+
+ wfdstream.write(new_data)
+ lfd.commit()
+ assert not os.path.isfile(lockfilepath)
+ self._cmp_contents(my_file, new_data)
+
+ # could test automatic _end_writing on destruction
+ finally:
+ os.remove(my_file)
+ # END final cleanup
+
+ # try non-existing file for reading
+ lfd = LockedFD(tempfile.mktemp())
+ try:
+ lfd.open(write=False)
+ except OSError:
+ assert not os.path.exists(lfd._lockfilepath())
+ else:
+ self.fail("expected OSError")
+ # END handle exceptions
+
+
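
test_lockedfd above walks the full lock-file protocol, including rollback and concurrent-writer rejection. The happy path reduces to this sketch; the path is a placeholder and must name an existing file.

import os
from git.util import LockedFD

lfd = LockedFD("some_file")       # placeholder path
fd = lfd.open(write=True)         # creates a lock file next to the target
os.write(fd, "new content")       # writes go to the lock file, not the target
lfd.commit()                      # replaces the target and removes the lock file
# lfd.rollback() would instead discard the writes and keep the original
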
+class TestActor(TestBase):
+ def test_from_string_should_separate_name_and_email(self):
+ a = Actor._from_string("Michael Trier <mtrier@example.com>")
+ assert "Michael Trier" == a.name
+ assert "mtrier@example.com" == a.email
+
+ # base type capabilities
+ assert a == a
+ assert not ( a != a )
+ m = set()
+ m.add(a)
+ m.add(a)
+ assert len(m) == 1
+
+ def test_from_string_should_handle_just_name(self):
+ a = Actor._from_string("Michael Trier")
+ assert "Michael Trier" == a.name
+ assert None == a.email
+
+ def test_should_display_representation(self):
+ a = Actor._from_string("Michael Trier <mtrier@example.com>")
+ assert '<git.Actor "Michael Trier <mtrier@example.com>">' == repr(a)
+
+ def test_str_should_alias_name(self):
+ a = Actor._from_string("Michael Trier <mtrier@example.com>")
+ assert a.name == str(a)